Repository: etalab/transport-site
Branch: master
Commit: 2914786e81c2
Files: 1573
Total size: 19.7 MB
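
To reproduce this exact snapshot locally, a minimal sketch (assuming the repository is hosted on GitHub under the same etalab/transport-site name; adjust the remote URL if it is hosted elsewhere):

    git clone https://github.com/etalab/transport-site.git   # assumed GitHub remote
    cd transport-site
    git checkout 2914786e81c2                                 # commit listed above
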
Directory structure:
gitextract_hy3cfsqu/
├── .credo.exs
├── .dialyzer_ignore.exs
├── .editorconfig
├── .eslintignore
├── .formatter.exs
├── .github/
│ ├── CODEOWNERS
│ ├── actions/
│ │ └── checkout-compile/
│ │ └── action.yml
│ └── workflows/
│ ├── ops_tests.yml
│ ├── sentry_release.yml
│ ├── test.yml
│ └── trivy_scan.yml
├── .gitignore
├── .miniorc.template
├── .stylelintrc.json
├── .tool-versions
├── .vscode/
│ └── launch.json
├── Dockerfile
├── Dockerfile.dev
├── LICENSE.AGPL.txt
├── README.md
├── apps/
│ ├── shared/
│ │ ├── lib/
│ │ │ ├── application.ex
│ │ │ ├── appsignal_filter.ex
│ │ │ ├── cldr.ex
│ │ │ ├── conditional_json_encoder.ex
│ │ │ ├── data_visualization.ex
│ │ │ ├── date_time_display.ex
│ │ │ ├── hasher.ex
│ │ │ ├── helpers.ex
│ │ │ ├── http_stream_v2.ex
│ │ │ ├── proxy.ex
│ │ │ ├── req_custom_cache.ex
│ │ │ ├── resource_schema.ex
│ │ │ ├── s3.ex
│ │ │ ├── sentry_exception_filter.ex
│ │ │ ├── syntax_colors.ex
│ │ │ ├── time_wrapper.ex
│ │ │ ├── validation/
│ │ │ │ ├── gbfs_validator.ex
│ │ │ │ ├── gtfs_validator.ex
│ │ │ │ └── validator.ex
│ │ │ ├── wrapper/
│ │ │ │ ├── wrapper_httpoison.ex
│ │ │ │ └── wrapper_req.ex
│ │ │ └── wrapper_ex_aws.ex
│ │ ├── meta/
│ │ │ ├── schema-irve-dynamique.json
│ │ │ └── schema-irve-statique.json
│ │ ├── mix.exs
│ │ └── test/
│ │ ├── data_visualization_test.exs
│ │ ├── date_time_display_test.exs
│ │ ├── hasher_test.exs
│ │ ├── helpers/
│ │ │ └── helpers_test.exs
│ │ ├── http_stream_v2_test.exs
│ │ ├── resource_schema_test.exs
│ │ ├── s3_test.exs
│ │ ├── support/
│ │ │ ├── cache_case.ex
│ │ │ ├── mocks.ex
│ │ │ ├── s3_test_utils.ex
│ │ │ └── test_utils.ex
│ │ ├── test_helper.exs
│ │ ├── time_wrapper_test.exs
│ │ └── validation/
│ │ ├── gbfs_validator_test.exs
│ │ └── gtfs_validator_test.exs
│ └── transport/
│ ├── client/
│ │ ├── .prettierignore
│ │ ├── .prettierrc.json
│ │ ├── eslint.config.mjs
│ │ ├── javascripts/
│ │ │ ├── app.js
│ │ │ ├── autocomplete.js
│ │ │ ├── autocomplete_address.js
│ │ │ ├── clipboard.js
│ │ │ ├── dataset-map.js
│ │ │ ├── explore.js
│ │ │ ├── fullscreen_wrapper.js
│ │ │ ├── gtfs.js
│ │ │ ├── map-config.js
│ │ │ ├── map-geojson.js
│ │ │ ├── map.js
│ │ │ ├── resource-viz.js
│ │ │ ├── utils.js
│ │ │ ├── validation-map.js
│ │ │ └── vega.js
│ │ ├── package.json
│ │ ├── stylesheets/
│ │ │ ├── _states.scss
│ │ │ ├── app.scss
│ │ │ ├── components/
│ │ │ │ ├── _aom_table.scss
│ │ │ │ ├── _autocomplete.scss
│ │ │ │ ├── _backoffice.scss
│ │ │ │ ├── _blog.scss
│ │ │ │ ├── _choose_file.scss
│ │ │ │ ├── _colorful-button.scss
│ │ │ │ ├── _community-resources.scss
│ │ │ │ ├── _dataset-details.scss
│ │ │ │ ├── _discussions.scss
│ │ │ │ ├── _download_availability.scss
│ │ │ │ ├── _error.scss
│ │ │ │ ├── _explore.scss
│ │ │ │ ├── _feedback.scss
│ │ │ │ ├── _fullscreen-wrapper.scss
│ │ │ │ ├── _gtfs_diff.scss
│ │ │ │ ├── _guide.scss
│ │ │ │ ├── _icons.scss
│ │ │ │ ├── _landing_page_vls.scss
│ │ │ │ ├── _legal.scss
│ │ │ │ ├── _login.scss
│ │ │ │ ├── _logo.scss
│ │ │ │ ├── _mail.scss
│ │ │ │ ├── _mailing-list.scss
│ │ │ │ ├── _map-js.scss
│ │ │ │ ├── _message.scss
│ │ │ │ ├── _notification.scss
│ │ │ │ ├── _pagination.scss
│ │ │ │ ├── _resource-details.scss
│ │ │ │ ├── _search.scss
│ │ │ │ ├── _shortlist.scss
│ │ │ │ ├── _stats.scss
│ │ │ │ ├── _tooltip.scss
│ │ │ │ └── _validation.scss
│ │ │ ├── datasets.scss
│ │ │ ├── espace_producteur.scss
│ │ │ ├── globals/
│ │ │ │ ├── _externals.scss
│ │ │ │ ├── _mixins.scss
│ │ │ │ └── _variables.scss
│ │ │ ├── home.scss
│ │ │ ├── main.scss
│ │ │ ├── prism.css
│ │ │ ├── producteurs.scss
│ │ │ ├── reuser_space.scss
│ │ │ └── reuses.scss
│ │ ├── webpack.common.js
│ │ ├── webpack.dev.js
│ │ └── webpack.prod.js
│ ├── lib/
│ │ ├── S3/
│ │ │ ├── aggregates_uploader.ex
│ │ │ └── unzip.ex
│ │ ├── converters/
│ │ │ └── converter.ex
│ │ ├── data_frame/
│ │ │ ├── requiredness_processing.ex
│ │ │ └── validation_primitives.ex
│ │ ├── data_screens/
│ │ │ └── data_screens.ex
│ │ ├── datagouvfr/
│ │ │ ├── authentication.ex
│ │ │ ├── client/
│ │ │ │ ├── api.ex
│ │ │ │ ├── community_resources.ex
│ │ │ │ ├── datasets.ex
│ │ │ │ ├── discussions.ex
│ │ │ │ ├── oauth.ex
│ │ │ │ ├── organization.ex
│ │ │ │ ├── resources.ex
│ │ │ │ ├── reuses.ex
│ │ │ │ └── user.ex
│ │ │ └── client.ex
│ │ ├── db/
│ │ │ ├── administrative_division.ex
│ │ │ ├── aom.ex
│ │ │ ├── api_request.ex
│ │ │ ├── autocomplete.ex
│ │ │ ├── breaking_news.ex
│ │ │ ├── commune.ex
│ │ │ ├── company.ex
│ │ │ ├── contact.ex
│ │ │ ├── data_conversion.ex
│ │ │ ├── data_import.ex
│ │ │ ├── data_import_batch.ex
│ │ │ ├── dataset.ex
│ │ │ ├── dataset_follower.ex
│ │ │ ├── dataset_geographic_view.ex
│ │ │ ├── dataset_history.ex
│ │ │ ├── dataset_history_resources.ex
│ │ │ ├── dataset_monthly_metric.ex
│ │ │ ├── dataset_score.ex
│ │ │ ├── dataset_subtype.ex
│ │ │ ├── default_token.ex
│ │ │ ├── departement.ex
│ │ │ ├── encrypted/
│ │ │ │ └── binary.ex
│ │ │ ├── epci.ex
│ │ │ ├── feature_usage.ex
│ │ │ ├── geo_data/
│ │ │ │ ├── geo_data.ex
│ │ │ │ └── geo_data_import.ex
│ │ │ ├── gtfs/
│ │ │ │ ├── gtfs_agency.ex
│ │ │ │ ├── gtfs_calendar.ex
│ │ │ │ ├── gtfs_calendar_dates.ex
│ │ │ │ ├── gtfs_stop.ex
│ │ │ │ ├── gtfs_stop_times.ex
│ │ │ │ └── gtfs_trips.ex
│ │ │ ├── hidden_reuser_alert.ex
│ │ │ ├── irve_valid_file.ex
│ │ │ ├── irve_valid_pdc.ex
│ │ │ ├── logs_import.ex
│ │ │ ├── metrics.ex
│ │ │ ├── multi_validation.ex
│ │ │ ├── notification.ex
│ │ │ ├── notification_subscription.ex
│ │ │ ├── offer.ex
│ │ │ ├── organization.ex
│ │ │ ├── postgrex_types.ex
│ │ │ ├── processing_report.ex
│ │ │ ├── proxy_request.ex
│ │ │ ├── region.ex
│ │ │ ├── repo.ex
│ │ │ ├── resource.ex
│ │ │ ├── resource_download.ex
│ │ │ ├── resource_history.ex
│ │ │ ├── resource_metadata.ex
│ │ │ ├── resource_monthly_metric.ex
│ │ │ ├── resource_related.ex
│ │ │ ├── resource_unavailability.ex
│ │ │ ├── reuse.ex
│ │ │ ├── reuser_improved_data.ex
│ │ │ ├── stats_history.ex
│ │ │ ├── table_size_history.ex
│ │ │ ├── token.ex
│ │ │ └── user_feedback.ex
│ │ ├── ecto_interval.ex
│ │ ├── enroute/
│ │ │ └── chouette_valid_rulesets_client.ex
│ │ ├── gtfs/
│ │ │ └── utils.ex
│ │ ├── http/
│ │ │ └── utils.ex
│ │ ├── irve/
│ │ │ ├── data_frame.ex
│ │ │ ├── database_exporter.ex
│ │ │ ├── database_importer.ex
│ │ │ ├── deduplicator.ex
│ │ │ ├── dynamic_irve_schema.ex
│ │ │ ├── extractor.ex
│ │ │ ├── fetcher.ex
│ │ │ ├── http_pagination.ex
│ │ │ ├── processing.ex
│ │ │ ├── raw_report_item.ex
│ │ │ ├── raw_static_consolidation.ex
│ │ │ ├── simple_consolidation.ex
│ │ │ ├── simple_report_item.ex
│ │ │ ├── static_irve_schema.ex
│ │ │ ├── static_probes.ex
│ │ │ └── validator/
│ │ │ ├── data_frame_validation.ex
│ │ │ ├── field_validation.ex
│ │ │ ├── summary.ex
│ │ │ └── validator.ex
│ │ ├── jobs/
│ │ │ ├── RamboLauncher.ex
│ │ │ ├── analyze_irve_job.ex
│ │ │ ├── archive_metrics_job.ex
│ │ │ ├── backfill/
│ │ │ │ ├── backfill_metadata_non_gtfs_resource_history.ex
│ │ │ │ ├── backfill_resource_history_filesize.ex
│ │ │ │ ├── backfill_resource_history_resource_id.ex
│ │ │ │ ├── backfill_resource_history_schema_details.ex
│ │ │ │ └── remove_gtfs_rt_snapshots.ex
│ │ │ ├── clean_multi_validation_job.ex
│ │ │ ├── clean_on_demand_validation_job.ex
│ │ │ ├── consolidate_bnlc_job.ex
│ │ │ ├── consolidate_lez_job.ex
│ │ │ ├── conversions/
│ │ │ │ ├── clean_orphan_conversions_job.ex
│ │ │ │ ├── generic_converter.ex
│ │ │ │ ├── gtfs_generic_converter.ex
│ │ │ │ ├── gtfs_to_geojson_converter_job.ex
│ │ │ │ ├── netex_generic_converter.ex
│ │ │ │ └── netex_to_geojson_converter_job.ex
│ │ │ ├── create_tokens_job.ex
│ │ │ ├── custom_logo_conversion_job.ex
│ │ │ ├── database_backup_replication_job.ex
│ │ │ ├── database_vacuum_job.ex
│ │ │ ├── dataset_history_job.ex
│ │ │ ├── dataset_now_on_nap_notification_job.ex
│ │ │ ├── dataset_quality_score.ex
│ │ │ ├── datasets_climate_resilience_bill_not_lo_licence_job.ex
│ │ │ ├── datasets_switching_climate_resilience_bill_job.ex
│ │ │ ├── datasets_without_gtfs_rt_related_resources_notification_job.ex
│ │ │ ├── dedupe_history_job.ex
│ │ │ ├── default_tokens_job.ex
│ │ │ ├── expiration_notification_job.ex
│ │ │ ├── gbfs_multi_validation_job.ex
│ │ │ ├── gbfs_operators_notification_job.ex
│ │ │ ├── geo_data/
│ │ │ │ ├── base.ex
│ │ │ │ ├── bnlc_to_geo_data.ex
│ │ │ │ ├── gbfs_stations_to_geo_data.ex
│ │ │ │ ├── irve_to_geo_data.ex
│ │ │ │ └── lez_to_geo_data.ex
│ │ │ ├── gtfs_diff_job.ex
│ │ │ ├── gtfs_import_stops_job.ex
│ │ │ ├── gtfs_multi_validation_job.ex
│ │ │ ├── gtfs_rt_metadata.ex
│ │ │ ├── gtfs_rt_multi_validation_job.ex
│ │ │ ├── gtfs_to_db.ex
│ │ │ ├── import_companies_job.ex
│ │ │ ├── import_dataset_contact_points_job.ex
│ │ │ ├── import_dataset_follower_reuser_improved_data_job.ex
│ │ │ ├── import_dataset_followers_job.ex
│ │ │ ├── import_dataset_monthly_metrics_job.ex
│ │ │ ├── import_gbfs_feed_contact_email_job.ex
│ │ │ ├── import_resource_monthly_metrics_job.ex
│ │ │ ├── import_reuses_job.ex
│ │ │ ├── irve_raw_consolidation_job.ex
│ │ │ ├── irve_simple_consolidation_job.ex
│ │ │ ├── multi_validation_with_error_notification_job.ex
│ │ │ ├── netex_poller_job.ex
│ │ │ ├── new_comments_notification_job.ex
│ │ │ ├── new_datagouv_datasets_job.ex
│ │ │ ├── new_dataset_notifications_job.ex
│ │ │ ├── notification_subscription_producer_job.ex
│ │ │ ├── oban_logger.ex
│ │ │ ├── on_demand_netex_poller_job.ex
│ │ │ ├── on_demand_validation_helpers.ex
│ │ │ ├── on_demand_validation_job.ex
│ │ │ ├── periodic_reminder_producers_notification_job.ex
│ │ │ ├── promote_producer_space_job.ex
│ │ │ ├── promote_reuser_space_job.ex
│ │ │ ├── refresh_autocomplete_job.ex
│ │ │ ├── remove_history_job.ex
│ │ │ ├── resource_history_job.ex
│ │ │ ├── resource_history_jsonschema_validation_job.ex
│ │ │ ├── resource_history_schema_validation.ex
│ │ │ ├── resource_history_tableschema_validation_job.ex
│ │ │ ├── resource_history_validata_json_job.ex
│ │ │ ├── resource_history_validation_job.ex
│ │ │ ├── resource_unavailable_job.ex
│ │ │ ├── resource_unavailable_notification_job.ex
│ │ │ ├── resource_validation_job.ex
│ │ │ ├── resources_changed_notification_job.ex
│ │ │ ├── stops_registry_snapshot_job.ex
│ │ │ ├── table_size_history_job.ex
│ │ │ ├── update_contacts_job.ex
│ │ │ ├── update_counter_cache_job.ex
│ │ │ ├── visit_download_statistics_job.ex
│ │ │ ├── visit_proxy_statistics_job.ex
│ │ │ ├── visit_statistics_base.ex
│ │ │ ├── warn_user_inactivity_job.ex
│ │ │ ├── workflow.ex
│ │ │ └── workflow_dummy_jobs.ex
│ │ ├── mailer/
│ │ │ ├── admin_notifier.ex
│ │ │ ├── mailer.ex
│ │ │ └── user_notifier.ex
│ │ ├── mix/
│ │ │ └── tasks/
│ │ │ ├── dump_gtfs_rt.ex
│ │ │ ├── logs.ex
│ │ │ ├── npm.ex
│ │ │ ├── transport/
│ │ │ │ ├── add_dataset_subtypes.ex
│ │ │ │ ├── add_monaco.ex
│ │ │ │ ├── add_switzerland.ex
│ │ │ │ ├── import_aoms.ex
│ │ │ │ ├── import_communes.ex
│ │ │ │ ├── import_departements.ex
│ │ │ │ ├── import_epci.ex
│ │ │ │ ├── import_offers.ex
│ │ │ │ ├── open_api_spec.ex
│ │ │ │ └── update_france_geojson.ex
│ │ │ ├── url.ex
│ │ │ └── yarn.ex
│ │ ├── netex/
│ │ │ ├── Readme.md
│ │ │ ├── archive_parser.ex
│ │ │ ├── calendars_streaming_parser.ex
│ │ │ ├── chouette_valid_ruleset_generator.ex
│ │ │ ├── description_parser.ex
│ │ │ ├── enroute-starter-kit.json
│ │ │ ├── french_profile/
│ │ │ │ ├── v1.ex
│ │ │ │ └── v2.ex
│ │ │ ├── french_profile.ex
│ │ │ ├── netex_helpers.ex
│ │ │ ├── saxy_helpers.ex
│ │ │ ├── service_calendars_streaming_parser.ex
│ │ │ ├── stop_places_streaming_parser.ex
│ │ │ ├── to_geojson/
│ │ │ │ ├── coordinates.ex
│ │ │ │ ├── geojson_builder.ex
│ │ │ │ ├── parsers/
│ │ │ │ │ ├── quay_parser.ex
│ │ │ │ │ └── service_link_parser.ex
│ │ │ │ └── to_geojson.ex
│ │ │ └── types_of_frame_streaming_parser.ex
│ │ ├── queries/
│ │ │ └── dashboard_import_count.sql
│ │ ├── registry/
│ │ │ ├── engine.ex
│ │ │ ├── extractor.ex
│ │ │ ├── gtfs.ex
│ │ │ ├── model/
│ │ │ │ ├── data_source.ex
│ │ │ │ └── stop.ex
│ │ │ ├── netex.ex
│ │ │ └── result.ex
│ │ ├── siri_queries.ex
│ │ ├── siri_query_generator.ex
│ │ ├── support/
│ │ │ ├── logger/
│ │ │ │ └── translator.ex
│ │ │ ├── pretty_json_encoder.ex
│ │ │ └── proxy.ex
│ │ ├── transport/
│ │ │ ├── application.ex
│ │ │ ├── appsignal_ecto_telemetry.ex
│ │ │ ├── availability_checker.ex
│ │ │ ├── cache.ex
│ │ │ ├── cached_files.ex
│ │ │ ├── comments_checker.ex
│ │ │ ├── community_resource_cleaner.ex
│ │ │ ├── companies.ex
│ │ │ ├── consolidated_dataset.ex
│ │ │ ├── counter_cache.ex
│ │ │ ├── custom_search_message.ex
│ │ │ ├── data_checker.ex
│ │ │ ├── dataset_checks.ex
│ │ │ ├── dataset_index.ex
│ │ │ ├── expiration.ex
│ │ │ ├── explore_vehicle_positions_poller.ex
│ │ │ ├── file_downloads.ex
│ │ │ ├── gbfs_metadata.ex
│ │ │ ├── gbfs_to_geojson.ex
│ │ │ ├── gbfs_utils.ex
│ │ │ ├── gtfs_data.ex
│ │ │ ├── gtfs_diff.ex
│ │ │ ├── gtfs_export_stops.ex
│ │ │ ├── gtfs_import_stops.ex
│ │ │ ├── gtfs_query.ex
│ │ │ ├── gtfs_rt.ex
│ │ │ ├── history_fetcher.ex
│ │ │ ├── import_data.ex
│ │ │ ├── import_data_worker.ex
│ │ │ ├── log_cleaner.ex
│ │ │ ├── log_time_taken.ex
│ │ │ ├── notification_reason.ex
│ │ │ ├── phoenix_dashboard_telemetry.ex
│ │ │ ├── preemptive_api_cache.ex
│ │ │ ├── preemptive_base_cache.ex
│ │ │ ├── preemptive_home_stats_cache.ex
│ │ │ ├── preemptive_stats_cache.ex
│ │ │ ├── protobuf/
│ │ │ │ ├── gtfs-realtime.pb.ex
│ │ │ │ ├── gtfs-realtime.proto
│ │ │ │ └── readme.md
│ │ │ ├── quantum_scheduler.ex
│ │ │ ├── schemas.ex
│ │ │ ├── search_communes.ex
│ │ │ ├── stats_handler.ex
│ │ │ ├── telemetry.ex
│ │ │ ├── vault.ex
│ │ │ └── zip_probe.ex
│ │ ├── transport.ex
│ │ ├── transport_web/
│ │ │ ├── api/
│ │ │ │ ├── controllers/
│ │ │ │ │ ├── aom_controller.ex
│ │ │ │ │ ├── autocomplete_controller.ex
│ │ │ │ │ ├── datasets_controller.ex
│ │ │ │ │ ├── features_controller.ex
│ │ │ │ │ ├── geo_query_controller.ex
│ │ │ │ │ ├── gtfs_stops_controller.ex
│ │ │ │ │ ├── stats_controller.ex
│ │ │ │ │ └── validators_controller.ex
│ │ │ │ ├── plugs/
│ │ │ │ │ ├── auth.ex
│ │ │ │ │ ├── cache.ex
│ │ │ │ │ └── token_auth.ex
│ │ │ │ ├── router.ex
│ │ │ │ ├── schemas.ex
│ │ │ │ ├── spec.ex
│ │ │ │ └── views/
│ │ │ │ ├── aom_view.ex
│ │ │ │ ├── autocomplete_view.ex
│ │ │ │ ├── dataset_view.ex
│ │ │ │ ├── json_view.ex
│ │ │ │ └── stats_view.ex
│ │ │ ├── channels/
│ │ │ │ ├── explore_channel.ex
│ │ │ │ └── user_socket.ex
│ │ │ ├── components/
│ │ │ │ └── colorful_button.ex
│ │ │ ├── controllers/
│ │ │ │ ├── aoms_controller.ex
│ │ │ │ ├── atom_controller.ex
│ │ │ │ ├── backoffice/
│ │ │ │ │ ├── breaking_news_controller.ex
│ │ │ │ │ ├── broken_urls_controller.ex
│ │ │ │ │ ├── contact_controller.ex
│ │ │ │ │ ├── dashboard_controller.ex
│ │ │ │ │ ├── dataset_controller.ex
│ │ │ │ │ ├── gtfs_export_controller.ex
│ │ │ │ │ ├── notification_subscription_controller.ex
│ │ │ │ │ └── page_controller.ex
│ │ │ │ ├── contact_controller.ex
│ │ │ │ ├── conversion_controller.ex
│ │ │ │ ├── dataset_controller.ex
│ │ │ │ ├── discussion_controller.ex
│ │ │ │ ├── espace_producteur_controller.ex
│ │ │ │ ├── explore_controller.ex
│ │ │ │ ├── gbfs_analyzer_controller.ex
│ │ │ │ ├── gbfs_to_geojson_controller.ex
│ │ │ │ ├── geojson_conversion_controller.ex
│ │ │ │ ├── landing_pages_controller.ex
│ │ │ │ ├── page_controller.ex
│ │ │ │ ├── pagination_helpers.ex
│ │ │ │ ├── resource_controller.ex
│ │ │ │ ├── reuse_controller.ex
│ │ │ │ ├── reuser_space_controller.ex
│ │ │ │ ├── session_controller.ex
│ │ │ │ ├── stats_controller.ex
│ │ │ │ └── validation_controller.ex
│ │ │ ├── endpoint.ex
│ │ │ ├── gettext.ex
│ │ │ ├── live/
│ │ │ │ ├── backoffice/
│ │ │ │ │ ├── cache_live.ex
│ │ │ │ │ ├── cache_live.html.heex
│ │ │ │ │ ├── custom_tags_live.ex
│ │ │ │ │ ├── data_import_batch_report_live.ex
│ │ │ │ │ ├── data_import_batch_report_live.html.heex
│ │ │ │ │ ├── dataset_subtype_live.ex
│ │ │ │ │ ├── declarative_spatial_areas_live.ex
│ │ │ │ │ ├── edit_dataset_live.ex
│ │ │ │ │ ├── edit_dataset_live.html.heex
│ │ │ │ │ ├── email_preview_live.ex
│ │ │ │ │ ├── email_preview_live.html.heex
│ │ │ │ │ ├── irve_dashboard_live.ex
│ │ │ │ │ ├── irve_dashboard_live.html.heex
│ │ │ │ │ ├── jobs2_live.ex
│ │ │ │ │ ├── jobs2_live.html.heex
│ │ │ │ │ ├── jobs_live.ex
│ │ │ │ │ ├── jobs_live.html.heex
│ │ │ │ │ ├── jobs_table_component.ex
│ │ │ │ │ ├── legal_owner_select_live.ex
│ │ │ │ │ ├── offer_select_live.ex
│ │ │ │ │ ├── proxy_config_live.ex
│ │ │ │ │ ├── proxy_config_live.html.heex
│ │ │ │ │ ├── rate_limiter_live.ex
│ │ │ │ │ └── rate_limiter_live.html.heex
│ │ │ │ ├── dataset_notifications_live.ex
│ │ │ │ ├── dataset_notifications_live.html.heex
│ │ │ │ ├── discussions_live.ex
│ │ │ │ ├── feedback_live.ex
│ │ │ │ ├── feedback_live.html.heex
│ │ │ │ ├── follow_dataset_live.ex
│ │ │ │ ├── gtfs_diff_explain/
│ │ │ │ │ ├── explanations.ex
│ │ │ │ │ ├── structural_changes.ex
│ │ │ │ │ └── summary.ex
│ │ │ │ ├── gtfs_diff_explain.ex
│ │ │ │ ├── gtfs_diff_select_live/
│ │ │ │ │ ├── analysis.ex
│ │ │ │ │ ├── differences.ex
│ │ │ │ │ ├── gtfs_specification.ex
│ │ │ │ │ ├── results.ex
│ │ │ │ │ ├── setup.ex
│ │ │ │ │ ├── shared.ex
│ │ │ │ │ └── steps.ex
│ │ │ │ ├── gtfs_diff_select_live.ex
│ │ │ │ ├── gtfs_diff_select_live.html.heex
│ │ │ │ ├── notifications_live.ex
│ │ │ │ ├── notifications_live.html.heex
│ │ │ │ ├── on_demand_validation_live.ex
│ │ │ │ ├── on_demand_validation_live.html.heex
│ │ │ │ ├── on_demand_validation_select_live.ex
│ │ │ │ ├── on_demand_validation_select_live.html.heex
│ │ │ │ ├── proxy_requests_count_live.ex
│ │ │ │ ├── reuses_live.ex
│ │ │ │ ├── send_now_on_nap_notification_view.ex
│ │ │ │ ├── siri_querier_live.ex
│ │ │ │ ├── siri_querier_live.html.heex
│ │ │ │ ├── start_consolidate_job_view.ex
│ │ │ │ ├── user_space_datasets_live.ex
│ │ │ │ ├── validate_dataset_view.ex
│ │ │ │ └── validate_resource_live.ex
│ │ │ ├── plugs/
│ │ │ │ ├── custom_secure_browser_headers.ex
│ │ │ │ ├── halt.ex
│ │ │ │ ├── head.ex
│ │ │ │ ├── health_check.ex
│ │ │ │ ├── producer_data.ex
│ │ │ │ ├── put_locale.ex
│ │ │ │ ├── rate_limiter.ex
│ │ │ │ ├── reuser_data.ex
│ │ │ │ ├── router.ex
│ │ │ │ └── worker_healthcheck.ex
│ │ │ ├── presence.ex
│ │ │ ├── redirect.ex
│ │ │ ├── router.ex
│ │ │ ├── session.ex
│ │ │ ├── templates/
│ │ │ │ ├── aoms/
│ │ │ │ │ └── index.html.heex
│ │ │ │ ├── atom/
│ │ │ │ │ └── index.html.heex
│ │ │ │ ├── backoffice/
│ │ │ │ │ ├── breaking_news/
│ │ │ │ │ │ └── index.html.heex
│ │ │ │ │ ├── broken_urls/
│ │ │ │ │ │ └── index.html.heex
│ │ │ │ │ ├── contact/
│ │ │ │ │ │ ├── _notification_subscriptions.html.heex
│ │ │ │ │ │ ├── _notifications.html.heex
│ │ │ │ │ │ ├── form.html.heex
│ │ │ │ │ │ └── index.html.heex
│ │ │ │ │ ├── dashboard/
│ │ │ │ │ │ └── index.html.heex
│ │ │ │ │ └── page/
│ │ │ │ │ ├── _dataset.html.heex
│ │ │ │ │ ├── form_dataset.html.heex
│ │ │ │ │ └── index.html.heex
│ │ │ │ ├── dataset/
│ │ │ │ │ ├── _banner.html.heex
│ │ │ │ │ ├── _community_resources.html.heex
│ │ │ │ │ ├── _community_ressource.html.heex
│ │ │ │ │ ├── _custom_message.html.heex
│ │ │ │ │ ├── _dataset_resources_history.html.heex
│ │ │ │ │ ├── _dataset_scores_chart.html.heex
│ │ │ │ │ ├── _dataset_type.html.heex
│ │ │ │ │ ├── _discussion.html.heex
│ │ │ │ │ ├── _header_links.html.heex
│ │ │ │ │ ├── _history_message.html.heex
│ │ │ │ │ ├── _licence.html.heex
│ │ │ │ │ ├── _notifications_sent.html.heex
│ │ │ │ │ ├── _resource.html.heex
│ │ │ │ │ ├── _resource_validation_summary.html.heex
│ │ │ │ │ ├── _resource_validation_summary_gtfs.html.heex
│ │ │ │ │ ├── _resource_validation_summary_netex.html.heex
│ │ │ │ │ ├── _resources_container.html.heex
│ │ │ │ │ ├── _reuser_message.html.heex
│ │ │ │ │ ├── details.html.heex
│ │ │ │ │ └── index.html.heex
│ │ │ │ ├── email/
│ │ │ │ │ ├── bnlc_consolidation_report.html.heex
│ │ │ │ │ ├── contact.html.heex
│ │ │ │ │ ├── dataset_now_on_nap.html.md
│ │ │ │ │ ├── dataset_with_error_producer.html.md
│ │ │ │ │ ├── dataset_with_error_reuser.html.md
│ │ │ │ │ ├── datasets_climate_resilience_bill_inappropriate_licence.html.heex
│ │ │ │ │ ├── datasets_switching_climate_resilience_bill.html.md
│ │ │ │ │ ├── datasets_without_gtfs_rt_related_resources.html.heex
│ │ │ │ │ ├── expiration.html.heex
│ │ │ │ │ ├── expiration_producer.html.md
│ │ │ │ │ ├── expiration_reuser.html.heex
│ │ │ │ │ ├── feedback.html.md
│ │ │ │ │ ├── inactive_datasets.html.heex
│ │ │ │ │ ├── new_comments_producer.html.heex
│ │ │ │ │ ├── new_comments_reuser.html.heex
│ │ │ │ │ ├── new_datagouv_datasets.html.heex
│ │ │ │ │ ├── new_dataset.html.heex
│ │ │ │ │ ├── oban_failure.html.md
│ │ │ │ │ ├── producer_with_subscriptions.html.md
│ │ │ │ │ ├── producer_without_subscriptions.html.md
│ │ │ │ │ ├── promote_producer_space.html.md
│ │ │ │ │ ├── promote_reuser_space.html.md
│ │ │ │ │ ├── resource_unavailable_producer.html.md
│ │ │ │ │ ├── resource_unavailable_reuser.html.md
│ │ │ │ │ ├── resources_changed.html.md
│ │ │ │ │ ├── unknown_gbfs_operator_feeds.html.heex
│ │ │ │ │ ├── visit_download_statistics.html.md
│ │ │ │ │ ├── visit_proxy_statistics.html.md
│ │ │ │ │ └── warn_inactivity.html.md
│ │ │ │ ├── error/
│ │ │ │ │ ├── 400_family_errors.html.heex
│ │ │ │ │ └── internal_server_error.html.heex
│ │ │ │ ├── espace_producteur/
│ │ │ │ │ ├── _important_information.html.heex
│ │ │ │ │ ├── _proxy_requests_stats.html.heex
│ │ │ │ │ ├── _specify_url.html.heex
│ │ │ │ │ ├── _upload_file.html.heex
│ │ │ │ │ ├── delete_resource_confirmation.html.heex
│ │ │ │ │ ├── discussions.html.heex
│ │ │ │ │ ├── download_statistics.html.heex
│ │ │ │ │ ├── edit_dataset.html.heex
│ │ │ │ │ ├── espace_producteur.html.heex
│ │ │ │ │ ├── proxy_statistics.html.heex
│ │ │ │ │ ├── resource_form.html.heex
│ │ │ │ │ └── reuser_improved_data.html.heex
│ │ │ │ ├── explore/
│ │ │ │ │ ├── explore.html.heex
│ │ │ │ │ └── gtfs_stops.html.heex
│ │ │ │ ├── gbfs_analyzer/
│ │ │ │ │ └── index.html.heex
│ │ │ │ ├── geojson_conversion/
│ │ │ │ │ └── index.html.heex
│ │ │ │ ├── landing_pages/
│ │ │ │ │ └── vls.html.heex
│ │ │ │ ├── layout/
│ │ │ │ │ ├── _breaking_news.html.heex
│ │ │ │ │ ├── _footer.html.heex
│ │ │ │ │ ├── _header.html.heex
│ │ │ │ │ ├── app.html.heex
│ │ │ │ │ └── email.html.heex
│ │ │ │ ├── page/
│ │ │ │ │ ├── _home_autocomplete.html.heex
│ │ │ │ │ ├── accessibility.html.md
│ │ │ │ │ ├── index.html.heex
│ │ │ │ │ ├── infos_producteurs.html.heex
│ │ │ │ │ ├── infos_reutilisateurs.html.heex
│ │ │ │ │ ├── join_the_community.html.heex
│ │ │ │ │ ├── login.html.heex
│ │ │ │ │ ├── missions.html.md
│ │ │ │ │ ├── nouveautes.html.md
│ │ │ │ │ └── single_page.html.heex
│ │ │ │ ├── resource/
│ │ │ │ │ ├── _download_availability.html.heex
│ │ │ │ │ ├── _errors_warnings_count.html.heex
│ │ │ │ │ ├── _geojson.html.heex
│ │ │ │ │ ├── _gtfs_coordinates_issue.html.heex
│ │ │ │ │ ├── _gtfs_duplicate_stop_sequence_issue.html.heex
│ │ │ │ │ ├── _gtfs_duplicate_stops_issue.html.heex
│ │ │ │ │ ├── _gtfs_extra_file_issue.html.heex
│ │ │ │ │ ├── _gtfs_generic_issue.html.heex
│ │ │ │ │ ├── _gtfs_invalid_shape_id_issue.html.heex
│ │ │ │ │ ├── _gtfs_missing_file_issue.html.heex
│ │ │ │ │ ├── _gtfs_missing_id_issue.html.heex
│ │ │ │ │ ├── _gtfs_missing_name_issue.html.heex
│ │ │ │ │ ├── _gtfs_negative_stop_duration_issue.html.heex
│ │ │ │ │ ├── _gtfs_no_calendar.html.heex
│ │ │ │ │ ├── _gtfs_rt.html.heex
│ │ │ │ │ ├── _gtfs_rt_errors_for_severity.html.heex
│ │ │ │ │ ├── _gtfs_rt_previous_validations_details.html.heex
│ │ │ │ │ ├── _gtfs_speed_issue.html.heex
│ │ │ │ │ ├── _gtfs_subfolder_issue.html.heex
│ │ │ │ │ ├── _gtfs_unloadable_model_issue.html.heex
│ │ │ │ │ ├── _gtfs_unusable_trip.html.heex
│ │ │ │ │ ├── _gtfs_unused_shape_issue.html.heex
│ │ │ │ │ ├── _gtfs_unused_stop_issue.html.heex
│ │ │ │ │ ├── _mobilitydata_metadata.html.heex
│ │ │ │ │ ├── _netex_generic_issue.html.heex
│ │ │ │ │ ├── _netex_validation_errors_v0_1_0.html.heex
│ │ │ │ │ ├── _netex_validation_errors_v0_2_x.html.heex
│ │ │ │ │ ├── _netex_xsd_schema.html.heex
│ │ │ │ │ ├── _on_demand_validation_hint.html.heex
│ │ │ │ │ ├── _related_resources.html.heex
│ │ │ │ │ ├── _requestor_ref.html.heex
│ │ │ │ │ ├── _resource_description.html.heex
│ │ │ │ │ ├── _resources_details_gtfs.html.heex
│ │ │ │ │ ├── _resources_details_netex.html.heex
│ │ │ │ │ ├── _resources_netex_validation_details.html.heex
│ │ │ │ │ ├── _search_bar.html.heex
│ │ │ │ │ ├── _validate_gbfs_now.html.heex
│ │ │ │ │ ├── _validate_gtfs_rt_now.html.heex
│ │ │ │ │ ├── _validation_report.html.heex
│ │ │ │ │ ├── _validation_report_gbfs.html.heex
│ │ │ │ │ ├── _validation_report_gtfs_rt.html.heex
│ │ │ │ │ ├── _validation_report_mobilitydata_gtfs.html.heex
│ │ │ │ │ ├── _validation_report_schema.html.heex
│ │ │ │ │ ├── _validation_summary.html.heex
│ │ │ │ │ ├── details.html.heex
│ │ │ │ │ ├── gtfs_details.html.heex
│ │ │ │ │ └── netex_details.html.heex
│ │ │ │ ├── reuse/
│ │ │ │ │ └── index.html.heex
│ │ │ │ ├── reuser_space/
│ │ │ │ │ ├── datasets_edit.html.heex
│ │ │ │ │ ├── index.html.heex
│ │ │ │ │ ├── new_token.html.heex
│ │ │ │ │ └── settings.html.heex
│ │ │ │ ├── stats/
│ │ │ │ │ ├── _maps.html.heex
│ │ │ │ │ ├── index.html.heex
│ │ │ │ │ └── metabase_dashboard.html.heex
│ │ │ │ └── validation/
│ │ │ │ ├── expired.html.heex
│ │ │ │ ├── show_gtfs.html.heex
│ │ │ │ ├── show_irve_statique.html.heex
│ │ │ │ ├── show_netex_v0_1_0.html.heex
│ │ │ │ └── show_netex_v0_2_x.html.heex
│ │ │ └── views/
│ │ │ ├── aoms_view.ex
│ │ │ ├── atom_view.ex
│ │ │ ├── backoffice/
│ │ │ │ ├── breaking_news_view.ex
│ │ │ │ ├── broken_urls_view.ex
│ │ │ │ ├── contact_view.ex
│ │ │ │ ├── dashboard_view.ex
│ │ │ │ └── page_view.ex
│ │ │ ├── bread_crumbs.ex
│ │ │ ├── dataset_view.ex
│ │ │ ├── email_view.ex
│ │ │ ├── error_helpers.ex
│ │ │ ├── error_view.ex
│ │ │ ├── espace_producteur_view.ex
│ │ │ ├── explore_view.ex
│ │ │ ├── gbfs_anaylzer_view.ex
│ │ │ ├── geojson_conversion_view.ex
│ │ │ ├── input_helpers.ex
│ │ │ ├── landing_pages_view.ex
│ │ │ ├── layout_view.ex
│ │ │ ├── markdown_handler.ex
│ │ │ ├── netex_report_components.ex
│ │ │ ├── page_view.ex
│ │ │ ├── resource_view.ex
│ │ │ ├── reuse_view.ex
│ │ │ ├── reuser_space_view.ex
│ │ │ ├── seo_metadata.ex
│ │ │ ├── stats_view.ex
│ │ │ └── validation_view.ex
│ │ ├── transport_web.ex
│ │ ├── unlock/
│ │ │ ├── aggregate_processor.ex
│ │ │ ├── batch_metrics.ex
│ │ │ ├── cached_fetch.ex
│ │ │ ├── config.ex
│ │ │ ├── controller.ex
│ │ │ ├── dynamic_irve/
│ │ │ │ ├── controller.ex
│ │ │ │ ├── feed_store.ex
│ │ │ │ ├── feed_worker.ex
│ │ │ │ └── renderer.ex
│ │ │ ├── dynamic_irve_supervisor.ex
│ │ │ ├── enforce_ttl.ex
│ │ │ ├── gunzip_tools.ex
│ │ │ ├── http_client.ex
│ │ │ ├── params.ex
│ │ │ ├── plugs/
│ │ │ │ └── token_auth.ex
│ │ │ ├── router.ex
│ │ │ ├── shared.ex
│ │ │ ├── siri.ex
│ │ │ ├── telemetry.ex
│ │ │ ├── views/
│ │ │ │ └── error_view.ex
│ │ │ └── xml_helper.ex
│ │ ├── validators/
│ │ │ ├── enroute_chouette_valid_client.ex
│ │ │ ├── gbfs_validator.ex
│ │ │ ├── gtfs_rt_validator.ex
│ │ │ ├── gtfs_transport_validator.ex
│ │ │ ├── jsonschema_validata_json_validator.ex
│ │ │ ├── jsonschema_validator.ex
│ │ │ ├── mobilitydata_gtfs_validator.ex
│ │ │ ├── mobilitydata_gtfs_validator_client.ex
│ │ │ ├── netex/
│ │ │ │ ├── CHANGELOG.md
│ │ │ │ ├── metadata_extractor.ex
│ │ │ │ ├── results_adapter.ex
│ │ │ │ ├── results_adapters/
│ │ │ │ │ ├── commons.ex
│ │ │ │ │ ├── v0_1_0.ex
│ │ │ │ │ ├── v0_2_0.ex
│ │ │ │ │ ├── v0_2_1.ex
│ │ │ │ │ └── v0_2_2.ex
│ │ │ │ └── validator.ex
│ │ │ ├── tableschema_validator.ex
│ │ │ ├── validator.ex
│ │ │ └── validator_selection.ex
│ │ └── zip.ex
│ ├── mix.exs
│ ├── priv/
│ │ ├── facilitators.csv
│ │ ├── gbfs_operators.csv
│ │ ├── gettext/
│ │ │ ├── administrative_division.pot
│ │ │ ├── alert.pot
│ │ │ ├── autocomplete.pot
│ │ │ ├── backoffice.pot
│ │ │ ├── backoffice_dataset.pot
│ │ │ ├── climate-resilience-bill.pot
│ │ │ ├── community_resource.pot
│ │ │ ├── contact.pot
│ │ │ ├── datagouv-dataset.pot
│ │ │ ├── dataset.pot
│ │ │ ├── db-dataset-score.pot
│ │ │ ├── db-dataset.pot
│ │ │ ├── db-resource-related.pot
│ │ │ ├── default.pot
│ │ │ ├── download-link.pot
│ │ │ ├── en/
│ │ │ │ └── LC_MESSAGES/
│ │ │ │ ├── administrative_division.po
│ │ │ │ ├── alert.po
│ │ │ │ ├── autocomplete.po
│ │ │ │ ├── backoffice.po
│ │ │ │ ├── backoffice_dataset.po
│ │ │ │ ├── climate-resilience-bill.po
│ │ │ │ ├── community_resource.po
│ │ │ │ ├── contact.po
│ │ │ │ ├── datagouv-dataset.po
│ │ │ │ ├── dataset.po
│ │ │ │ ├── db-dataset-score.po
│ │ │ │ ├── db-dataset.po
│ │ │ │ ├── db-resource-related.po
│ │ │ │ ├── default.po
│ │ │ │ ├── download-link.po
│ │ │ │ ├── errors.po
│ │ │ │ ├── espace-producteurs.po
│ │ │ │ ├── explore.po
│ │ │ │ ├── feedback.po
│ │ │ │ ├── gbfs_analyzer.po
│ │ │ │ ├── gtfs-diff.po
│ │ │ │ ├── gtfs-file-descriptions.po
│ │ │ │ ├── gtfs-transport-validator.po
│ │ │ │ ├── helper.po
│ │ │ │ ├── landing-vls.po
│ │ │ │ ├── netex-documentation.po
│ │ │ │ ├── netex-validator.po
│ │ │ │ ├── notification_subscription.po
│ │ │ │ ├── page-dataset-details.po
│ │ │ │ ├── page-index.po
│ │ │ │ ├── page-login.po
│ │ │ │ ├── page-nouveautes.po
│ │ │ │ ├── page-producteurs.po
│ │ │ │ ├── page-shortlist.po
│ │ │ │ ├── resource.po
│ │ │ │ ├── reusable_data.po
│ │ │ │ ├── reuser-space.po
│ │ │ │ ├── reuses.po
│ │ │ │ ├── seo.po
│ │ │ │ ├── stats.po
│ │ │ │ ├── user.po
│ │ │ │ ├── validation.po
│ │ │ │ ├── validations-explanations.po
│ │ │ │ └── validations.po
│ │ │ ├── errors.pot
│ │ │ ├── espace-producteurs.pot
│ │ │ ├── explore.pot
│ │ │ ├── feedback.pot
│ │ │ ├── fr/
│ │ │ │ └── LC_MESSAGES/
│ │ │ │ ├── administrative_division.po
│ │ │ │ ├── alert.po
│ │ │ │ ├── autocomplete.po
│ │ │ │ ├── backoffice.po
│ │ │ │ ├── backoffice_dataset.po
│ │ │ │ ├── climate-resilience-bill.po
│ │ │ │ ├── community_resource.po
│ │ │ │ ├── contact.po
│ │ │ │ ├── datagouv-dataset.po
│ │ │ │ ├── dataset.po
│ │ │ │ ├── db-dataset-score.po
│ │ │ │ ├── db-dataset.po
│ │ │ │ ├── db-resource-related.po
│ │ │ │ ├── default.po
│ │ │ │ ├── download-link.po
│ │ │ │ ├── errors.po
│ │ │ │ ├── espace-producteurs.po
│ │ │ │ ├── explore.po
│ │ │ │ ├── feedback.po
│ │ │ │ ├── gbfs_analyzer.po
│ │ │ │ ├── gtfs-diff.po
│ │ │ │ ├── gtfs-file-descriptions.po
│ │ │ │ ├── gtfs-transport-validator.po
│ │ │ │ ├── helper.po
│ │ │ │ ├── landing-vls.po
│ │ │ │ ├── netex-documentation.po
│ │ │ │ ├── netex-validator.po
│ │ │ │ ├── notification_subscription.po
│ │ │ │ ├── page-dataset-details.po
│ │ │ │ ├── page-index.po
│ │ │ │ ├── page-login.po
│ │ │ │ ├── page-nouveautes.po
│ │ │ │ ├── page-producteurs.po
│ │ │ │ ├── page-shortlist.po
│ │ │ │ ├── resource.po
│ │ │ │ ├── reusable_data.po
│ │ │ │ ├── reuser-space.po
│ │ │ │ ├── reuses.po
│ │ │ │ ├── seo.po
│ │ │ │ ├── stats.po
│ │ │ │ ├── user.po
│ │ │ │ ├── validation.po
│ │ │ │ ├── validations-explanations.po
│ │ │ │ └── validations.po
│ │ │ ├── gbfs_analyzer.pot
│ │ │ ├── gtfs-diff.pot
│ │ │ ├── gtfs-file-descriptions.pot
│ │ │ ├── gtfs-transport-validator.pot
│ │ │ ├── helper.pot
│ │ │ ├── landing-vls.pot
│ │ │ ├── netex-documentation.pot
│ │ │ ├── netex-validator.pot
│ │ │ ├── notification_subscription.pot
│ │ │ ├── page-dataset-details.pot
│ │ │ ├── page-index.pot
│ │ │ ├── page-login.pot
│ │ │ ├── page-nouveautes.pot
│ │ │ ├── page-producteurs.pot
│ │ │ ├── page-shortlist.pot
│ │ │ ├── resource.pot
│ │ │ ├── reusable_data.pot
│ │ │ ├── reuser-space.pot
│ │ │ ├── reuses.pot
│ │ │ ├── seo.pot
│ │ │ ├── stats.pot
│ │ │ ├── user.pot
│ │ │ ├── validation.pot
│ │ │ ├── validations-explanations.pot
│ │ │ └── validations.pot
│ │ ├── irve_prioritary_datasets.yml
│ │ ├── mobilitydata_gtfs_rules.json
│ │ ├── repo/
│ │ │ └── migrations/
│ │ │ ├── 20181121164437_create_region.exs
│ │ │ ├── 20181121165604_create_aom.exs
│ │ │ ├── 20181121170709_create_dataset.exs
│ │ │ ├── 20181121171826_create_partner.exs
│ │ │ ├── 20181204093045_add_included_in_dataset_to_datasets.exs
│ │ │ ├── 20181205100445_add_type_id_partner.exs
│ │ │ ├── 20181205134354_aom_postgis_geometry.exs
│ │ │ ├── 20181205163400_add_resources.exs
│ │ │ ├── 20181205164605_migrate_validations.exs
│ │ │ ├── 20181210164424_insee_aom_table.exs
│ │ │ ├── 20181211094634_migrate_format.exs
│ │ │ ├── 20181211164714_add_title_resource.exs
│ │ │ ├── 20181212091910_commune_to_aom.exs
│ │ │ ├── 20181218123622_only_use_postgis.exs
│ │ │ ├── 20181220111337_move_metadata.exs
│ │ │ ├── 20190108103116_remove_region_dataset_aom.exs
│ │ │ ├── 20190130145745_rename_transit_types.exs
│ │ │ ├── 20190204155646_fix_region.exs
│ │ │ ├── 20190207164219_remove_empty_url.exs
│ │ │ ├── 20190227165217_more_resource_metadata.exs
│ │ │ ├── 20190402122703_add_has_realtime.exs
│ │ │ ├── 20190408091202_validations_table.exs
│ │ │ ├── 20190424095327_add_dataset_activation.exs
│ │ │ ├── 20190425142259_add_resource_is_available.exs
│ │ │ ├── 20190506153738_add_dataset_document.exs
│ │ │ ├── 20190509163526_delete_cascade_validations.exs
│ │ │ ├── 20190516142725_search_commune.exs
│ │ │ ├── 20190703092429_use_simple_index_search.exs
│ │ │ ├── 20190910091521_change_siren.exs
│ │ │ ├── 20190911085235_add_resource_content_hash.exs
│ │ │ ├── 20191216144800_add_resource_tags.exs
│ │ │ ├── 20200106094910_nb_reuses.exs
│ │ │ ├── 20200110124026_unaccent.exs
│ │ │ ├── 20200114101848_change_search_conf.exs
│ │ │ ├── 20200114143832_add_dataset_view.exs
│ │ │ ├── 20200116092044_link_dataset_commune.exs
│ │ │ ├── 20200116163306_more_pt.exs
│ │ │ ├── 20200120092242_add_communes_to_view.exs
│ │ │ ├── 20200120154256_dataset_associated_territory.exs
│ │ │ ├── 20200130152852_epci.exs
│ │ │ ├── 20200212092113_materialized_geographic_view.exs
│ │ │ ├── 20200217142350_add_commune_region.exs
│ │ │ ├── 20200220141013_fuzzystrmatch.exs
│ │ │ ├── 20200224093551_places_mat_view.exs
│ │ │ ├── 20200225140241_slug_constraint.exs
│ │ │ ├── 20200311095433_correction_of_dataset_geographic_view.exs
│ │ │ ├── 20200420082026_validation_fields.exs
│ │ │ ├── 20200429171646_netex_conversion_date.exs
│ │ │ ├── 20200505124346_validation_sha.exs
│ │ │ ├── 20200527084259_community_resources.exs
│ │ │ ├── 20200603103539_add_metadatas_for_on_the_fly_validations.exs
│ │ │ ├── 20200603130643_description_text.exs
│ │ │ ├── 20200608155921_rename_netex.exs
│ │ │ ├── 20200610162056_geojson_content_hash.exs
│ │ │ ├── 20200616101043_add_resource_filesize.exs
│ │ │ ├── 20200622141231_create_import_logs_table.exs
│ │ │ ├── 20200623112618_import_logs_unlimited_text_length_logs_table.exs
│ │ │ ├── 20200623134041_create_validation_logs_table.exs
│ │ │ ├── 20200623162648_skipped_validation.exs
│ │ │ ├── 20200630154908_original_resource_url.exs
│ │ │ ├── 20200703080414_resource_log_validation.exs
│ │ │ ├── 20200818124059_add_geojson_to_validation_data.exs
│ │ │ ├── 20200907134321_change_dataset_on_delete_behavior.exs
│ │ │ ├── 20200908085058_resource_datagouv_id.exs
│ │ │ ├── 20201103174924_features_and_modes.exs
│ │ │ ├── 20201103183100_more_places.exs
│ │ │ ├── 20201112110459_update_aoms_fk.exs
│ │ │ ├── 20201209152013_add_indices.exs
│ │ │ ├── 20201214163517_stats_history.exs
│ │ │ ├── 20210126172850_remove_refresh_places_resources_trigger.exs
│ │ │ ├── 20210512142927_add_discussion_timestamp.exs
│ │ │ ├── 20210615132842_charsharing_to_carpooling.exs
│ │ │ ├── 20210622150648_dataset_communes_on_delete.exs
│ │ │ ├── 20210623140048_add_nouvelle_caledonie.exs
│ │ │ ├── 20210630130031_add_resource_schema.exs
│ │ │ ├── 20210811122529_rename-category-bike-scooter-sharing.exs
│ │ │ ├── 20211006144855_rename_category_bike_path.exs
│ │ │ ├── 20211018122851_add_oban_jobs_table.exs
│ │ │ ├── 20211021094750_move_fields_to_jsonb.exs
│ │ │ ├── 20211122101004_add_metrics.exs
│ │ │ ├── 20211130094242_add_resource_history.exs
│ │ │ ├── 20211209090542_add_breaking_news_table.exs
│ │ │ ├── 20211209121042_migrate_gbfs_metrics.exs
│ │ │ ├── 20211210082242_add_resource_unavailability.exs
│ │ │ ├── 20211214142804_create_conversion_table.exs
│ │ │ ├── 20220104092238_add_resource_history_last_up_to_date_at_field.exs
│ │ │ ├── 20220118101217_allow_netex_conversion.exs
│ │ │ ├── 20220124133742_add_category_locations.exs
│ │ │ ├── 20220126101800_add_title_resource_history.exs
│ │ │ ├── 20220208143147_create_gtfs_stops_table.exs
│ │ │ ├── 20220210142527_add_table_gtfs_stop_times.exs
│ │ │ ├── 20220214161600_migrate_old_validation_rows.exs
│ │ │ ├── 20220225104500_drop_table_partner.exs
│ │ │ ├── 20220301085100_add_timestamps_data_import.exs
│ │ │ ├── 20220321151717_remove_resource_deprecated_fields.exs
│ │ │ ├── 20220322090153_rename_dataset_title_fields.exs
│ │ │ ├── 20220322135059_update_search_conf_field_name.exs
│ │ │ ├── 20220329113451_add_index_history_dataset_id.exs
│ │ │ ├── 20220406125936_add_geo_data.exs
│ │ │ ├── 20220412131157_add_resource_filetype.exs
│ │ │ ├── 20220419124355_rename_category_road_network.exs
│ │ │ ├── 20220429092956_set_up_multi_validation.exs
│ │ │ ├── 20220502083641_add_metadata_table.exs
│ │ │ ├── 20220502130846_add_resource_type.exs
│ │ │ ├── 20220505115659_add_resource_display_position.exs
│ │ │ ├── 20220505121748_create_resource_history_format_index.exs
│ │ │ ├── 20220510124001_create_multi_validation_index.exs
│ │ │ ├── 20220523132328_add_resource_history_resource_id.exs
│ │ │ ├── 20220525084346_add_multi_validation_args.exs
│ │ │ ├── 20220531123506_modify_resource_history_constraint.exs
│ │ │ ├── 20220601135310_change-multi-validation-field-name.exs
│ │ │ ├── 20220615090656_add_index_resource_history_resource_id.exs
│ │ │ ├── 20220615123711_add_max_error_field.exs
│ │ │ ├── 20220922134600_fix_region_data.exs
│ │ │ ├── 20220923080044_add_table_departement.exs
│ │ │ ├── 20220923080526_add_columns_communes.exs
│ │ │ ├── 20220929073801_fix_geometries.exs
│ │ │ ├── 20220930122054_add_metadata_modes_features.exs
│ │ │ ├── 20221004125601_set_noumea_siren.exs
│ │ │ ├── 20221004135750_enable_trigger_refresh_places_commune_trigger.exs
│ │ │ ├── 20221004151551_add_modes_features_index.exs
│ │ │ ├── 20221005125656_update_modes_features_places.exs
│ │ │ ├── 20221010144415_rename_population_cols_aom.exs
│ │ │ ├── 20221031094523_dataset_add_timestamps.exs
│ │ │ ├── 20221110101806_rename_mobility_license.exs
│ │ │ ├── 20221129131631_dataset_add_archived_at_field.exs
│ │ │ ├── 20221201165336_create_dataset_history.exs
│ │ │ ├── 20221206132945_on_delete_nilify.exs
│ │ │ ├── 20221206135302_data_import_delete.exs
│ │ │ ├── 20221208083708_add_indexes_resource.exs
│ │ │ ├── 20221228090455_create_oban_peers.exs
│ │ │ ├── 20221228090553_swap_primary_oban_indexes.exs
│ │ │ ├── 20221228142229_remove_resource_start_end_date.exs
│ │ │ ├── 20230103165252_validation_v1_shutdown.exs
│ │ │ ├── 20230110074451_create_data_import_batch.exs
│ │ │ ├── 20230111115335_add_index_dataset_history_slug.exs
│ │ │ ├── 20230112132326_add_notifications.exs
│ │ │ ├── 20230120093740_dataset_add_unique_datagouv_id.exs
│ │ │ ├── 20230124110826_notifications_change_dataset_on_delete_behavior.exs
│ │ │ ├── 20230124131704_dataset_tags.exs
│ │ │ ├── 20230125145703_notifications_add_dataset_datagouv_id.exs
│ │ │ ├── 20230202090645_add_dataset_organization_type.exs
│ │ │ ├── 20230206131831_add_contacts.exs
│ │ │ ├── 20230220102200_add_notification_subscription.exs
│ │ │ ├── 20230302084455_add_dataset_legal_owner.exs
│ │ │ ├── 20230308085359_add_indexes_dataset_history_resources.exs
│ │ │ ├── 20230309102424_add_mailing_list_title_contact.exs
│ │ │ ├── 20230322080214_dataset_change_types_date_cols.exs
│ │ │ ├── 20230327135824_correct_dataset_population_update.exs
│ │ │ ├── 20230329071947_resource_change_types_datetime_cols.exs
│ │ │ ├── 20230404074406_add_resource_related.exs
│ │ │ ├── 20230412080437_contact_add_secondary_phone_number.exs
│ │ │ ├── 20230420134229_notification_subscription_rename_licence_ouverte_reason.exs
│ │ │ ├── 20230427083218_contact_add_datagouv_user_id_last_login_at.exs
│ │ │ ├── 20230524122950_notification_subscription_rename_switching_licences_reason.exs
│ │ │ ├── 20230525152222_create_table_dataset_score.exs
│ │ │ ├── 20230609122110_dataset_add_organization_id.exs
│ │ │ ├── 20230623121709_aom_remove_parent_dataset_id.exs
│ │ │ ├── 20230626130232_notification_subscription_add_role.exs
│ │ │ ├── 20230630074914_add_organization.exs
│ │ │ ├── 20230719123439_dataset_score_topic_index.exs
│ │ │ ├── 20230719124102_dataset_legal_owner_company_siren_type.exs
│ │ │ ├── 20230828142610_aom_trim_departement.exs
│ │ │ ├── 20230913130308_metrics_gbfs_target_name.exs
│ │ │ ├── 20230925124412_gbfs_metrics_remove_404_rows.exs
│ │ │ ├── 20231019121309_data_conversion_add_columns.exs
│ │ │ ├── 20231110140739_remove_columns_from_aom.exs
│ │ │ ├── 20231214145408_dataset_monthly_metrics.exs
│ │ │ ├── 20231222145809_improve_epci.exs
│ │ │ ├── 20231231135108_add_dataset_search_payload.exs
│ │ │ ├── 20240102144643_dataset_add_not_null.exs
│ │ │ ├── 20240110085755_resource_related_on_delete.exs
│ │ │ ├── 20240117075117_resource_monthly_metrics.exs
│ │ │ ├── 20240118133546_create_processing_reports.exs
│ │ │ ├── 20240123133743_dataset_custom_logos_columns.exs
│ │ │ ├── 20240212101805_dataset_add_custom_logo_changed_at.exs
│ │ │ ├── 20240310152509_use_standard_timestamps_precision.exs
│ │ │ ├── 20240311141215_create_dataset_followers.exs
│ │ │ ├── 20240318143637_dataset_add_is_hidden.exs
│ │ │ ├── 20240418140451_resource_url_size.exs
│ │ │ ├── 20240422145836_create_user_feedback.exs
│ │ │ ├── 20240502134737_notification_subscriptions_migrate_platform_producer.exs
│ │ │ ├── 20240515113652_add_dataset_datagouv_id_to_resource_monthly_metrics.exs
│ │ │ ├── 20240515155123_relax_constraint_on_resource_monthly_metrics.exs
│ │ │ ├── 20240520130507_add_datagouv_id_to_dataset_history_resources.exs
│ │ │ ├── 20240604121342_notifications_add_columns.exs
│ │ │ ├── 20240619052714_add_resource_counter_cache.exs
│ │ │ ├── 20240725130522_dataset_organization_type_migrate_value.exs
│ │ │ ├── 20240726065227_dataset_organization_id_not_null.exs
│ │ │ ├── 20240730130026_notifications_adjust_foreign_keys.exs
│ │ │ ├── 20240905121512_dataset_remove_paris2024_tag.exs
│ │ │ ├── 20240926074311_contact_add_creation_source.exs
│ │ │ ├── 20241120132849_geo_data_import_add_slug.exs
│ │ │ ├── 20250204084455_reuser_improved_data.exs
│ │ │ ├── 20250225130151_resource_history_add_reuser_improved_data.exs
│ │ │ ├── 20250305130125_resource_history_nullable_datagouv_id.exs
│ │ │ ├── 20250305142322_create_reuse.exs
│ │ │ ├── 20250305143006_create_reuse_dataset.exs
│ │ │ ├── 20250331133652_token.exs
│ │ │ ├── 20250527092321_create_api_request.exs
│ │ │ ├── 20250530171428_token_add_default_for_contact_id.exs
│ │ │ ├── 20250603104959_default_token.exs
│ │ │ ├── 20250604082439_resource_download.exs
│ │ │ ├── 20250610141027_migrate_dataset_types.exs
│ │ │ ├── 20250617100226_create_administrative_division.exs
│ │ │ ├── 20250618122521_feature_usage.exs
│ │ │ ├── 20250620141849_create_dataset_declarative_spatial_area.exs
│ │ │ ├── 20250816134003_upgrade_oban_jobs_to_v13.exs
│ │ │ ├── 20250818131430_create_proxy_request.exs
│ │ │ ├── 20250905085103_create_irve_tables.exs
│ │ │ ├── 20250905125828_places_add_departement.exs
│ │ │ ├── 20250915101514_places_add_epci.exs
│ │ │ ├── 20250917125049_dataset_geographic_view_administrative_division.exs
│ │ │ ├── 20250922081424_administrative_division_population.exs
│ │ │ ├── 20250922083326_dataset_search_update_remove_population.exs
│ │ │ ├── 20251006140759_delete_cascade_resource_downloads.exs
│ │ │ ├── 20251009114246_add_result_digest_to_multi_validation.exs
│ │ │ ├── 20251027091600_commune_aom_siren.exs
│ │ │ ├── 20251117085613_delete_dataset_communes.exs
│ │ │ ├── 20251117090342_dataset_delete_aom_id_region_id.exs
│ │ │ ├── 20251120121226_create_offer.exs
│ │ │ ├── 20251120125829_create_dataset_offer.exs
│ │ │ ├── 20251121164427_region_iso3166.exs
│ │ │ ├── 20251202120339_region_remove_is_completed.exs
│ │ │ ├── 20251203161550_add_indexes_resource_history.exs
│ │ │ ├── 20251204151437_places_add_offers.exs
│ │ │ ├── 20251204172125_offer_index.exs
│ │ │ ├── 20251204202714_resource_format_override.exs
│ │ │ ├── 20251208124339_places_rename_autocomplete.exs
│ │ │ ├── 20251209110327_departement_iso3166.exs
│ │ │ ├── 20251209123938_add_table_size_history.exs
│ │ │ ├── 20251209144903_add_gtfs_agency.exs
│ │ │ ├── 20251209172445_add_multi_validation_binary_result.exs
│ │ │ ├── 20251215081025_autocomplete_add_format.exs
│ │ │ ├── 20251218133947_autocomplete_add_dataset.exs
│ │ │ ├── 20251220162908_contact_add_locale.exs
│ │ │ ├── 20260117113753_create_hidden_reuser_alerts.exs
│ │ │ ├── 20260122180704_create_dataset_subtype.exs
│ │ │ ├── 20260126155921_allow_netex_to_geojson_conversion.exs
│ │ │ ├── 20260205144049_rename_irve_valid_file_columns.exs
│ │ │ ├── 20260209120000_add_missing_fields_to_irve_valid_file.exs
│ │ │ ├── 20260311000000_create_company.exs
│ │ │ ├── 20260317000001_resource_related_unique_index.exs
│ │ │ └── sql/
│ │ │ ├── autocomplete_add_dataset.sql
│ │ │ ├── autocomplete_add_format.sql
│ │ │ └── places_rename_autocomplete.sql
│ │ ├── search_custom_messages.yml
│ │ └── zfe_ids.csv
│ └── test/
│ ├── build_test.exs
│ ├── datagouvfr/
│ │ └── client/
│ │ ├── api_test.exs
│ │ ├── community_resources_test.exs
│ │ ├── datasets_test.exs
│ │ ├── discussions_test.exs
│ │ ├── resources_test.exs
│ │ └── reuses_test.exs
│ ├── db/
│ │ ├── administrative_division_test.exs
│ │ ├── contact_test.exs
│ │ ├── data_conversion_test.exs
│ │ ├── dataset_follower_test.exs
│ │ ├── dataset_history_test.exs
│ │ ├── dataset_monthly_metric_test.exs
│ │ ├── dataset_score_test.exs
│ │ ├── dataset_test.exs
│ │ ├── geom_test.exs
│ │ ├── irve_valid_pdc_test.exs
│ │ ├── metrics_test.exs
│ │ ├── multi_validation_test.exs
│ │ ├── notification_subscription_test.exs
│ │ ├── notification_test.exs
│ │ ├── resource_history_test.exs
│ │ ├── resource_metadata_test.exs
│ │ ├── resource_monthly_metric_test.exs
│ │ ├── resource_related_test.exs
│ │ ├── resource_test.exs
│ │ ├── resource_unavailability_test.exs
│ │ ├── reuse_test.exs
│ │ ├── test_helper.exs
│ │ ├── token_test.exs
│ │ └── user_feedback_test.exs
│ ├── documentation_links_test.exs
│ ├── ecto_interval_test.exs
│ ├── enroute/
│ │ └── chouette_valid_rulesets_client_test.exs
│ ├── fixture/
│ │ ├── cassettes/
│ │ │ ├── dataset/
│ │ │ │ ├── dataset-aom.json.json
│ │ │ │ ├── dataset-no-region-nor-ao.json.json
│ │ │ │ ├── dataset-region-and-country.json.json
│ │ │ │ ├── dataset-region-ao.json.json
│ │ │ │ ├── dataset-with-multiple-cities-and-country.json.json
│ │ │ │ ├── dataset-with-multiple-cities.json.json
│ │ │ │ └── dataset_twice.json
│ │ │ └── user/
│ │ │ ├── dataset-add-2.json
│ │ │ ├── organization-create-4.json
│ │ │ ├── organization-datasets-1.json
│ │ │ ├── organizations-0.json
│ │ │ └── user-without-organization-3.json
│ │ ├── files/
│ │ │ ├── bibus-brest-gtfs-rt-alerts.pb
│ │ │ ├── csv_latin1.csv
│ │ │ ├── csv_utf8.csv
│ │ │ └── gtfs-rt-validator-errors.json
│ │ ├── gbfs/
│ │ │ ├── free_bike_status.2.2.json
│ │ │ ├── gbfs.2.2.json
│ │ │ ├── gbfs.3.0.json
│ │ │ ├── station_information.2.2.json
│ │ │ ├── station_information.3.0.json
│ │ │ ├── station_status.2.2.json
│ │ │ ├── station_status.3.0.json
│ │ │ └── vehicle_status.3.0.json
│ │ └── schemas/
│ │ ├── schemas.json
│ │ ├── validata_source_error.json
│ │ ├── validata_unknown_custom_check_error.json
│ │ ├── validata_with_errors.json
│ │ ├── validata_with_file_error.json
│ │ ├── validata_with_no_errors.json
│ │ └── validata_with_opening_hours_error.json
│ ├── gtfs/
│ │ └── utils_test.exs
│ ├── netex/
│ │ ├── archive_parser_test.exs
│ │ ├── chouette_valid_ruleset_generator_test.exs
│ │ ├── stop_places_streaming_parser_test.exs
│ │ └── to_geojson/
│ │ ├── coordinates_test.exs
│ │ ├── geojson_builder_test.exs
│ │ ├── parsers/
│ │ │ ├── quay_parser_test.exs
│ │ │ └── service_link_parser_test.exs
│ │ └── to_geojson_test.exs
│ ├── no_css_inline_style_test.exs
│ ├── registry/
│ │ ├── gtfs_test.exs
│ │ ├── model_test.exs
│ │ └── result_test.exs
│ ├── safe_svg_test.exs
│ ├── support/
│ │ ├── channel_case.ex
│ │ ├── conn_case.ex
│ │ ├── data_gouv_api_fixtures.ex
│ │ ├── database_case.ex
│ │ ├── enroute_chouette_valid_client_helpers.ex
│ │ ├── external_case.ex
│ │ ├── factory.ex
│ │ ├── file_stream_utils.ex
│ │ ├── live_case.ex
│ │ ├── live_view_test_helpers.ex
│ │ ├── mocks.ex
│ │ ├── netex_validation_report.ex
│ │ ├── platform.ex
│ │ ├── siri_queries.ex
│ │ ├── tmp_file.ex
│ │ └── zip_creator.ex
│ ├── swoosh_assert_no_email_sent_test.exs
│ ├── test_helper.exs
│ ├── transport/
│ │ ├── S3/
│ │ │ └── aggregates_uploader_test.exs
│ │ ├── application_test.exs
│ │ ├── availability_checker_test.exs
│ │ ├── cache_cachex_test.exs
│ │ ├── cached_files_test.exs
│ │ ├── comments_checker_test.exs
│ │ ├── community_resource_cleaner_test.exs
│ │ ├── companies_test.exs
│ │ ├── consolidated_dataset_test.exs
│ │ ├── custom_search_message_test.exs
│ │ ├── data_checker_test.exs
│ │ ├── data_frame/
│ │ │ └── validation_primitives_test.exs
│ │ ├── dataset_checks_test.exs
│ │ ├── dataset_index_test.exs
│ │ ├── expiration_test.exs
│ │ ├── file_downloads_test.exs
│ │ ├── gbfs_metadata_test.exs
│ │ ├── gbfs_to_geojson_test.exs
│ │ ├── gtfs_data_test.exs
│ │ ├── gtfs_diff_test.exs
│ │ ├── gtfs_query_test.exs
│ │ ├── gtfs_rt_test.exs
│ │ ├── history_fetcher_test.exs
│ │ ├── import_data_service_test.exs
│ │ ├── import_data_test.exs
│ │ ├── irve/
│ │ │ ├── database_importer_test.exs
│ │ │ ├── deduplicator_test.exs
│ │ │ ├── dynamic_irve_schema_test.exs
│ │ │ ├── irve_data_frame_test.exs
│ │ │ ├── irve_extractor_test.exs
│ │ │ ├── irve_http_pagination_test.exs
│ │ │ ├── irve_raw_static_consolidation_test.exs
│ │ │ ├── irve_static_probes_test.exs
│ │ │ ├── processing_test.exs
│ │ │ ├── simple_consolidation_test.exs
│ │ │ ├── static_irve_schema_test.exs
│ │ │ ├── validation/
│ │ │ │ ├── data_frame_validation_test.exs
│ │ │ │ └── validator_test.exs
│ │ │ └── zip_probe_test.exs
│ │ ├── jobs/
│ │ │ ├── archive_metrics_job_test.exs
│ │ │ ├── backfill/
│ │ │ │ ├── backfill_metadata_non_gtfs_resource_history_test.exs
│ │ │ │ ├── backfill_resource_history_filesize_test.exs
│ │ │ │ ├── backfill_resource_history_resource_id_test.exs
│ │ │ │ └── backfill_resource_history_schema_details_test.exs
│ │ │ ├── clean_multi_validation_job_test.exs
│ │ │ ├── clean_on_demand_validation_job_test.exs
│ │ │ ├── clean_orphan_conversions_job_test.exs
│ │ │ ├── consolidate_bnlc_job_test.exs
│ │ │ ├── consolidate_lez_job_test.exs
│ │ │ ├── conversions/
│ │ │ │ ├── gtfs_to_geojson_converter_job_test.exs
│ │ │ │ ├── netex_to_geojson_converter_job_test.exs
│ │ │ │ ├── single_gtfs_to_geojson_converter_job_test.exs
│ │ │ │ └── single_netex_to_geojson_converter_job_test.exs
│ │ │ ├── counter_cache_test.exs
│ │ │ ├── create_tokens_job_test.exs
│ │ │ ├── custom_logo_conversion_job_test.exs
│ │ │ ├── database_backup_replication_job_test.exs
│ │ │ ├── database_vacuum_job_test.exs
│ │ │ ├── dataset_history_job_test.exs
│ │ │ ├── dataset_now_on_nap_notification_job_test.exs
│ │ │ ├── dataset_quality_score_test.exs
│ │ │ ├── datasets_climate_resilience_bill_not_lo_licence_job_test.exs
│ │ │ ├── datasets_switching_climate_resilience_bill_job_test.exs
│ │ │ ├── datasets_without_gtfs_rt_related_resources_notification_job_test.exs
│ │ │ ├── dedupe_history_job_test.exs
│ │ │ ├── default_token_job_test.exs
│ │ │ ├── expiration_notification_job_test.exs
│ │ │ ├── gbfs_multi_validation_job_test.exs
│ │ │ ├── gbfs_operators_notification_job_test.exs
│ │ │ ├── geo_data/
│ │ │ │ ├── bnlc_to_geodata_test.exs
│ │ │ │ ├── gbfs_stations_to_geo_data_test.exs
│ │ │ │ ├── irve_to_geodata_test.exs
│ │ │ │ └── lez_to_geo_data_test.exs
│ │ │ ├── gtfs_diff_job_test.exs
│ │ │ ├── gtfs_import_stops_job_test.exs
│ │ │ ├── gtfs_import_stops_test.exs
│ │ │ ├── gtfs_rt_metadata_test.exs
│ │ │ ├── gtfs_rt_multi_validation_job_test.exs
│ │ │ ├── gtfs_to_db_test.exs
│ │ │ ├── import_companies_job_test.exs
│ │ │ ├── import_dataset_contact_points_job_test.exs
│ │ │ ├── import_dataset_follower_reuser_improved_data_job_test.exs
│ │ │ ├── import_dataset_followers_job_test.exs
│ │ │ ├── import_dataset_monthly_metrics_job_test.exs
│ │ │ ├── import_gbfs_feed_contact_point_job_test.exs
│ │ │ ├── import_resource_monthly_metrics_job_test.exs
│ │ │ ├── import_reuses_job_test.exs
│ │ │ ├── multi_validation_with_error_notification_job_test.exs
│ │ │ ├── netex_poller_job_test.exs
│ │ │ ├── new_comments_notification_job_test.exs
│ │ │ ├── new_datagouv_datasets_job_test.exs
│ │ │ ├── new_dataset_notifications_job_test.exs
│ │ │ ├── notification_subscription_producer_job_test.exs
│ │ │ ├── oban_logger_test.exs
│ │ │ ├── on_demand_netex_poller_job_test.exs
│ │ │ ├── on_demand_validation_job_test.exs
│ │ │ ├── periodic_reminder_producers_notification_job_test.exs
│ │ │ ├── promote_producer_space_job_test.exs
│ │ │ ├── promote_reuser_space_job_test.exs
│ │ │ ├── refresh_autocomplete_job_test.exs
│ │ │ ├── remove_history_job_test.exs
│ │ │ ├── resource_history_job_test.exs
│ │ │ ├── resource_history_jsonschema_validation_job_test.exs
│ │ │ ├── resource_history_tableschema_validation_job_test.exs
│ │ │ ├── resource_history_validation_job_test.exs
│ │ │ ├── resource_unavailable_job_test.exs
│ │ │ ├── resource_unavailable_notification_job_test.exs
│ │ │ ├── resource_validation_job_test.exs
│ │ │ ├── resources_changed_notification_job_test.exs
│ │ │ ├── table_size_history_job_test.exs
│ │ │ ├── transport_tools_test.exs
│ │ │ ├── update_contacts_job_test.exs
│ │ │ ├── update_counter_cache_job_test.exs
│ │ │ ├── visit_download_statistics_job_test.exs
│ │ │ ├── visit_proxy_statistics_job_test.exs
│ │ │ ├── visit_statistics_base_test.exs
│ │ │ ├── warn_user_inactivity_job_test.exs
│ │ │ └── workflow_test.exs
│ │ ├── notifiers_test.exs
│ │ ├── schemas_test.exs
│ │ ├── search_communes_test.exs
│ │ ├── siri_queries_test.exs
│ │ ├── stats_handler_test.exs
│ │ ├── telemetry_test.exs
│ │ └── validators/
│ │ ├── enroute_chouette_valid_client_test.exs
│ │ ├── gbfs_validator_test.exs
│ │ ├── gtfs_rt_validator_test.exs
│ │ ├── gtfs_transport_validator_test.exs
│ │ ├── jsonschema_validata_json_test.exs
│ │ ├── jsonschema_validator_test.exs
│ │ ├── mobilitydata_gtfs_validator_client_test.exs
│ │ ├── mobilitydata_gtfs_validator_test.exs
│ │ ├── netex/
│ │ │ ├── metadata_extractor_test.exs
│ │ │ ├── results_adapters/
│ │ │ │ ├── commons_test.exs
│ │ │ │ ├── v0_1_0_test.exs
│ │ │ │ ├── v0_2_0_test.exs
│ │ │ │ └── v0_2_1_test.exs
│ │ │ └── validator_test.exs
│ │ ├── tableschema_validator_test.exs
│ │ └── validator_selection_test.exs
│ ├── transport_web/
│ │ ├── controllers/
│ │ │ ├── aoms_controller_test.exs
│ │ │ ├── api/
│ │ │ │ ├── aom_controller_test.exs
│ │ │ │ ├── autocomplete_controller_test.exs
│ │ │ │ ├── datasets_controller_test.exs
│ │ │ │ ├── features_controller_test.exs
│ │ │ │ ├── geo_query_controller_test.exs
│ │ │ │ ├── gtfs_stops_controller_test.exs
│ │ │ │ ├── schemas_test.exs
│ │ │ │ ├── stats_controller_test.exs
│ │ │ │ └── validators_controller_test.exs
│ │ │ ├── atom_controller_test.exs
│ │ │ ├── backoffice/
│ │ │ │ ├── backoffice_controller_test.exs
│ │ │ │ ├── broken_urls_controller_test.exs
│ │ │ │ ├── contact_controller_test.exs
│ │ │ │ ├── dataset_controller_test.exs
│ │ │ │ ├── notification_subscription_controller_test.exs
│ │ │ │ └── page_controller_test.exs
│ │ │ ├── breaking_news_controller_test.exs
│ │ │ ├── contact_controller_test.exs
│ │ │ ├── conversion_controller_test.exs
│ │ │ ├── dataset_controller_test.exs
│ │ │ ├── dataset_search_test.exs
│ │ │ ├── discussion_controller_test.exs
│ │ │ ├── espace_producteur_controller_test.exs
│ │ │ ├── explore_controller_test.exs
│ │ │ ├── gbfs_analyzer_controller_test.exs
│ │ │ ├── landing_page_controller_test.exs
│ │ │ ├── nav_test.exs
│ │ │ ├── page_controller_test.exs
│ │ │ ├── pagination_helpers_test.exs
│ │ │ ├── resource_controller_test.exs
│ │ │ ├── reuse_controller_test.exs
│ │ │ ├── reuser_space_controller_test.exs
│ │ │ ├── seo_test.exs
│ │ │ ├── session_controller_test.exs
│ │ │ ├── siri_querier_test.exs
│ │ │ └── validation_controller_test.exs
│ │ ├── live_views/
│ │ │ ├── cache_live_test.exs
│ │ │ ├── custom_tags_live_test.exs
│ │ │ ├── dataset_notifications_live_test.exs
│ │ │ ├── discussions_live_test.exs
│ │ │ ├── edit_dataset_live_test.exs
│ │ │ ├── email_preview_live_test.exs
│ │ │ ├── feedback_live_test.exs
│ │ │ ├── follow_dataset_live_test.exs
│ │ │ ├── gtfs_diff_explain_test.exs
│ │ │ ├── gtfs_diff_select_live_test.exs
│ │ │ ├── notifications_live_test.exs
│ │ │ ├── proxy_config_live_test.exs
│ │ │ ├── proxy_requests_count_live_test.exs
│ │ │ ├── rate_limiter_live_test.exs
│ │ │ ├── reuses_live_test.exs
│ │ │ ├── user_space_datasets_live_test.exs
│ │ │ └── validate_resource_live_test.exs
│ │ ├── plugs/
│ │ │ ├── custom_secure_browser_headers_test.exs
│ │ │ ├── producer_data_test.exs
│ │ │ ├── rate_limiter_test.exs
│ │ │ └── worker_healthcheck_test.exs
│ │ ├── routing/
│ │ │ ├── canonical_host_redirect_test.exs
│ │ │ ├── headers_and_cookies_test.exs
│ │ │ ├── health_check_test.exs
│ │ │ ├── proxy_routing_test.exs
│ │ │ ├── put_locale_test.exs
│ │ │ └── router_test.exs
│ │ ├── session_test.exs
│ │ └── views/
│ │ ├── avatar_view_test.exs
│ │ ├── backoffice/
│ │ │ └── page_view_test.exs
│ │ ├── dataset_view_test.exs
│ │ ├── error_view_test.exs
│ │ ├── markdown_handler_test.exs
│ │ ├── no_html_in_markdown_templates_test.exs
│ │ ├── page_view_test.exs
│ │ └── resource_view_test.exs
│ ├── unlock/
│ │ ├── batch_metrics_test.exs
│ │ ├── config_fetcher_test.exs
│ │ ├── controllers/
│ │ │ └── unlock_controller_test.exs
│ │ ├── dynamic_irve/
│ │ │ └── feed_worker_test.exs
│ │ ├── dynamic_irve_integration_test.exs
│ │ ├── enforce_ttl_test.exs
│ │ ├── finch_impl_test.exs
│ │ ├── github_config_test.exs
│ │ ├── plugs/
│ │ │ └── token_auth_test.exs
│ │ ├── shared_test.exs
│ │ ├── siri_test.exs
│ │ └── test_helper.exs
│ ├── with_mock_must_not_use_async_check_test.exs
│ ├── with_mox_verify_on_exit_test.exs
│ └── zip_test.exs
├── blog/
│ └── README.md
├── config/
│ ├── config.exs
│ ├── data_sharing_pilot.exs
│ ├── database.exs
│ ├── datagouvfr.exs
│ ├── dev.exs
│ ├── dev.secret.template.exs
│ ├── gbfs_validator.exs
│ ├── gtfs_validator.exs
│ ├── mail.exs
│ ├── prod.exs
│ ├── proxy-config.sample.yml
│ ├── runtime.exs
│ └── test.exs
├── dialyzer-plt/
│ └── .gitkeep
├── docker/
│ └── database/
│ ├── Dockerfile
│ ├── create_test_db.sh
│ ├── readme.md
│ └── restore_db.sh
├── docker-compose.yml
├── docker_phoenix_startup.sh
├── docs/
│ ├── contacts.md
│ ├── data_model.livemd
│ ├── inventaire_donnees_geographiques_septembre_2023.md
│ ├── scaleway/
│ │ ├── README.md
│ │ ├── bucket_lifecycle_configuration_production.json
│ │ ├── bucket_lifecycle_configuration_staging.json
│ │ ├── bucket_policy_production.json
│ │ └── bucket_policy_staging.json
│ └── upgrade_versions.md
├── generate_deps_changelogs.exs
├── glossary.md
├── learning_track.md
├── livebook/
│ ├── irve-total.livemd
│ └── irve-watts-detection.livemd
├── mix.exs
├── ops_tests/
│ └── ops_tests.exs
├── restore_db.sh
├── screens/
│ ├── screens.exs
│ ├── screens.livemd
│ └── vehicles.livemd
└── scripts/
├── .gitignore
├── README.md
├── api/
│ ├── .gitignore
│ ├── filter_gtfs_rt_by_entity_types.exs
│ └── spec_check.exs
├── api_datasets_check.exs
├── backfill_multi_validation_binary_result.exs
├── backfill_multi_validation_digest.exs
├── backfill_netex_metadata.exs
├── backups-analysis.livemd
├── chouette_valid_rulesets.exs
├── compare-json.sh
├── compare_http.exs
├── contacts/
│ ├── contact_export.exs
│ ├── insert_contacts.exs
│ └── insert_notification_subscriptions.exs
├── debugging.livemd
├── download_resource_history_files.exs
├── drop_and_recreate_database.exs
├── elixir_predicate_upgrade.exs
├── git_diff_experiment.exs
├── gtfs_expiry.exs
├── hash_compute_experiment.exs
├── http_generic_testing.exs
├── ingest_resource_history_files.exs
├── internal_zip_checksum_experiment.exs
├── irve/
│ ├── .gitignore
│ ├── analyze-irve.exs
│ ├── difference.exs
│ ├── dump-simple-consolidation.exs
│ ├── dyn-analysis.exs
│ ├── dynamic-irve.exs
│ ├── horodatage-formats.exs
│ ├── process-one.exs
│ ├── process-raw-static-consolidation.exs
│ ├── process-simple-consolidation.exs
│ ├── report-on-simple-consolidation.exs
│ ├── stats.exs
│ ├── validate.exs
│ └── validate_and_import_local_file.exs
├── irve_diff.livemd
├── memory.exs
├── netex-accessibilite.exs
├── netex_analyzer.exs
├── netex_layout_analyzer.exs
├── notion_link_databases.exs
├── oban/
│ ├── oban_experiments.exs
│ └── oban_suite.exs
├── pagination_example.exs
├── registre-arrets.exs
├── req_httpoison_testing.exs
├── req_stream.exs
├── search_engine.exs
├── siri/
│ ├── siri_check.exs
│ ├── siri_create_doc.exs
│ └── siri_request_parsing.exs
└── subquery.exs
================================================
FILE CONTENTS
================================================
================================================
FILE: .credo.exs
================================================
# This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
#
# You can have as many configs as you like in the `configs:` field.
configs: [
%{
#
# Run any exec using `mix credo -C <name>`. If no exec name is given
# "default" is used.
#
name: "default",
#
# These are the files included in the analysis:
files: %{
#
# You can give explicit globs or simply directories.
# In the latter case `**/*.{ex,exs}` will be used.
#
included: ["lib/", "src/", "web/", "apps/"],
excluded: [~r"/_build/", ~r"/deps/", ~r"/apps/transport/client/", "apps/transport/lib/transport/protobuf/gtfs-realtime.pb.ex"]
},
#
# If you create your own checks, you must specify the source files for
# them here, so they can be loaded by Credo before running the analysis.
#
requires: [],
#
# If you want to enforce a style guide and need a more traditional linting
# experience, you can change `strict` to `true` below:
#
strict: false,
#
# If you want to use uncolored output by default, you can change `color`
# to `false` below:
#
color: true,
#
# You can customize the parameters of any check by adding a second element
# to the tuple.
#
# To disable a check put `false` as second element:
#
# {Credo.Check.Design.DuplicatedCode, false}
#
checks: [
{Credo.Check.Consistency.ExceptionNames},
{Credo.Check.Consistency.LineEndings},
{Credo.Check.Consistency.ParameterPatternMatching},
{Credo.Check.Consistency.SpaceAroundOperators},
{Credo.Check.Consistency.SpaceInParentheses},
{Credo.Check.Consistency.TabsOrSpaces},
{Credo.Check.Design.DuplicatedCode, false},
# You can customize the priority of any check
# Priority values are: `low, normal, high, higher`
#
{Credo.Check.Design.AliasUsage, false},
# For some checks, you can also set other parameters
#
# If you don't want the `setup` and `test` macro calls in ExUnit tests
# or the `schema` macro in Ecto schemas to trigger DuplicatedCode, just
# set the `excluded_macros` parameter to `[:schema, :setup, :test]`.
#
# {Credo.Check.Design.DuplicatedCode, excluded_macros: []},
{Credo.Check.Design.TagTODO, false},
{Credo.Check.Design.TagFIXME},
{Credo.Check.Readability.FunctionNames},
{Credo.Check.Readability.LargeNumbers},
{Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 120},
{Credo.Check.Readability.ModuleAttributeNames},
{Credo.Check.Readability.ModuleDoc},
{Credo.Check.Readability.ModuleNames, ignore: [~r/Transport.Validators.NeTEx.ResultsAdapters.V/]},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs},
{Credo.Check.Readability.ParenthesesInCondition},
{Credo.Check.Readability.PredicateFunctionNames},
{Credo.Check.Readability.PreferImplicitTry},
{Credo.Check.Readability.RedundantBlankLines},
{Credo.Check.Readability.StringSigils},
{Credo.Check.Readability.TrailingBlankLine},
{Credo.Check.Readability.TrailingWhiteSpace},
{Credo.Check.Readability.VariableNames},
{Credo.Check.Readability.Semicolons},
{Credo.Check.Readability.SpaceAfterCommas},
{Credo.Check.Refactor.DoubleBooleanNegation},
{Credo.Check.Refactor.CondStatements},
{Credo.Check.Refactor.CyclomaticComplexity},
{Credo.Check.Refactor.FunctionArity},
{Credo.Check.Refactor.LongQuoteBlocks},
{Credo.Check.Refactor.MatchInCondition},
{Credo.Check.Refactor.NegatedConditionsInUnless},
{Credo.Check.Refactor.NegatedConditionsWithElse},
{Credo.Check.Refactor.Nesting},
{Credo.Check.Refactor.PipeChainStart, false},
      # Too impractical; we want to keep pipes of separate rejects
{Credo.Check.Refactor.RejectReject, false},
{Credo.Check.Refactor.UnlessWithElse},
{Credo.Check.Warning.BoolOperationOnSameValues},
{Credo.Check.Warning.ExpensiveEmptyEnumCheck},
{Credo.Check.Warning.IExPry},
{Credo.Check.Warning.IoInspect},
{Credo.Check.Warning.OperationOnSameValues},
{Credo.Check.Warning.OperationWithConstantResult},
{Credo.Check.Warning.UnusedEnumOperation},
{Credo.Check.Warning.UnusedFileOperation},
{Credo.Check.Warning.UnusedKeywordOperation},
{Credo.Check.Warning.UnusedListOperation},
{Credo.Check.Warning.UnusedPathOperation},
{Credo.Check.Warning.UnusedRegexOperation},
{Credo.Check.Warning.UnusedStringOperation},
{Credo.Check.Warning.UnusedTupleOperation},
{Credo.Check.Warning.RaiseInsideRescue},
{Credo.Check.Warning.MixEnv},
# Controversial and experimental checks (opt-in, just remove `, false`)
#
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.AppendSingleItem, false},
{Credo.Check.Refactor.VariableRebinding, false},
{Credo.Check.Warning.MapGetUnsafePass, false},
{Credo.Check.Consistency.MultiAliasImportRequireUse, false},
# Deprecated checks (these will be deleted after a grace period)
#
{Credo.Check.Readability.Specs, false},
# Custom checks can be created using `mix credo.gen.check`.
#
{Credo.Check.Warning.ForbiddenModule,
[
modules: [Timex]
]}
]
}
]
}
================================================
FILE: .dialyzer_ignore.exs
================================================
[
# temporary fix for https://github.com/elixir-ecto/postgrex/issues/549
~r/deps\/postgrex\/lib\/postgrex\/type_module.ex/,
~r/lib\/postgrex\/type_module.ex/,
# EctoInterval raises an unknown_type error
~r/gtfs_stop_times.ex/,
# Cloak.Ecto.SHA256 and DB.Encrypted.Binary raise an unknown_type error
# See https://github.com/danielberkompas/cloak_ecto/issues/55
{"lib/db/contact.ex", :unknown_type, 0},
{"lib/db/user_feedback.ex", :unknown_type, 0},
{"lib/db/notification.ex", :unknown_type, 0},
{"lib/db/token.ex", :unknown_type, 0},
# Workaround for "Overloaded contract for Transport.Cldr.Calendar.localize/3
# has overlapping domains; such contracts are currently unsupported and are
# simply ignored."
~r/lib\/cldr.ex/
]
================================================
FILE: .editorconfig
================================================
root = true
[*]
indent_size = 2
indent_style = space
charset = utf-8
trim_trailing_whitespace = true
spaces_around_operators = true
max_line_length = 80
insert_final_newline = true
end_of_line = lf
[*.js]
indent_size = 4
[*.tag]
indent_size = 4
[*.md]
trim_trailing_whitespace = false
[*.markdown]
trim_trailing_whitespace = false
================================================
FILE: .eslintignore
================================================
/node_modules
**/*/node_modules
priv/static/js
================================================
FILE: .formatter.exs
================================================
[
plugins: [Phoenix.LiveView.HTMLFormatter],
inputs: [
"mix.exs",
"config/*.exs",
"apps/*/{lib,test}/**/*.{ex,exs,heex}",
"apps/transport/priv/repo/migrations/*.{ex,exs}",
"scripts/**/*.exs",
"ops_tests/**/*.exs"
],
line_length: 120
]
================================================
FILE: .github/CODEOWNERS
================================================
* @etalab/transport-tech
================================================
FILE: .github/actions/checkout-compile/action.yml
================================================
name: "Checkout & compile"
description: "Checkout and compile the code"
runs:
using: "composite"
steps:
- uses: erlef/setup-beam@v1
with:
version-file: .tool-versions
version-type: strict
- name: Set safe directory
run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
shell: bash
- name: Cache deps
id: cache-deps
uses: actions/cache@v4
env:
cache-name: cache-elixir-deps
with:
path: deps
key: ${{ runner.os }}-mix-${{ env.cache-name }}-${{ hashFiles('.tool-versions') }}-${{ hashFiles('**/mix.lock') }}
restore-keys: |
${{ runner.os }}-mix-${{ env.cache-name }}-${{ hashFiles('.tool-versions') }}
${{ runner.os }}-mix-${{ env.cache-name }}-
- name: Cache compiled build
id: cache-build
uses: actions/cache@v4
env:
cache-name: cache-compiled-build-v2
with:
path: _build
key: ${{ runner.os }}-mix-${{ env.cache-name }}-${{ hashFiles('.tool-versions') }}-${{ hashFiles('**/mix.lock') }}
restore-keys: |
${{ runner.os }}-mix-${{ env.cache-name }}-${{ hashFiles('.tool-versions') }}
${{ runner.os }}-mix-${{ env.cache-name }}-
- name: Cache JS assets
id: cache-js-assets
uses: actions/cache@v4
env:
cache-name: cache-js-assets
with:
path: |
apps/transport/client/node_modules
apps/transport/priv/static
key: ${{ runner.os }}-mix-${{ env.cache-name }}-${{ hashFiles('apps/transport/client/yarn.lock') }}
restore-keys: |
${{ runner.os }}-mix-${{ env.cache-name }}-
${{ runner.os }}-mix-
- name: Move transport-tools folder
run: mv /transport-tools ./transport-tools
shell: bash
- name: Install hex
run: mix local.hex --force
shell: bash
- name: Install rebar
run: mix local.rebar --force
shell: bash
- name: Install mix dependencies
run: mix deps.get
shell: bash
- name: Compile code
run: |
mix compile
MIX_ENV=test mix compile
shell: bash
- name: Install yarn dependencies
run: cd apps/transport/client && yarn install
shell: bash
- name: Compile assets
run: cd apps/transport/client && npm run build
shell: bash
================================================
FILE: .github/workflows/ops_tests.yml
================================================
name: CI ops tests
on: push
jobs:
test:
runs-on: ubuntu-latest
name: Run ops tests
steps:
- uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
version-file: .tool-versions
version-type: strict
- run: elixir ops_tests/ops_tests.exs
================================================
FILE: .github/workflows/sentry_release.yml
================================================
name: Sentry release integration
on:
push:
branches:
- master
jobs:
# See https://github.com/marketplace/actions/sentry-release
# See https://sentry.io/settings/transport-data-gouv-fr/developer-settings/github-action-release-integration-f9f6ff/
notify:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 5
- name: Create Sentry release
uses: getsentry/action-release@v1
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: 'transport-data-gouv-fr'
SENTRY_PROJECT: 'transport-site'
with:
environment: prod
================================================
FILE: .github/workflows/test.yml
================================================
name: Test suite
on:
- push
- workflow_dispatch
env:
# https://github.com/erlef/setup-beam#self-hosted-runners
ImageOS: ubuntu24
jobs:
lint:
name: 💅 Run lint tests
runs-on: ubuntu-latest
container: ghcr.io/transportdatagouvfr/ops:elixir-1.19.4-erlang-27.3.4.1-ubuntu-noble-20251013-transport-tools-2.0.0
steps:
- uses: actions/checkout@v6
- name: Checkout and compile
uses: ./.github/actions/checkout-compile
- name: Run gettext check
run: mix gettext.extract --check-up-to-date
- name: Run credo
run: mix credo --strict
- name: Run javascript linter
run: mix npm "run linter:ecma"
- name: Run stylesheets linter
run: mix npm "run linter:sass"
- name: Run formatter
run: mix format --check-formatted --dry-run
test:
name: ⚙️ Run the test suite
runs-on: ubuntu-latest
container: ghcr.io/transportdatagouvfr/ops:elixir-1.19.4-erlang-27.3.4.1-ubuntu-noble-20251013-transport-tools-2.0.0
env:
PG_URL_TEST: "ecto://postgres:postgres@postgres/transport_test"
services:
postgres:
image: timescale/timescaledb-ha:pg18.0-ts2.23.0
env:
POSTGRES_USER: postgres
POSTGRES_DB: transport_test
POSTGRES_PASSWORD: postgres
credentials:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- uses: actions/checkout@v6
- name: Checkout and compile
uses: ./.github/actions/checkout-compile
- name: Run tests
run: mix test --warnings-as-errors
================================================
FILE: .github/workflows/trivy_scan.yml
================================================
name: trivy_scan
on:
schedule:
- cron: 0 9 * * MON
jobs:
build:
name: Scan
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Extract identifier of Docker image to scan
id: docker_image_ref_retrieval
run: echo ::set-output name=TARGET_IMAGE_REF::$(cat Dockerfile | grep FROM | head -1 | cut -d' ' -f2)
- name: Run Trivy vulnerability scanner
uses: aquasecurity/trivy-action@master
with:
image-ref: "${{ steps.docker_image_ref_retrieval.outputs.TARGET_IMAGE_REF }}"
format: 'table'
================================================
FILE: .gitignore
================================================
# App artifacts
/_build
/db
/deps
/*.ez
.elixir_ls
.DS_Store
# Generated on crash by the VM
erl_crash.dump
# Npm stuff
/node_modules
/**/*/node_modules
/**/*/npm-debug.*
# Code coverage output
/**/cover
# Data munging
cache-dir
data-tmp
# Since we are building assets from client,
# we ignore priv/static.
apps/transport/priv/static/css/*
apps/transport/priv/static/fonts/*
apps/transport/priv/static/images/*
apps/transport/priv/static/js/*
# Environment variables files
/.env
# The config/prod.secret.exs file by default contains sensitive
# data and you should not commit it into version control.
#
# Alternatively, you may comment the line below and commit the
# secrets file as long as you replace its contents by environment
# variables.
/config/prod.secret.exs
# the blog build folder
/blog/public
.clever.json
dialyzer-plt/*.plt
dialyzer-plt/*.plt.hash
config/proxy-config.yml
config/notifications-config.yml
config/dev.secret.exs
scripts/siri/config.yml
transport-tools
apps/transport/client/yarn-error.log
.miniorc
livebook/cache-dir
apps/transport/priv/repo/structure.sql
pg.list
# Any CSV file put at the root level
/*.csv
/*~lock.*.csv#
================================================
FILE: .miniorc.template
================================================
# Steps to run minio locally as a S3 container for development:
# 1. cp .miniorc.template .miniorc
# 2. Install MinIO locally. Two options:
# 2.1. It is preferable to use a simple binary, non-Docker version when available
# Mac https://min.io/docs/minio/macos/index.html
# Linux https://github.com/minio/minio?tab=readme-ov-file#gnulinux
# in which case you can do:
# mkdir /minio/data (or wherever you want)
# minio server --console-address :9090 ~/data
# 2.2. For Docker support (a bit more involved), follow instructions at
# https://docs.min.io/minio/baremetal/quickstart/container.html#quickstart-container
# which means at time of writing:
# NOTE: -name removed for simplicity, and "quayio" removed since the container appeared outdated
# docker run -p 9000:9000 -p 9090:9090 -v ~/minio/data:/data -e "MINIO_ROOT_USER=$MINIO_ROOT_USER" -e "MINIO_ROOT_PASSWORD=$MINIO_ROOT_PASSWORD" minio/minio server /data --console-address ":9090"
# 3. modify the password and source it:
# `source .miniorc`
export MINIO_ROOT_USER=test-local
export MINIO_ROOT_PASSWORD=apoi8761876bbazeriouy
# 4. Run MinIO (Example: `./minio server --console-address :9090 ./miniodata` with a binary Linux executable)
# 5. go to console at http://127.0.0.1:9090 and create needed buckets:
# `transport-data-gouv-fr-resource-history-dev` for resource history
# `transport-data-gouv-fr-aggregates-dev` for aggregated IRVE and so on
# 6. setup `dev.secret.exs` from `dev.secret.template.exs`
# 7. start `mix phx.server` or run a script such as `mix run scripts/irve/process-raw-static-consolidation.exs`
================================================
FILE: .stylelintrc.json
================================================
{
"extends": ["stylelint-config-standard", "stylelint-config-standard-scss"],
"rules": {
"scss/dollar-variable-colon-space-after": "at-least-one-space",
"scss/dollar-variable-default": true,
"color-no-invalid-hex": true,
"function-calc-no-unspaced-operator": true,
"unit-no-unknown": true,
"property-no-unknown": true,
"declaration-block-no-duplicate-properties": true,
"selector-pseudo-class-no-unknown": true,
"selector-pseudo-element-no-unknown": true,
"selector-type-no-unknown": true,
"media-feature-name-no-unknown": true,
"media-feature-range-notation": "prefix",
"comment-no-empty": true,
"no-duplicate-at-import-rules": true,
"no-duplicate-selectors": true,
"no-descending-specificity": true,
"no-empty-source": true,
"rule-empty-line-before": null,
"selector-class-pattern": null,
"hue-degree-notation": null,
"color-function-notation": null,
"scss/dollar-variable-empty-line-before": null,
"alpha-value-notation": null,
"at-rule-empty-line-before": null,
"value-keyword-case": null,
"comment-empty-line-before": null,
"font-family-no-missing-generic-family-keyword": null,
"scss/at-extend-no-missing-placeholder": null,
"selector-id-pattern": null,
"declaration-block-no-redundant-longhand-properties": null,
"scss/double-slash-comment-empty-line-before": null
}
}
================================================
FILE: .tool-versions
================================================
# NOTE: this file does _not_ define what is used in production
# (see https://github.com/etalab/transport-site/issues/1336), but it
# provides an easy way to ensure local development and test will rely on roughly
# the same versions as in production, if one uses the "asdf" version manager.
# Update ASDF and plugins: asdf update && asdf plugin update --all
# See:
# - https://hexdocs.pm/elixir/compatibility-and-deprecations.html
# - https://github.com/elixir-lang/elixir/releases
# - `asdf list all elixir`
elixir 1.19.4-otp-27
# See:
# - https://github.com/erlang/otp/releases
# - Blog, e.g. https://www.erlang.org/blog/my-otp-25-highlights/
# - https://github.com/erlang/otp/blob/master/otp_versions.table
# - `asdf list all erlang`
erlang 27.3.4.1
# Take an LTS version on https://nodejs.org/
nodejs 22.16.0
================================================
FILE: .vscode/launch.json
================================================
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "mix_task",
"name": "mix phx.server",
"request": "launch",
"projectDir": "${workspaceRoot}",
"task": "phx.server"
}
]
}
================================================
FILE: Dockerfile
================================================
FROM ghcr.io/transportdatagouvfr/ops:elixir-1.19.4-erlang-27.3.4.1-ubuntu-noble-20251013-transport-tools-2.0.0
RUN mkdir phoenixapp
WORKDIR /phoenixapp
COPY ./ /phoenixapp
RUN mv /transport-tools /phoenixapp
RUN mix do deps.get --only prod
RUN elixir --version
RUN erl -noshell -eval 'erlang:display(erlang:system_info(system_version))' -eval 'init:stop()'
RUN node --version
ENV PORT 8080
ENV MIX_ENV prod
RUN mix deps.compile
RUN cd apps/transport/client && yarn install && npm run build
# assets digest must happen after the npm build step
RUN mix phx.digest
# Package source code for Sentry https://hexdocs.pm/sentry/upgrade-10-x.html
RUN mix sentry.package_source_code
EXPOSE 8080
# See https://github.com/etalab/transport-site/issues/1384
#
# Here I discovered that a default cookie is generated automatically by Erlang,
# and that its value will be the same when running Phoenix vs. running an iex
# on the same node (but it will be different on e.g. site vs worker).
#
# This cookie is stored in `~/.erlang.cookie` and can be read programmatically
# via `:erlang.get_cookie()`.
#
# So as long as a `-sname` has been set at Phoenix startup, this is good enough to
# allow iex connection with the following command (after SSH):
#
# `iex --sname console --remsh node`
#
# I was also able to define a custom cookie, and I'm saving the notes in case we
# decide the default cookie is not good enough, or detect a situation where it could
# be guessable in a way or another.
#
# Add this right above the `ENTRYPOINT`:
#
# `ENV ERL_FLAGS="-cookie $ELIXIR_NODE_SECRET_COOKIE"`
#
# (`-cookie` is not a typo, this is different from `elixir --cookie`)
#
# You will need to make sure to define the variable, otherwise it will fall back
# to the automatically generated cookie value.
#
# Setting `ERL_FLAGS` via `ENV` makes it possible not to introduce a subshell
# to evaluate the variable in `ENTRYPOINT`, something that would introduce other
# problems such as the behaviour of kill on the container (subprocesses).
#
# If you use `ERL_FLAGS` with a custom cookie, the command to connect to the node
# will be slightly different:
# `iex --sname console --cookie $ELIXIR_NODE_SECRET_COOKIE --remsh node`
#
ENTRYPOINT ["elixir", "--sname", "node", "-S", "mix", "phx.migrate_phx.server"]
================================================
FILE: Dockerfile.dev
================================================
FROM ghcr.io/transportdatagouvfr/ops:elixir-1.19.4-erlang-27.3.4.1-ubuntu-noble-20251013-transport-tools-2.0.0
RUN apt-get install -y git inotify-tools postgresql-client>=11
RUN mkdir /app/
RUN mkdir /app/_build
RUN mkdir /app/deps/
WORKDIR /app/
RUN mv /transport-tools /app
# fetch a wait-for-it script to wait for postgres startup
ADD https://raw.githubusercontent.com/vishnubob/wait-for-it/81b1373f17855a4dc21156cfe1694c31d7d1792e/wait-for-it.sh /opt/bin/
RUN chmod +x /opt/bin/wait-for-it.sh
ADD docker_phoenix_startup.sh /docker_phoenix_startup.sh
RUN chmod +x /docker_phoenix_startup.sh
ADD mix.exs mix.lock /app/
ADD config /app/config/
ADD apps /app/apps/
CMD [ "/docker_phoenix_startup.sh" ]
================================================
FILE: LICENSE.AGPL.txt
================================================
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<http://www.gnu.org/licenses/>.
================================================
FILE: README.md
================================================
# Transport
This is the repository of the [French National Access Point](https://transport.data.gouv.fr/) (NAP) for mobility data.
This project brings a mobility focus to data hosted on [data.gouv.fr](https://www.data.gouv.fr), the French open data portal.
You will find user documentation at [doc.transport.data.gouv.fr](https://doc.transport.data.gouv.fr).
A status dashboard covering part of the project is available at [https://stats.uptimerobot.com/q7nqyiO9yQ](https://stats.uptimerobot.com/q7nqyiO9yQ).
# Glossary
A small glossary explaining the various terms can be found in this repo ([glossary.md](glossary.md)). Please feel free to add any term that may seem foreign at first.
# Installation
You can install this in 2 different ways:
* [manually](#manual_install), this is the best way to install it if you plan to work on the project often.
* [with docker](#docker_install), this is an easier installation process at the cost of a slightly more cumbersome development workflow.
## Manual installation <a name="manual_install"></a>
### 1. Install Elixir, Node and Yarn
Make sure you have **Elixir**, **Node.js** and **Yarn** installed and up-to-date. **Docker** is optionally needed if you want to run the [extra transport tools](https://github.com/etalab/transport-tools) on your machine.
**Elixir** is often installed with [asdf](https://asdf-vm.com/) since it makes it easy to handle different **Elixir** versions across projects.
If you wish to use `asdf` (recommended), make sure to install the correct plugins:
* `asdf plugin add erlang` (https://github.com/asdf-vm/asdf-erlang)
* `asdf plugin add elixir` (https://github.com/asdf-vm/asdf-elixir)
* `asdf plugin add nodejs` (https://github.com/asdf-vm/asdf-nodejs)
Installation for Erlang, Elixir and Node.js can then be done with:
* `asdf install`
For Yarn, bring your own version or also use asdf:
* `asdf plugin add yarn` (https://github.com/twuni/asdf-yarn)
* `asdf install yarn latest` (or any other version)
* `asdf set -u yarn latest`
### 2. Install PostgreSQL
You also need an up-to-date PostgreSQL with PostGIS installed. Version 14+ is recommended.
For Mac users, you can use https://postgresapp.com/.
### 3. Build project
* Install Elixir dependencies with `mix deps.get`
* Install Node.js dependencies with `mix yarn install`
## Docker installation
Alternatively, you can use Docker; see the [Docker section](#docker-installation-).
## Prepare the PostgreSQL database
### Creating a database
Make sure you have a `postgres` user with the password `postgres`, and that the authentication method is set to `md5` in your `pg_hba.conf` file (see the sketch below).
Create the database with the command `mix ecto.create`.
Alternatively, you can create it manually. With the permission to create a database (on a Debian-based system, you need to be logged in as `postgres`), type
`createdb transport_repo`.
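If the `postgres` role or the authentication method is not set up yet, here is a minimal sketch of one way to do it; the exact commands and the `pg_hba.conf` location depend on your system, so treat this as illustrative only:
```
# hypothetical: give the postgres role the expected password
sudo -u postgres psql -c "ALTER USER postgres WITH PASSWORD 'postgres';"
# in pg_hba.conf, the relevant entries should use md5, e.g.:
# host    all    all    127.0.0.1/32    md5
```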
### Applying the migrations
To have an up-to-date database schema, run `mix ecto.migrate`.
### Restoring the production database
The production database does not contain any sensitive data; you can retrieve it for dev purposes.
* You can retrieve the [latest Clever Cloud backup](https://console.clever-cloud.com/organisations/orga_f33ebcbc-4403-4e4c-82f5-12305e0ecb1b/addons/addon_beebaa5e-c3a4-4c57-b124-cf9d1473450a) (you need some permissions to access it; if you don't have them, you can ask someone on the team to give you the database)
* On the Clever Cloud website, under transport-site-postgresql, there is a Backups section with download links.
* Restore the downloaded backup to your database: `./restore_db.sh <path_to_the_backup>`
## Binary CLI dependencies (optional)
The app uses a number of tools via [transport-tools](https://github.com/etalab/transport-tools).
They are expected at `./transport-tools` by default (but this can be configured via `:transport_tools_folder` in `config.exs`).
When working locally, you may want to have these tools readily available at times.
```
mkdir transport-tools
cd transport-tools
# jars are cross-platform, so we can copy them from the container (see `transport-tools` repository for exact naming)
# here we share the host folder `transport-site/transport-tools` with a folder inside the container named `/tmp-docker-folder`,
# in order to copy back the exact same jars we use in production
docker run --rm -v $(pwd):/tmp-docker-folder ghcr.io/etalab/transport-tools:latest /bin/sh -c "cp /usr/local/bin/*.jar /tmp-docker-folder"
```
For Rust binaries, you will have to compile them locally and copy them to the same folder.
Once this is done, make sure to adjust the configuration via `:transport_tools_folder` if your folder differs from the default.
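As a sketch, assuming the key lives under the `:transport` OTP application (check `config.exs` for the actual key and application name), the override could look like this in `config/dev.secret.exs`:
```elixir
# hypothetical override; the default points to ./transport-tools
config :transport, :transport_tools_folder, "/path/to/transport-tools"
```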
# Usage
Run the server with `mix phx.server` and visit [`127.0.0.1:5000`](http://127.0.0.1:5000) in your browser.
## Usage of the Elixir Proxy
[`apps/unlock`](https://github.com/etalab/transport-site/tree/master/apps/unlock) is a sub-part of the "umbrella app", which is served on its own subdomain (https://proxy.transport.data.gouv.fr for production, https://proxy.prochainement.transport.data.gouv.fr/ for staging).
The proxy relies on this [yaml configuration](https://github.com/transportdatagouvfr/proxy-config/blob/master/proxy-config.yml) which is currently fetched at runtime once (but can be hot-reloaded via this [backoffice page](https://transport.data.gouv.fr/backoffice/proxy-config)).
Each proxied "feed" (currently GTFS-RT data) has a private (target) url hidden from the general public, can be configured with an independent Time-To-Live (TTL), and is exposed as a credential-free public url. When a query occurs, the incoming HTTP connection is kept on hold while the proxy issues a query to the target server, caching the response in RAM based on the configured TTL.
The backoffice implementation leverages [LiveView](https://github.com/phoenixframework/phoenix_live_view) to provide an automatically updated dashboard view with all the feeds, the size of the latest payload, the latest HTTP code returned by the target etc. Implementation is [here](https://github.com/etalab/transport-site/tree/master/apps/transport/lib/transport_web/live/backoffice).
When working in development, instead of fetching the configuration from GitHub, the configuration is taken from a local config file (`config/proxy-config.yml`, see [config](https://github.com/etalab/transport-site/blob/master/config/dev.exs#L3)), in order to make it very easy to play with sample configurations locally.
For local work, you will have (for now at least) to add `127.0.0.1 proxy.localhost` to your `/etc/hosts` file.
The app currently routes whatever starts with `proxy.` to the proxy (as implemented [here](https://github.com/etalab/transport-site/blob/master/apps/transport/lib/transport_web/plugs/router.ex)), although in the future we will probably use a more explicit configuration.
## Configuring OAuth to work with demo.data.gouv.fr
By default the development configuration is very simple and only allows the most basic scenarios.
If you need to login via `demo.data.gouv.fr`, follow these steps:
* Create or edit `config/dev.secret.exs`
* Add:
```elixir
config :oauth2, Datagouvfr.Authentication,
# go to Clever Cloud staging site and pick `DATAGOUVFR_CLIENT_ID`
client_id: "TODO-REPLACE",
# same but use `DATAGOUVFR_CLIENT_SECRET`
client_secret: "TODO-REPLACE"
```
Then make sure to restart the app.
The rest of the configuration is already set via `dev.exs`, with:
```elixir
config :oauth2, Datagouvfr.Authentication,
# SNIP
site: "https://demo.data.gouv.fr",
redirect_uri: "http://localhost:5000/login/callback"
```
# Development
## Testing
### Running the tests
Run the tests with `mix test`
The application is an [umbrella app](https://elixir-lang.org/getting-started/mix-otp/dependencies-and-umbrella-projects.html). It means that it is split into several sub-projects (that you can see under `/apps`).
To run tests for a specific app, for example the `transport` or `unlock` app, use this command:
```
# for apps/transport app
mix cmd --app transport mix test --color
# for apps/unlock
mix cmd --app unlock mix test --color
# or, for a single file, or single test
mix cmd --app transport mix test --color test/transport_web/integrations/backoffice_test.exs
mix cmd --app transport mix test --color test/transport_web/integrations/backoffice_test.exs:8
```
The filenames must be relative to the app folder. This [will be improved](https://dockyard.com/blog/2019/06/17/testing-in-umbrella-apps-improves-in-elixir-1-9) when we upgrade to a more modern Elixir version.
### Measuring test coverage
We use [excoveralls](https://github.com/parroty/excoveralls) to measure which parts of the code are covered by testing (or not). This is useful to determine where we can improve the testing quality.
The following commands will launch the test and generate coverage:
```
# Display overall (whole app) coverage for all tests in the console
MIX_ENV=test mix coveralls --umbrella
# Same with a HTML report
MIX_ENV=test mix coveralls.html --umbrella
# Display coverage for each umbrella component, rather than overall
MIX_ENV=test mix coveralls
```
The coverage is written on screen by default, or in the `cover` subfolders for HTML output.
Running in `--umbrella` mode will generate a coverage report in the top-level `cover` folder, while running without it will generate reports under each umbrella sub-app (e.g. `apps/transport/cover`).
## Linting
* Run the elixir linter with `mix credo --strict`
* Run the javascript linter with `mix npm "run linter:ecma"`
* Run the sass linter with `mix npm "run linter:sass"`
## Misc Elixir commands
### Translations
To extract all translations from the source, you can run `mix gettext.extract --merge` (and then edit the modified .po files).
### Check all
To perform all the checks done on the CI with a single command, you can run `mix check_all`. It will run the different linters, credo, check the translations are up-to-date, and launch the tests.
### DB migrations
To generate a new migration file:
`cd apps/transport && mix ecto.gen.migration <name of the migration> && cd ../..`
The generated [ecto](https://hexdocs.pm/ecto/Ecto.html) migration file will be `apps/transport/priv/repo/migrations/<timestamp>_<name of the migration>.exs`
To apply all migrations to your database:
`mix ecto.migrate`
### One-shot tasks
Some custom one-shot tasks are available.
To run a custom task: `mix <custom task>`
* `mix Transport.ImportAOMs`: import the AOM data from the Cerema
* `mix Transport.ImportCommunes`: import the French communes from data.gouv
* `mix Transport.ImportEPCI`: import the French EPCIs from data.gouv
* `mix Transport.ImportDepartements`: import the French départements from data.gouv
* `mix Transport.OpenApiSpec`: generate an OpenAPI specification file
## Testing emails
To locally test emails in a dev environment, a Swoosh preview inbox is available in your browser at `/dev/mailbox`. Your server needs to be run through IEx: `iex -S mix phx.server`.
## Learn more and debug
See the [learning track document](learning_track.md).
# Docker installation <a name="docker_install"></a>
## Development
If you don't plan to work a lot on this project, the Docker installation is way easier.
Some environment variables may be needed to configure the app, see the files in the `config` folder.
Then you only need to run:
`docker-compose up`
And access it at http://localhost:5000
You can make changes in the repository and those will be applied with hot reload.
You can run any `mix` command with:
`docker-compose run web mix <cmd>`
For the tests you also need to add an environment variable:
`docker-compose run -e MIX_ENV=test web mix test`
## Production
The Dockerfile needed to run the continuous integration is in the project:
https://github.com/etalab/transport-ops
Update it if needed (e.g. updating Elixir’s version) and then update `.circleci/config.yml`.
# Domain names
The following domain names are currently in use by the deployed Elixir app:
* Production
* site: https://transport.data.gouv.fr
* jobs: https://workers.transport.data.gouv.fr
* proxy: https://proxy.transport.data.gouv.fr
* Staging
* site: https://prochainement.transport.data.gouv.fr
* jobs: https://workers.prochainement.transport.data.gouv.fr
  * proxy: https://proxy.prochainement.transport.data.gouv.fr
These names are [configured via a CNAME on Clever Cloud](https://www.clever-cloud.com/doc/administrate/domain-names/#using-personal-domain-names).
The corresponding SSL certificates are auto-generated via Let's Encrypt and Clever Cloud.
# Uptime monitoring (updown.io)
The following URLs are currently monitored via updown.io (with email & Mattermost alerts) at various frequencies:
* https://transport.data.gouv.fr/health-check (https://updown.io/pl8a) every minute
* https://workers.transport.data.gouv.fr/health-check (https://updown.io/9sto) every minute
* https://validation.transport.data.gouv.fr (https://updown.io/gndz) every 5 minutes
* https://metabase.transport.data.gouv.fr (https://updown.io/f9rd) every 5 minutes
* https://prochainement.transport.data.gouv.fr/health-check (https://updown.io/2pvz) every 5 minutes
# Useful changelogs
* https://developers.clever-cloud.com/changelog/ for Clever Cloud components (e.g. Postgres)
* [.tool-versions](.tool-versions) for Elixir & Erlang
# Blog
The project [blog](https://blog.transport.data.gouv.fr/) code and articles are hosted in the [blog](https://github.com/etalab/transport-site/tree/blog/blog) folder of the blog branch. A specific blog branch has been created with less restrictive merge rules, to allow publishing articles directly from the CMS without needing a GitHub code review.
Technically, the blog is a Hugo static website, enhanced with [Netlify CMS](https://www.netlifycms.org/), and automatically deployed using Netlify. Netlify CMS allows GitHub users who have write access to this repo to write and edit articles, without the need to use git or GitHub.
To write or edit an article, visit https://blog.transport.data.gouv.fr/admin/.
For development purposes, you can run the blog locally. Install [hugo](https://gohugo.io/getting-started/installing/), open a terminal, go to the blog folder of the project and run `hugo serve`.
# Troubleshooting
## No usable OpenSSL found (during Erlang installation via ASDF)
macOS comes with a pre-installed version of LibreSSL, which is a fork of OpenSSL.
This can cause trouble, since it is considered "no usable OpenSSL" by Erlang.
We can fix this error in 2 steps:
1. Install OpenSSL 1.1 (via Homebrew, for example)
```
> brew install openssl@1.1
```
2. Force the use of the installed version when installing Erlang by setting the `--with-ssl` option in the `KERL_CONFIGURE_OPTIONS` variable.
```
> export KERL_CONFIGURE_OPTIONS="--with-ssl=$(brew --prefix --installed openssl@1.1)"
> asdf install erlang 24.0.4
```
See https://github.com/asdf-vm/asdf-erlang/issues/82.
================================================
FILE: apps/shared/lib/application.ex
================================================
defmodule Shared.Application do
@moduledoc false
use Application
def start(_type, _args) do
children = [
# Used for streaming component, see possible config at:
# https://github.com/keathley/finch#usage
{Finch,
name: Transport.Finch,
pools: %{
# slightly larger than default
:default => [size: 25]
}},
{Cachex, name: cache_name()}
]
opts = [strategy: :one_for_one, name: Shared.Supervisor]
Supervisor.start_link(children, opts)
end
def cache_name, do: Shared.Cachex
end
================================================
FILE: apps/shared/lib/appsignal_filter.ex
================================================
defmodule TransportWeb.Plugs.AppSignalFilter do
@moduledoc """
An attempt to reduce the volume of events sent to AppSignal in order to keep a lower bill.
This sets the namespace to a well-known "ignore" value, which must be added to the
AppSignal `ignore_namespaces` config value.
See: https://github.com/etalab/transport-site/issues/3274
The plug must be activated low-enough in the pipeline, otherwise the "ignore" value
won't be used and instead the middleware value will take precedence.
See: https://github.com/appsignal/appsignal-elixir/issues/865
"""
def init(options), do: options
def call(%Plug.Conn{} = conn, _opts) do
if function_exported?(Appsignal.Tracer, :root_span, 0) do
if must_ignore?(conn) do
Appsignal.Tracer.root_span() |> Appsignal.Span.set_namespace("ignore")
end
end
conn
end
# this method allows us to filter programmatically as needed
defp must_ignore?(%Plug.Conn{} = conn) do
conn.host =~ ~r/proxy/i
end
end
================================================
FILE: apps/shared/lib/cldr.ex
================================================
defmodule Transport.Cldr do
@moduledoc """
Declares a backend for Cldr as required.
https://hexdocs.pm/ex_cldr_numbers/readme.html#introduction-and-getting-started
"""
use Cldr,
locales: ["en", "fr"],
providers: [Cldr.Number, Cldr.Calendar, Cldr.Unit, Cldr.List, Cldr.DateTime],
default_locale: "fr"
end
================================================
FILE: apps/shared/lib/conditional_json_encoder.ex
================================================
defmodule Transport.Shared.ConditionalJSONEncoder do
require Logger
@moduledoc """
Some of the JSON payloads the app sends are costly to compute, so we're
caching them. It is many times (x100) more efficient to store them as
an Elixir binary (full string) rather than as detailed Elixir maps, due to
both marshalling/unmarshalling costs from ETS, and re-encoding to JSON strings.
Here we leverage Phoenix "format encoders" to override the "encoding to JSON"
process: if the data is a well-specified tuple indicating that the code calling
`render` knows that the payload is already JSON encoded, we'll just pass the data
through.
We are using a tuple rather than just detecting that the data is binary because
there could be places where the reply is just a string (e.g. "OK"), and this would
require JSON encoding.
This bit of code makes sure we can rely on `render` (common code path) instead of
moving to `send_resp` calls to mimic `render` for JSON, so it is a bit more future-proof.
References:
* https://hexdocs.pm/phoenix/1.5.8/Phoenix.Template.html#module-format-encoders
* https://github.com/phoenixframework/phoenix/blob/38b3702fd468fea7075cdf996c19c22350fe1eec/lib/phoenix/controller.ex#L271-L286
Credit goes to Benjamin Milde on Elixir lang slack channel
for his help on this.
"""
def encode_to_iodata!({:skip_json_encoding, data}) when is_binary(data) do
Logger.info("Skipping JSON encode step (payload is already JSON encoded)")
data
end
def encode_to_iodata!(data) do
Phoenix.json_library().encode_to_iodata!(data)
end
end
================================================
FILE: apps/shared/lib/data_visualization.ex
================================================
defmodule Transport.DataVisualization do
@moduledoc """
Wrapper for DataVisualization
"""
@callback has_features(map() | nil) :: boolean()
@callback validation_data_vis(any) :: nil | map
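# The implementation is resolved at runtime from the application environment.
# Hypothetical wiring (the real value is set in the config files / test mocks):
#
#     config :transport, :data_visualization, Transport.DataVisualization.Impl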
defp impl, do: Application.get_env(:transport, :data_visualization)
def has_features(validations), do: impl().has_features(validations)
def validation_data_vis(validations), do: impl().validation_data_vis(validations)
end
defmodule Transport.DataVisualization.Impl do
@moduledoc """
Extract a geojson from a GTFS validation,
in order to provide a data visualization of the validation issues
"""
@behaviour Transport.DataVisualization
@impl Transport.DataVisualization
def has_features(nil), do: false
def has_features(data_visualization), do: not Enum.empty?(data_visualization["features"])
@impl Transport.DataVisualization
@spec validation_data_vis(any) :: nil | map
def validation_data_vis(nil), do: nil
def validation_data_vis(validations) do
validations
|> Enum.map(fn {issue_type, issues} -> {issue_type, data_vis_per_issue_type(issues)} end)
|> Enum.into(%{})
end
defp data_vis_per_issue_type(issues) do
severity = issues |> Enum.at(0) |> Map.get("severity")
geojson =
issues
|> Enum.flat_map(fn issue -> get_in(issue, ["geojson", "features"]) || [] end)
|> Enum.reject(&is_nil(&1))
%{
"severity" => severity,
"geojson" => %{"features" => geojson, "type" => "FeatureCollection"}
}
end
end
================================================
FILE: apps/shared/lib/date_time_display.ex
================================================
defmodule Shared.DateTimeDisplay do
@moduledoc """
A module to have a coherent display of dates and times across the website.
The goal is to show date times in the Europe/Paris timezone to our users.
"""
@doc """
Formats a date to display depending on the locale
iex> format_date(~D[2022-03-01], "fr")
"01/03/2022"
iex> format_date(~D[2022-03-01], "en")
"2022-03-01"
iex> format_date("2022-03-01", "fr")
"01/03/2022"
iex> format_date("2022-03-01", "sauce tomate")
"01/03/2022"
iex> format_date("2022-03-01", "en")
"2022-03-01"
iex> format_date(~U[2022-11-01 00:00:00Z], "en")
"2022-11-01"
iex> format_date("2022-02-21T14:28:09.366000+00:00", "fr", iso_extended: true)
"21/02/2022"
"""
@spec format_date(binary | Date.t() | DateTime.t(), binary() | nil) :: binary
def format_date(%DateTime{} = datetime, locale), do: format_date(DateTime.to_date(datetime), locale)
def format_date(%Date{} = date, "fr"), do: Calendar.strftime(date, "%d/%m/%Y")
def format_date(%Date{} = date, "en"), do: Calendar.strftime(date, "%Y-%m-%d")
def format_date(%Date{} = date, _), do: format_date(date, "fr")
def format_date(date, locale) when is_binary(date) do
date |> Date.from_iso8601!() |> format_date(locale)
end
def format_date(nil, _), do: ""
def format_date(date, locale, iso_extended: true) do
date |> TimeWrapper.parse!("{ISO:Extended}") |> format_date(locale)
end
@doc """
Display a date from a DateTime
iex> format_datetime_to_date(~U[2022-03-01T15:00:00Z], "fr")
"01/03/2022"
iex> format_datetime_to_date(~U[2022-03-01T15:00:00Z], "en")
"2022-03-01"
"""
def format_datetime_to_date(%DateTime{} = dt, locale) do
dt |> DateTime.to_date() |> format_date(locale)
end
def format_datetime_to_date(nil, _), do: ""
@doc """
Formats a date time for display.
Input can be in any timezone, output is in the Europe/Paris timezone.
iex> format_datetime_to_paris(~U[2022-03-01 15:30:00+00:00], "fr")
"01/03/2022 à 16h30 Europe/Paris"
iex> format_datetime_to_paris(~U[2022-03-01 15:30:00+00:00], "en")
"2022-03-01 at 16:30 Europe/Paris"
iex> format_datetime_to_paris(~N[2022-03-01 15:30:00.0000], "en")
"2022-03-01 at 16:30 Europe/Paris"
iex> format_datetime_to_paris("2022-03-01T15:30:00Z", "fr")
"01/03/2022 à 16h30 Europe/Paris"
iex> format_datetime_to_paris("2022-03-01T15:30:00Z", "sauce tomate")
"01/03/2022 à 16h30 Europe/Paris"
iex> format_datetime_to_paris("2022-03-01T15:30:00+01:00", "fr")
"01/03/2022 à 15h30 Europe/Paris"
iex> format_datetime_to_paris("2022-03-01T15:30:00+01:00", "fr")
"01/03/2022 à 15h30 Europe/Paris"
iex> format_datetime_to_paris("2022-03-01T15:30:09+01:00", "fr", with_seconds: true)
"01/03/2022 à 15:30:09 Europe/Paris"
iex> format_datetime_to_paris("2022-03-01T15:30:00+00:00", "en")
"2022-03-01 at 16:30 Europe/Paris"
iex> format_datetime_to_paris("2022-03-01T15:30:09+00:00", "en", with_seconds: true)
"2022-03-01 at 16:30:09 Europe/Paris"
# right before daylight hour change
iex> format_datetime_to_paris("2022-03-27T00:59+00:00", "fr")
"27/03/2022 à 01h59 Europe/Paris"
# right after daylight hour change
iex> format_datetime_to_paris("2022-03-27T01:00:00+00:00", "fr")
"27/03/2022 à 03h00 Europe/Paris"
iex> format_datetime_to_paris("2022-03-27T01:00:00+00:00", "fr", no_timezone: true)
"27/03/2022 à 03h00"
"""
def format_datetime_to_paris(dt, locale), do: format_datetime_to_paris(dt, locale, [])
def format_datetime_to_paris(%DateTime{} = dt, locale, options) do
format = get_localized_datetime_format(locale, options)
format = if Keyword.get(options, :no_timezone), do: format, else: format <> " Europe/Paris"
dt |> convert_to_paris_time() |> Calendar.strftime(format)
end
def format_datetime_to_paris(%NaiveDateTime{} = ndt, locale, options) do
ndt
|> convert_to_paris_time()
|> format_datetime_to_paris(locale, options)
end
def format_datetime_to_paris(datetime, locale, options) when is_binary(datetime) do
datetime
|> TimeWrapper.parse!("{ISO:Extended}")
|> format_datetime_to_paris(locale, options)
end
def format_datetime_to_paris(nil, _, _), do: ""
@doc """
Formats time of a date time for display.
Input can be in any timezone, output is in the Europe/Paris timezone.
iex> format_time_to_paris(~U[2022-03-01 15:30:00+00:00], "fr")
"16h30 Europe/Paris"
iex> format_time_to_paris(~U[2022-03-01 15:30:00+00:00], "en")
"16:30 Europe/Paris"
iex> format_time_to_paris(~N[2022-03-01 15:30:00.0000], "en")
"16:30 Europe/Paris"
iex> format_time_to_paris("2022-03-01T15:30:00Z", "fr")
"16h30 Europe/Paris"
iex> format_time_to_paris("2022-03-01T15:30:00Z", "sauce tomate")
"16h30 Europe/Paris"
iex> format_time_to_paris("2022-03-01T15:30:00+01:00", "fr")
"15h30 Europe/Paris"
iex> format_time_to_paris("2022-03-01T15:30:00+01:00", "fr")
"15h30 Europe/Paris"
iex> format_time_to_paris("2022-03-01T15:30:09+01:00", "fr", with_seconds: true)
"15:30:09 Europe/Paris"
iex> format_time_to_paris("2022-03-01T15:30:00+00:00", "en")
"16:30 Europe/Paris"
iex> format_time_to_paris("2022-03-01T15:30:09+00:00", "en", with_seconds: true)
"16:30:09 Europe/Paris"
# right before daylight hour change
iex> format_time_to_paris("2022-03-27T00:59+00:00", "fr")
"01h59 Europe/Paris"
# right after daylight hour change
iex> format_time_to_paris("2022-03-27T01:00:00+00:00", "fr")
"03h00 Europe/Paris"
iex> format_time_to_paris("2022-03-27T01:00:00+00:00", "fr", no_timezone: true)
"03h00"
"""
def format_time_to_paris(dt, locale) do
format_time_to_paris(dt, locale, [])
end
def format_time_to_paris(%DateTime{} = dt, locale, options) do
format = get_localized_time_format(locale, options)
format = if Keyword.get(options, :no_timezone), do: format, else: format <> " Europe/Paris"
dt |> convert_to_paris_time() |> Calendar.strftime(format)
end
def format_time_to_paris(%NaiveDateTime{} = ndt, locale, options) do
ndt
|> convert_to_paris_time()
|> format_time_to_paris(locale, options)
end
def format_time_to_paris(datetime, locale, options) when is_binary(datetime) do
datetime
|> TimeWrapper.parse!("{ISO:Extended}")
|> format_time_to_paris(locale, options)
end
def format_time_to_paris(nil, _, _), do: ""
@doc """
Formats a duration in seconds to display, according to a locale.
Supported locales: "fr" and "en".
iex> format_duration(1, :en)
"1 second"
iex> format_duration(1, Transport.Cldr.Locale.new!("en"))
"1 second"
iex> format_duration(1, "en")
"1 second"
iex> format_duration(3, "en")
"3 seconds"
iex> format_duration(60, "en")
"1 minute"
iex> format_duration(61, "en")
"1 minute and 1 second"
iex> format_duration(65, "en")
"1 minute and 5 seconds"
iex> format_duration(120, "en")
"2 minutes"
iex> format_duration(125, "en")
"2 minutes and 5 seconds"
iex> format_duration(3601, "en")
"1 hour and 1 second"
iex> format_duration(3661, "en")
"1 hour, 1 minute, and 1 second"
iex> format_duration(1, :fr)
"1 seconde"
iex> format_duration(1, Transport.Cldr.Locale.new!("fr"))
"1 seconde"
iex> format_duration(1, "fr")
"1 seconde"
iex> format_duration(3, "fr")
"3 secondes"
iex> format_duration(60, "fr")
"1 minute"
iex> format_duration(61, "fr")
"1 minute et 1 seconde"
iex> format_duration(65, "fr")
"1 minute et 5 secondes"
iex> format_duration(120, "fr")
"2 minutes"
iex> format_duration(125, "fr")
"2 minutes et 5 secondes"
iex> format_duration(3601, "fr")
"1 heure et 1 seconde"
iex> format_duration(3661, "fr")
"1 heure, 1 minute et 1 seconde"
"""
@spec format_duration(pos_integer(), atom() | Cldr.LanguageTag.t()) :: binary()
def format_duration(duration_in_seconds, locale) do
locale = Cldr.Locale.new!(locale, Transport.Cldr)
duration_in_seconds
|> Cldr.Calendar.Duration.new_from_seconds()
|> Cldr.Calendar.Duration.to_string!(locale: locale)
end
@doc """
iex> relative_datetime_in_days(1, "fr")
"demain"
iex> relative_datetime_in_days(0, "fr")
"aujourd’hui"
iex> relative_datetime_in_days(-1, "fr")
"hier"
iex> relative_datetime_in_days(-7, "en")
"7 days ago"
"""
@spec relative_datetime_in_days(integer(), binary()) :: binary()
def relative_datetime_in_days(days, locale) do
Transport.Cldr.DateTime.Relative.to_string!(days, unit: :day, locale: locale)
end
def relative_datetime_in_seconds(seconds, locale) do
Transport.Cldr.DateTime.Relative.to_string!(seconds, locale: locale)
end
@spec convert_to_paris_time(DateTime.t() | NaiveDateTime.t()) :: DateTime.t()
def convert_to_paris_time(%DateTime{} = dt) do
TimeWrapper.convert_to_paris_time(dt)
end
def convert_to_paris_time(%NaiveDateTime{} = ndt) do
ndt |> TimeWrapper.convert("UTC") |> convert_to_paris_time()
end
defp get_localized_datetime_format("en" = locale, options) do
"%Y-%m-%d at #{get_localized_time_format(locale, options)}"
end
defp get_localized_datetime_format(locale, options) do
"%d/%m/%Y à #{get_localized_time_format(locale, options)}"
end
defp get_localized_time_format("en", options) do
if Keyword.get(options, :with_seconds) do
"%H:%M:%S"
else
"%H:%M"
end
end
defp get_localized_time_format(_locale, options) do
if Keyword.get(options, :with_seconds) do
"%H:%M:%S"
else
"%Hh%M"
end
end
end
================================================
FILE: apps/shared/lib/hasher.ex
================================================
defmodule Hasher.Wrapper do
@moduledoc """
A Hasher wrapper, useful for testing purposes
"""
@callback get_content_hash(binary()) :: binary()
def impl, do: Application.get_env(:transport, :hasher_impl)
end
defmodule Hasher.Dummy do
@moduledoc """
A dummy module, where everything always has the same dummy hash
"""
@behaviour Hasher.Wrapper
@impl Hasher.Wrapper
def get_content_hash(_url), do: "xxx"
end
defmodule Hasher do
@moduledoc """
Hasher computes the sha256 hash of a file given by
a URL or a local path
"""
require Logger
@behaviour Hasher.Wrapper
@impl Hasher.Wrapper
def get_content_hash(url) do
case scheme = URI.parse(url).scheme do
s when s in ["http", "https"] ->
get_content_hash_http(url)
_ ->
Logger.warning("Cannot process #{scheme |> inspect} url (#{url}) at the moment. Skipping.")
nil
end
end
@spec get_content_hash_http(String.t()) :: String.t()
def get_content_hash_http(url) do
with {:ok, %{headers: headers}} <- HTTPoison.head(url),
etag when not is_nil(etag) <- Enum.find_value(headers, &find_etag/1),
content_hash <- String.replace(etag, "\"", "") do
content_hash
else
{:error, error} ->
Logger.error(fn -> "error while computing content_hash #{inspect(error)}" end)
nil
nil ->
compute_sha256(url)
end
end
@spec compute_sha256(String.t()) :: String.t()
def compute_sha256(url) do
case HTTPStreamV2.fetch_status_and_hash(url) do
{:ok, %{status: 200, hash: hash}} ->
hash
{:error, msg} ->
Logger.warning("Cannot compute hash for url #{url |> inspect}, returning empty hash. Error : #{msg |> inspect}")
# NOTE: this mimics the legacy code, and maybe we could return nil instead, but the whole
# thing isn't under tests, so I prefer to keep it like before for now.
""
end
rescue
e ->
Logger.error(
"Exception #{e |> inspect} occurred during hash computation for url #{url |> inspect}, returning empty hash"
)
""
end
@spec find_etag({binary(), binary()} | any()) :: binary() | nil
defp find_etag({"Etag", v}), do: v
defp find_etag(_), do: nil
def compute_checksum(stream, algorithm) do
stream
|> Enum.reduce(:crypto.hash_init(algorithm), fn elm, acc -> :crypto.hash_update(acc, elm) end)
|> :crypto.hash_final()
|> Base.encode16()
|> String.downcase()
end
def get_file_hash(file_path) do
file_path
|> File.stream!(2048)
|> compute_checksum(:sha256)
end
@doc """
Computes a single sha256 string using a ZIP metadata payload.
ZIP metadata is produced by `Transport.ZipMetaDataExtractor.extract!/1`.
iex> zip_hash([%{"compressed_size" => 41, "file_name" => "ExportService.checksum.md5", "last_modified_datetime" => "2017-02-16T05:01:12", "sha256" => "f0c7216411dec821330ffbebf939bfe73a50707f5e443795a122ec7bef37aa16", "uncompressed_size" => 47}, %{"compressed_size" => 115, "file_name" => "agency.txt", "last_modified_datetime" => "2017-02-16T05:01:12", "sha256" => "548de694a86ab7d6ac0cd3535b0c3b8bffbabcc818e8d7f5a4b8f17030adf617", "uncompressed_size" => 143}])
"ddb5bc46003dbe71c98edcbbd4d5c6e9a101b8727a749a84ac4e777fd2302732"
"""
def zip_hash(zip_metadata) when is_list(zip_metadata) do
zip_metadata
|> Enum.sort_by(&get_signature(&1))
|> Stream.map(&get_signature(&1))
|> compute_checksum(:sha256)
end
@doc """
Computes the signature of a ZIP metadata item.
We concatenate the filename and its sha256 together.
Using the sha256 alone is not enough because the ZIP archive hash
would be the same when renaming files without changing their content.
iex> get_signature(%{"compressed_size" => 41, "file_name" => "file.txt", "last_modified_datetime" => "2017-02-16T05:01:12", "sha256" => "f0c7216411dec821330ffbebf939bfe73a50707f5e443795a122ec7bef37aa16", "uncompressed_size" => 47})
"file.txtf0c7216411dec821330ffbebf939bfe73a50707f5e443795a122ec7bef37aa16"
"""
def get_signature(zip_metadata_item) when is_map(zip_metadata_item) do
map_get(zip_metadata_item, :file_name) <> map_get(zip_metadata_item, :sha256)
end
defp map_get(map, key) when key in [:sha256, :file_name] do
# At the moment zip_metadata may have atom keys (when coming from Elixir)
# or string keys (when coming from the database).
# Guard is here to prevent against other usages.
Map.get(map, key) || Map.get(map, to_string(key))
end
end
================================================
FILE: apps/shared/lib/helpers.ex
================================================
defmodule Helpers do
@moduledoc """
Helper functions that are used across the whole project
"""
require Logger
@doc """
Gets the filename part of an url
## Examples
iex> Helpers.filename_from_url("https://example.com/gtfs.zip")
"gtfs.zip"
iex> Helpers.filename_from_url("https://example.com/foo/bar/baz/bobette/")
"bobette"
"""
@spec filename_from_url(binary()) :: binary()
def filename_from_url(url) when is_binary(url) do
url
|> URI.parse()
|> Map.get(:path)
|> String.trim_trailing("/")
|> String.split("/")
|> List.last()
end
def filename_from_url(_), do: nil
@doc """
Formats numbers.
See options: https://hexdocs.pm/ex_cldr_numbers/readme.html#primary-public-api
## Examples
iex> Helpers.format_number(12_345)
"12 345"
iex> Helpers.format_number(12_345.42)
"12 345,42"
iex> Helpers.format_number(12_345, locale: "en")
"12,345"
"""
def format_number(n, options \\ []) when is_number(n) do
{:ok, res} = Transport.Cldr.Number.to_string(n, options)
res
end
@doc """
Formats numbers, allowing for nil to be passed and formatted specifically
## Examples
iex> Helpers.format_number_maybe_nil(12_345, nil_result: "N/C")
"12 345"
iex> Helpers.format_number_maybe_nil(nil, nil_result: "N/C")
"N/C"
"""
def format_number_maybe_nil(nil, options), do: options |> Keyword.fetch!(:nil_result)
def format_number_maybe_nil(n, options), do: format_number(n, options |> Keyword.delete(:nil_result))
@spec last_updated([DB.Resource.t()]) :: binary()
def last_updated(resources) do
resources
|> Enum.map(& &1.last_update)
|> case do
[] -> nil
dates -> dates |> Enum.max(DateTime) |> DateTime.to_iso8601()
end
end
end
================================================
FILE: apps/shared/lib/http_stream_v2.ex
================================================
defmodule HTTPStreamV2 do
@moduledoc """
A new module able to compute checksum of a given URL via streaming, all
while retaining extra information such as HTTP status, body size, and headers.
"""
@doc """
Issue a streamed GET request, computing the SHA256 of the payload on the fly,
and returning the result as a map:
```
%{
status: 200,
hash: "95ab5b2602d6a21d7efcbc87de641c59f2ecc7510a1aa0d20708f122faf172ca",
headers: [...],
body_byte_size: 123456
}
```
The headers are kept around for a variety of reasons. We could use them to follow a redirect,
double-check the etag, verify the content type etc.
"""
# same as HTTPoison
@redirect_status [301, 302, 307]
@default_allowed_redirects 5
@spec fetch_status_and_hash(binary(), integer(), integer()) :: {:ok, map()} | {:error, any()}
def fetch_status_and_hash(url, max_redirect \\ @default_allowed_redirects, redirect_count \\ 0)
def fetch_status_and_hash(_url, max_redirect, redirect_count)
when redirect_count > max_redirect do
{:error, "maximum number of redirect reached"}
end
def fetch_status_and_hash(url, max_redirect, redirect_count) do
request = Finch.build(:get, URI.encode(url))
try do
{:ok, result} = Finch.stream(request, Transport.Finch, %{}, &handle_stream_response/2)
{:ok, compute_final_hash(result)}
catch
{:redirect, redirect_url} ->
fetch_status_and_hash(redirect_url, max_redirect, redirect_count + 1)
{:error, e} ->
{:error, e}
end
end
defp handle_stream_response({:status, status}, acc) do
acc
|> Map.put(:status, status)
|> Map.put(:hash, :crypto.hash_init(:sha256))
|> Map.put(:body_byte_size, 0)
end
defp handle_stream_response({:headers, headers}, acc) do
case acc.status do
status when status in @redirect_status ->
headers
|> location_header()
|> case do
nil -> throw({:error, "no redirection url provided"})
{_, redirect_url} -> throw({:redirect, redirect_url})
end
_ ->
acc |> Map.put(:headers, headers)
end
end
defp handle_stream_response({:data, data}, acc) do
hash = :crypto.hash_update(acc.hash, data)
%{acc | hash: hash, body_byte_size: acc[:body_byte_size] + (data |> byte_size)}
end
defp compute_final_hash(result) do
hash =
result
|> Map.fetch!(:hash)
|> :crypto.hash_final()
|> Base.encode16()
|> String.downcase()
%{result | hash: hash}
end
@spec fetch_status(binary()) :: {:ok, map()} | {:error, any()}
def fetch_status(url) do
request = Finch.build(:get, URI.encode(url))
Finch.stream(request, Transport.Finch, %{}, &handle_stream_status/2)
catch
# when status is fetched, a throw is used to stop the streaming and exit with the needed information
{:status_fetched, status} -> status
e -> {:error, e}
end
defp location_header(headers) do
headers |> Enum.find(fn {k, _v} -> String.downcase(k) == "location" end)
end
defp handle_stream_status({:status, status}, acc) do
acc = acc |> Map.put(:status, status)
if status not in @redirect_status do
# we know everything we need to know
throw({:status_fetched, {:ok, acc}})
end
acc
end
defp handle_stream_status({:headers, headers}, acc) do
acc =
headers
|> location_header()
|> case do
nil -> acc
{_, url} -> acc |> Map.put(:location, url)
end
throw({:status_fetched, {:ok, acc}})
end
def fetch_status_follow_redirect(
url,
max_redirect \\ @default_allowed_redirects,
redirect_count \\ 0
)
def fetch_status_follow_redirect(_url, max_redirect, redirect_count)
when redirect_count > max_redirect do
{:error, "maximum number of redirect reached"}
end
def fetch_status_follow_redirect(url, max_redirect, redirect_count) do
case fetch_status(url) do
{:ok, %{status: status, location: redirect_url}} when status in @redirect_status ->
fetch_status_follow_redirect(redirect_url, max_redirect, redirect_count + 1)
{:ok, %{status: status}} ->
{:ok, status}
_ ->
{:error, "error while fetching status"}
end
end
end
================================================
FILE: apps/shared/lib/proxy.ex
================================================
defmodule Shared.Proxy do
@moduledoc """
Shared methods useful when proxying requests in our apps.
"""
@doc """
A list of HTTP headers that will be forwarded by our proxy.
For now we use an allowlist we can gradually expand.
Make sure to avoid including "hop-by-hop" headers here.
https://book.hacktricks.xyz/pentesting-web/abusing-hop-by-hop-headers
"""
def forwarded_headers_allowlist do
[
"content-type",
"content-length",
"date",
"last-modified",
"etag"
]
end
end
================================================
FILE: apps/shared/lib/req_custom_cache.ex
================================================
defmodule Transport.Shared.ReqCustomCache do
@moduledoc """
A simple HTTP cache for `req` that does not use headers. If the file is not found
on disk, the download will occur, otherwise the response will be read from disk.
At this point, this module is more designed for development use (with production data)
than for production use (in particular, security implications of `:erlang.binary_to_term`
and `:erlang.term_to_binary`).
"""
require Logger
def attach(%Req.Request{} = request, options \\ []) do
request
|> Req.Request.register_options([:custom_cache_dir])
|> Req.Request.merge_options(options)
|> Req.Request.append_request_steps(custom_cache: &request_local_cache_step/1)
|> Req.Request.prepend_response_steps(custom_cache: &response_local_cache_step/1)
end
def request_local_cache_step(request) do
# NOTE: for now, no expiration is supported, you'll have to wipe-out the cache folder manually
# NOTE: race condition here, for parallel queries
path = cache_path(request)
if File.exists?(path) do
# Logger.info("File found in cache (#{path})")
{request, load_cache(path)}
else
request
end
end
def response_local_cache_step({request, response}) do
# NOTE: we'll need a way to let the caller customize which HTTP status codes must result
# into caching vs not (e.g. rate limit 429 should ideally not be cached, while 404 should etc)
path = cache_path(request)
unless File.exists?(path) do
Logger.info("Saving file to cache (#{path})")
write_cache(path, response)
end
{request, response}
end
# https://github.com/wojtekmach/req/blob/102b9aa6c6ff66f00403054a0093c4f06f6abc2f/lib/req/steps.ex#L1268
def cache_path(cache_dir, %{method: :get} = request) do
cache_key =
Enum.join(
[
request.url.host,
Atom.to_string(request.method),
:crypto.hash(:sha256, :erlang.term_to_binary(request.url))
|> Base.encode16(case: :lower)
],
"-"
)
Path.join(cache_dir, cache_key)
end
def cache_path(request) do
cache_path(request.options[:custom_cache_dir], request)
end
# https://github.com/wojtekmach/req/blob/102b9aa6c6ff66f00403054a0093c4f06f6abc2f/lib/req/steps.ex#L1288-L1290
def load_cache(path) do
path |> File.read!() |> :erlang.binary_to_term()
end
# https://github.com/wojtekmach/req/blob/102b9aa6c6ff66f00403054a0093c4f06f6abc2f/lib/req/steps.ex#L1283-L1286
def write_cache(path, response) do
File.mkdir_p!(Path.dirname(path))
File.write!(path, :erlang.term_to_binary(response))
end
end
================================================
FILE: apps/shared/lib/resource_schema.ex
================================================
defmodule Transport.Shared.ResourceSchema do
@moduledoc """
Guess schema names and versions for resources
"""
import Helpers, only: [filename_from_url: 1]
@spec guess_name(map(), binary()) :: binary() | nil
@doc """
Guess a schema name for a resource.
## Examples
iex> guess_name(%{}, "public-transit")
nil
iex> guess_name(%{"format" => "json"}, "public-transit")
nil
iex> guess_name(%{"format" => "JSON", "url" => "https://example.com/zfe_zone_nom.json"}, "road-data")
"etalab/schema-zfe"
iex> guess_name(%{"format" => "json", "url" => "https://example.com/nope.zip"}, "road-data")
nil
iex> guess_name(%{"schema" => %{"name" => "etalab/schema-zfe"}}, "road-data")
"etalab/schema-zfe"
"""
def guess_name(%{"schema" => %{"name" => schema}}, _dataset_type) do
schema
end
def guess_name(%{"url" => url, "format" => format}, "road-data") do
appropriate_format = Enum.member?(["json", "geojson"], String.downcase(format))
appropriate_filename = url |> filename_from_url() |> String.starts_with?("zfe")
if appropriate_format and appropriate_filename, do: "etalab/schema-zfe"
end
def guess_name(_, _), do: nil
@spec guess_version(map()) :: binary() | nil
@doc """
Guess a schema version for a resource.
## Examples
iex> guess_version(%{"schema" => %{"version" => "1.1"}})
"1.1"
iex> guess_version(%{})
nil
"""
def guess_version(%{"schema" => %{"version" => version}}) do
version
end
def guess_version(_), do: nil
end
================================================
FILE: apps/shared/lib/s3.ex
================================================
defmodule Transport.S3 do
@moduledoc """
This module contains common code related to S3 object storage.
"""
require Logger
@type bucket_feature :: :history | :on_demand_validation | :gtfs_diff | :logos | :aggregates
@spec bucket_name(bucket_feature()) :: binary()
def bucket_name(feature) do
config = Application.fetch_env!(:transport, :s3_buckets)
"transport-data-gouv-fr-#{Map.fetch!(config, feature)}"
end
@spec permanent_url(bucket_feature(), binary()) :: binary()
def permanent_url(feature, path \\ "") do
base_url = :io_lib.format(Application.fetch_env!(:ex_aws, :cellar_url), [bucket_name(feature)]) |> to_string()
if String.length(path) > 0 do
base_url |> URI.parse() |> URI.append_path("/" <> path) |> URI.to_string()
else
base_url
end
end
@spec bucket_names() :: [binary()]
def bucket_names do
buckets_response = ExAws.S3.list_buckets() |> Transport.Wrapper.ExAWS.impl().request!()
buckets_response.body.buckets |> Enum.map(& &1.name)
end
@spec delete_object!(bucket_feature(), binary()) :: any()
def delete_object!(feature, path) do
bucket = bucket_name(feature)
bucket |> ExAws.S3.delete_object(path) |> Transport.Wrapper.ExAWS.impl().request!()
end
@spec stream_to_s3!(bucket_feature(), binary(), binary(), acl: atom(), cache_control: binary()) :: any()
def stream_to_s3!(feature, local_path, upload_path, options \\ []) do
Logger.debug("Streaming #{local_path} to #{upload_path}")
options = Keyword.validate!(options, [:cache_control, {:acl, :private}])
local_path
|> ExAws.S3.Upload.stream_file()
|> ExAws.S3.upload(Transport.S3.bucket_name(feature), upload_path, options)
|> Transport.Wrapper.ExAWS.impl().request!()
end
@spec download_file(bucket_feature(), binary(), binary()) :: any()
def download_file(feature, remote_path, local_path) do
Logger.debug("Downloading #{remote_path} to #{local_path}")
feature
|> Transport.S3.bucket_name()
|> ExAws.S3.download_file(remote_path, local_path)
|> Transport.Wrapper.ExAWS.impl().request!()
end
@doc """
Based on the provided `bucket_feature` (see typespec at top), compute
the actual bucket name, and issues a fail-fast get_object response.
Based on `ExAWS.request!`, so "will either return the successful response from AWS or raise an exception"
See: https://hexdocs.pm/ex_aws/ExAws.html#request!/2
"""
@spec get_object!(bucket_feature(), binary()) :: binary()
def get_object!(feature, remote_path) do
Logger.debug("Getting object from #{remote_path} into RAM")
feature
|> Transport.S3.bucket_name()
|> ExAws.S3.get_object(remote_path)
|> Transport.Wrapper.ExAWS.impl().request!()
end
@spec download_file!(bucket_feature(), binary(), binary() | :memory) :: ExAws.S3.Download.t()
def download_file!(feature, remote_path, dest) do
feature
|> Transport.S3.bucket_name()
|> ExAws.S3.download_file(remote_path, dest)
|> Transport.Wrapper.ExAWS.impl().request!()
end
@spec head_object!(bucket_feature(), binary()) :: map()
def head_object!(feature, remote_path) do
feature
|> Transport.S3.bucket_name()
|> ExAws.S3.head_object(remote_path)
|> Transport.Wrapper.ExAWS.impl().request!()
end
@spec remote_copy_file!(bucket_feature(), binary(), binary()) :: any()
def remote_copy_file!(feature, remote_path_src, remote_path_dest) do
bucket = Transport.S3.bucket_name(feature)
ExAws.S3.put_object_copy(bucket, remote_path_dest, bucket, remote_path_src)
|> Transport.Wrapper.ExAWS.impl().request!()
end
end
================================================
FILE: apps/shared/lib/sentry_exception_filter.ex
================================================
defmodule Transport.Shared.SentryExceptionFilter do
@moduledoc """
This module is used to avoid spamming our Sentry server
and thus consuming our events quota.
See https://hexdocs.pm/sentry/Sentry.html#module-filtering-exceptions.
Implementation based on
https://github.com/getsentry/sentry-elixir/blob/master/lib/sentry/default_event_filter.ex
"""
@behaviour Sentry.EventFilter
@ignored_plug_exceptions [
# default ones
Phoenix.Router.NoRouteError,
Plug.Parsers.RequestTooLarge,
Plug.Parsers.BadEncodingError,
Plug.Parsers.ParseError,
Plug.Parsers.UnsupportedMediaTypeError,
# our additions
Ecto.NoResultsError,
Phoenix.NotAcceptableError
]
def exclude_exception?(%x{}, :plug) when x in @ignored_plug_exceptions do
true
end
# "Ignore Plug route not found exception"
def exclude_exception?(%FunctionClauseError{function: :do_match, arity: 4}, :plug), do: true
def exclude_exception?(_exception, _source), do: false
end
================================================
FILE: apps/shared/lib/syntax_colors.ex
================================================
defmodule Transport.Inspect do
@moduledoc """
While creating scripts (`mix run script.exs`), it is useful to
color-pretty-print data structures, like `IEx` does.
This module provides tooling for this.
"""
# Taken from https://github.com/elixir-lang/elixir/blob/7ec5cc63e67de75816ae018766331fcf9c55faa8/lib/iex/lib/iex/config.ex#L107
# which does not publicly expose this currently.
@syntax_colors [
atom: :cyan,
string: :green,
number: :yellow,
list: :default_color,
boolean: :magenta,
nil: :magenta,
tuple: :default_color,
binary: :default_color,
map: :default_color
]
@doc """
Expose syntax color for use with `IO.inspect`'s `syntax_colors` option
"""
def syntax_colors, do: @syntax_colors
@doc """
Shortcut to color-pretty-print something easily
"""
def pretty_inspect(data) do
# NOTE: disabling credo here is not ideal ; we should have an equivalent check instead
# to ensure we don't leave stuff like this in the codebase (usually it's a debugging
# statement), but that will do for now.
# credo:disable-for-next-line Credo.Check.Warning.IoInspect
IO.inspect(data, syntax_colors: syntax_colors())
end
end
================================================
FILE: apps/shared/lib/time_wrapper.ex
================================================
defmodule TimeWrapper do
@moduledoc """
This module concentrates all the calls to `Timex` in a single place.
The idea behind this module is 1. to reduce our dependency on `Timex`, and
2. to ideally gradually replace calls by built-in Elixir `DateTime` calls, since
`Timex` filled a void in the language that has been partially filled now.
"""
# credo:disable-for-this-file Credo.Check.Warning.ForbiddenModule
def parse!(date_as_string, "{ISO:Extended}" = param) do
Timex.parse!(date_as_string, param)
end
def parse!(date_as_string, "{YYYY}{0M}{0D}" = param) do
Timex.parse!(date_as_string, param)
end
# NOTE: try not to use this, we will remove it. This is rfc2822 ;
# Plug encodes it, but there is no built-in decoder.
def parse!(datetime_as_string, "{WDshort}, {D} {Mshort} {YYYY} {h24}:{m}:{s} GMT" = param) do
Timex.parse!(datetime_as_string, param)
end
def diff(first, second, :hours = param) do
Timex.diff(first, second, param)
end
def now do
Timex.now()
end
def shift(dt, months: months) do
Timex.shift(dt, months: months)
end
def convert(dt, "UTC") do
Timex.Timezone.convert(dt, "UTC")
end
def convert_to_paris_time(dt) do
case Timex.Timezone.convert(dt, "Europe/Paris") do
%Timex.AmbiguousDateTime{after: dt} -> dt
%DateTime{} = dt -> dt
end
end
end
================================================
FILE: apps/shared/lib/validation/gbfs_validator.ex
================================================
defmodule Shared.Validation.GBFSValidator do
@moduledoc """
A module to validate GBFS feeds
"""
defmodule Summary do
@moduledoc """
A structure holding validation results for a GBFS feed
"""
@enforce_keys [:has_errors, :errors_count, :version_detected, :version_validated, :validator_version, :validator]
@derive Jason.Encoder
defstruct has_errors: false,
errors_count: nil,
version_detected: nil,
version_validated: nil,
validator_version: nil,
validator: nil
@type t :: %__MODULE__{
has_errors: boolean,
errors_count: integer,
version_detected: binary,
version_validated: binary,
validator_version: binary,
validator: module
}
end
defmodule Wrapper do
@moduledoc """
This behaviour defines the API for a GBFS Validator
"""
defp impl, do: Application.get_env(:transport, :gbfs_validator_impl)
@callback validate(binary()) :: {:ok, Summary.t()} | {:error, binary()}
def validate(url), do: impl().validate(url)
end
defmodule HTTPValidatorClient do
@moduledoc """
An HTTP GBFS Validator calling a third party API
"""
@timeout 15_000
@behaviour Wrapper
require Logger
def validate(url) do
with {:ok, %{status_code: 200, body: response}} <- call_api(url),
{:ok, json} <- Jason.decode(response),
{:has_errors_count, true} <- {:has_errors_count, is_integer(json["summary"]["errorsCount"])} do
{:ok,
%Summary{
has_errors: json["summary"]["hasErrors"],
errors_count: json["summary"]["errorsCount"],
version_detected: json["summary"]["version"]["detected"],
version_validated: json["summary"]["version"]["validated"],
validator_version: json["summary"]["validatorVersion"],
validator: __MODULE__
}}
else
e ->
message = "impossible to query GBFS Validator: #{inspect(e)}"
Logger.error(message)
{:error, message}
end
end
defp validator_url, do: Application.fetch_env!(:transport, :gbfs_validator_url)
defp call_api(url) do
body = Jason.encode!(%{url: url})
headers = [{"content-type", "application/json"}, {"user-agent", Application.get_env(:transport, :contact_email)}]
Transport.Shared.Wrapper.HTTPoison.impl().post(validator_url(), body, headers, recv_timeout: @timeout)
end
end
end
================================================
FILE: apps/shared/lib/validation/gtfs_validator.ex
================================================
defmodule Shared.Validation.GtfsValidator.Wrapper do
@moduledoc """
A wrapper for GtfsValidator
"""
def impl, do: Application.get_env(:transport, :gtfs_validator, Shared.Validation.GtfsValidator)
end
defmodule Shared.Validation.GtfsValidator do
@moduledoc """
GTFS validation module.
Generates a validation report when validation is done.
Validation is currently delegated to an external service called via HTTP.
"""
@behaviour Shared.Validation.Validator
alias Shared.Validation.Validator
require Logger
@timeout 180_000
@url_property_not_set_error "Property gtfs_validator_url is not set. Set it and restart server"
@doc """
Validate a given GTFS file.
GTFS must be a zip file as binary.
Returns {:ok, validation_report} if validation succeeds, with or without errors.
Returns {:error, reason} if validation cannot be done.
"""
@spec validate(binary()) :: {:ok, map()} | {:error, binary()}
def validate(gtfs),
do:
build_validate_url()
|> send_post_request(gtfs)
|> handle_validation_response()
@impl Validator
@spec validate_from_url(binary()) :: {:ok, map()} | {:error, binary()}
def validate_from_url(gtfs_url),
do:
gtfs_url
|> remote_gtfs_validation_query()
|> send_get_request()
|> handle_validation_response()
defp build_validate_url, do: gtfs_validator_base_url() <> "/validate"
def remote_gtfs_validation_query(gtfs_url) do
build_validate_url()
|> (&(&1 <> "?url=#{URI.encode_www_form(gtfs_url)}")).()
end
defp gtfs_validator_base_url do
case Application.fetch_env(:transport, :gtfs_validator_url) do
{:ok, url} -> url
_ -> raise @url_property_not_set_error
end
end
defp handle_validation_response({:ok, %{status_code: 200, body: body}}) do
case Jason.decode(body) do
{:ok, decoded} ->
{:ok, decoded}
{:error, error} ->
Logger.error(error)
{:error, "Error while decoding GTFS validator response"}
end
end
defp handle_validation_response({_, %{body: body}}) do
Logger.error(body)
{:error, "Error while requesting GTFS validator"}
end
defp handle_validation_response({:error, _}) do
{:error, "Error while requesting GTFS validator"}
end
defp http_client, do: Application.fetch_env!(:transport, :httpoison_impl)
defp send_get_request(url), do: http_client().get(url, [], recv_timeout: @timeout)
defp send_post_request(url, body), do: http_client().post(url, body, [], recv_timeout: @timeout)
end
================================================
FILE: apps/shared/lib/validation/validator.ex
================================================
defmodule Shared.Validation.Validator do
@moduledoc """
Describe the behaviour of a resource validator.
"""
# @doc """
# Validate the given resource.
# """
# # @callback validate(binary()) :: {:ok, map()} | {:error, binary()}
@doc """
Validate the resource from the given URL.
"""
@callback validate_from_url(binary()) :: {:ok, map()} | {:error, binary()}
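# Illustrative sketch of an implementation of this behaviour (hypothetical module name and
# return value), in the spirit of Shared.Validation.GtfsValidator:
#
#     defmodule MyValidator do
#       @behaviour Shared.Validation.Validator
#
#       @impl true
#       def validate_from_url(url), do: {:ok, %{"validated_url" => url}}
#     end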
end
================================================
FILE: apps/shared/lib/wrapper/wrapper_httpoison.ex
================================================
defmodule Transport.Shared.Wrapper.HTTPoison do
@moduledoc """
Temporary: an HTTPoison wrapper currently used by some modules in
order to facilitate the use of mocks.
Ultimately we will create a central HTTP behaviour with all common calls,
and stop using HTTPoison or Finch directly except in lower level parts.
"""
def impl, do: Application.get_env(:transport, :httpoison_impl)
end
================================================
FILE: apps/shared/lib/wrapper/wrapper_req.ex
================================================
defmodule Transport.Req.Behaviour do
@moduledoc """
At time of writing, Req does not introduce a behaviour allowing us to "Mox", as described here:
- https://github.com/wojtekmach/req/issues/143
- https://github.com/wojtekmach/req/issues/246
We introduce an "above-level" wrapper with only the specific bits we are interested in,
in order to allow the use of Mox during tests.
"""
# Ref: https://github.com/wojtekmach/req/blob/b40de7b7a0e7cc97a2c398ffcc42aa14962f3963/lib/req.ex#L545
@type url() :: URI.t() | String.t()
# Simplified version for our needs
@callback get(url()) :: {:ok, Req.Response.t()} | {:error, Exception.t()}
@callback get(url(), options :: keyword()) :: {:ok, Req.Response.t()} | {:error, Exception.t()}
@callback get!(url() | keyword() | Req.Request.t()) :: Req.Response.t()
@callback get!(url() | keyword() | Req.Request.t(), options :: keyword()) :: Req.Response.t()
@callback request(request :: Req.Request.t() | keyword()) ::
{:ok, Req.Response.t()} | {:error, Exception.t()}
@callback request(request :: Req.Request.t() | keyword(), options :: keyword()) ::
{:ok, Req.Response.t()} | {:error, Exception.t()}
@callback delete(url() | keyword() | Req.Request.t()) ::
{:ok, Req.Response.t()} | {:error, Exception.t()}
@callback delete(url() | keyword() | Req.Request.t(), options :: keyword()) ::
{:ok, Req.Response.t()} | {:error, Exception.t()}
end
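# Illustrative test sketch: this behaviour exists so that a Mox double can stand in for Req
# (see Transport.Req.Mock in apps/shared/test/support/mocks.ex). A test could then do roughly:
#
#     Transport.Req.Mock
#     |> Mox.expect(:get, fn _url, _options -> {:ok, %Req.Response{status: 200, body: "ok"}} end)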
defmodule Transport.Req do
@moduledoc """
The wrapper for the behaviour, which acts as a central access point for `Req` operations.
By default the implementation is the module itself, which delegates to `Req` directly.
During tests, a Mox mock is configured instead.
"""
def impl, do: Application.get_env(:transport, :req_impl, __MODULE__)
@behaviour Transport.Req.Behaviour
defdelegate get(url, options \\ []), to: Req
defdelegate get!(url, options \\ []), to: Req
defdelegate request(request, options \\ []), to: Req
defdelegate delete(request, options \\ []), to: Req
end
defmodule Transport.HTTPClient do
@moduledoc """
An experimental higher-level wrapper client around Req that we can Mox, supporting
easy opt-in caching (crucial when working with large HTTP payloads during local development).
"""
def get!(url, options) do
{req, options} = setup_cache(options)
Transport.Req.impl().get!(req, options |> Keyword.merge(url: url))
end
def get(url, options) do
{req, options} = setup_cache(options)
Transport.Req.impl().get(req, options |> Keyword.merge(url: url))
end
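# Illustrative usage sketch, based on the options accepted by setup_cache/1 below
# (the URL and cache directory are made-up values):
#
#     Transport.HTTPClient.get!("https://example.com/data.csv",
#       enable_cache: true,
#       custom_cache_dir: "tmp-http-cache",
#       decode_body: false
#     )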
defp setup_cache(options) do
options =
Keyword.validate!(options, [
:custom_cache_dir,
:decode_body,
:compressed,
:into,
enable_cache: false
])
req = Req.new()
{enable_cache, options} = options |> Keyword.pop!(:enable_cache)
if enable_cache do
{req |> Transport.Shared.ReqCustomCache.attach(), options}
else
{req, options}
end
end
end
================================================
FILE: apps/shared/lib/wrapper_ex_aws.ex
================================================
defmodule Transport.Wrapper.ExAWS do
@moduledoc """
Central access point for the ExAWS behaviour defined at
https://github.com/ex-aws/ex_aws/blob/master/lib/ex_aws/behaviour.ex
in order to provide easy mocking during tests.
"""
def impl, do: Application.get_env(:transport, :ex_aws_impl)
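# Illustrative usage sketch: callers go through impl() so that tests can substitute a mock
# (see Transport.Test.S3TestUtils, which sets expectations on Transport.ExAWS.Mock):
#
#     Transport.Wrapper.ExAWS.impl().request!(ExAws.S3.list_buckets())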
end
================================================
FILE: apps/shared/meta/schema-irve-dynamique.json
================================================
{
"$schema": "https://frictionlessdata.io/schemas/table-schema.json",
"name": "schema-irve-dynamique",
"title": "IRVE dynamique",
"description": "Spécification du fichier d'échange relatif aux données concernant la localisation géographique et les caractéristiques techniques des stations et des points de recharge pour véhicules électriques",
"countryCode": "FR",
"homepage": "https://github.com/etalab/schema-irve",
"path": "https://raw.githubusercontent.com/etalab/schema-irve/v2.3.0/dynamique/schema-dynamique.json",
"resources": [
{
"title": "Exemple de fichier IRVE valide",
"path": "https://raw.githubusercontent.com/etalab/schema-irve/v2.3.0/dynamique/exemple-valide-dynamique.csv"
}
],
"sources": [
{
"title": "Décret n° 2017-26 du 12 janvier 2017 relatif aux infrastructures de recharge pour véhicules électriques et portant diverses mesures de transposition de la directive 2014/94/UE du Parlement européen et du Conseil du 22 octobre 2014 sur le déploiement d’une infrastructure pour carburants alternatifs",
"path": "https://www.legifrance.gouv.fr/jo_pdf.do?id=JORFTEXT000033860620"
},
{
"title": "Arrêté du 12 janvier 2017 relatif aux données concernant la localisation géographique et les caractéristiques techniques des stations et des points de recharge pour véhicules électriques",
"path": "https://www.legifrance.gouv.fr/jo_pdf.do?id=JORFTEXT000033860733"
},
{
"title": "Arrêté du 12 janvier 2017 précisant les dispositions relatives aux identifiants des unités d’exploitation pour la recharge des véhicules électriques",
"path": "https://www.legifrance.gouv.fr/jo_pdf.do?id=JORFTEXT000033860743"
},
{
"title": "Définition et structure des identifiants attribués par l'Association Française pour l'Itinérance de la Recharge Electrique des Véhicules (AFIREV)",
"path": "http://www.afirev.fr/fr/informations-generales/"
}
],
"created": "2022-10-28",
"lastModified": "2024-01-16",
"version": "v2.3.0",
"contributors": [
{
"title": "Geoffrey Aldebert",
"email": "geoffrey.aldebert@data.gouv.fr",
"organisation": "Etalab",
"role": "author"
},
{
"title": "Christina Laumond",
"email": "christina.laumond@beta.gouv.fr",
"organisation": "transport.data.gouv.fr",
"role": "author"
},
{
"title": "Thibaut Barrère",
"email": "thibaut.barrere@beta.gouv.fr",
"organisation": "transport.data.gouv.fr",
"role": "author"
}
],
"fields": [
{
"name": "id_pdc_itinerance",
"description": "L'identifiant du point de recharge, tel qu'apparaissant dans le schéma statique. Doit permettre de faire le lien entre le dynamique et le statique.",
"example": "FRA68E680210015",
"type": "string",
"constraints": {
"required": true,
"pattern": "^[A-Z]{2}[A-Z0-9]{4,33}$"
}
},
{
"name": "etat_pdc",
"description": "`etat_pdc` caractérise l’état de fonctionnement du point de recharge : est-il en service ou hors service ? En l’absence d’information, etat_pdc sera égal à ‘inconnu’.",
"example": "en_service",
"type": "string",
"constraints": {
"required": true,
"enum": [
"en_service",
"hors_service",
"inconnu"
]
}
},
{
"name": "occupation_pdc",
"description": "`occupation_pdc` caractérise l’occupation du point de recharge : est-il libre, occupé ou réservé ? En l’absence d’information, occupation_pdc sera égal à ‘inconnu’.",
"example": "occupe",
"type": "string",
"constraints": {
"required": true,
"enum": [
"libre",
"occupe",
"reserve",
"inconnu"
]
}
},
{
"name": "horodatage",
"description": "Indique la date et heure de remontée de l’information publiée, formaté selon la norme ISO 8601",
"example": "2023-01-30T10:27:50+01:00",
"type": "datetime",
"constraints": {
"required": true
}
},
{
"name": "etat_prise_type_2",
"description": "`etat_prise_type_2` indique l’état de fonctionnement du connecteur T2 : est-il fonctionnel ou hors-service ? En l’absence d’information, indiquer ‘inconnu’. En l’absence de connecteur de ce type sur le point de recharge, laisser une chaîne de caractère vide.",
"example": "fonctionnel",
"type": "string",
"constraints": {
"required": false,
"enum": [
"fonctionnel",
"hors_service",
"inconnu"
]
}
},
{
"name": "etat_prise_type_combo_ccs",
"description": "`etat_prise_type_combo_ccs` indique l’état de fonctionnement du connecteur Combo CCS : est-il fonctionnel ou hors-service ? En l’absence d’information, indiquer ‘inconnu’. En l’absence de connecteur de ce type sur le point de recharge, laisser une chaîne de caractère vide.",
"example": "hors_service",
"type": "string",
"constraints": {
"required": false,
"enum": [
"fonctionnel",
"hors_service",
"inconnu"
]
}
},
{
"name": "etat_prise_type_chademo",
"description": "`etat_prise_type_chademo` indique l’état de fonctionnement du connecteur Chademo : est-il fonctionnel ou hors-service ? En l’absence d’information, indiquer ‘inconnu’. En l’absence de connecteur de ce type sur le point de recharge, laisser une chaîne de caractère vide.",
"example": "inconnu",
"type": "string",
"constraints": {
"required": false,
"enum": [
"fonctionnel",
"hors_service",
"inconnu"
]
}
},
{
"name": "etat_prise_type_ef",
"description": "`etat_prise_type_ef` indique l’état de fonctionnement du connecteur EF : est-il fonctionnel ou hors-service ? En l’absence d’information, indiquer ‘inconnu’. En l’absence de connecteur de ce type sur le point de recharge, laisser une chaîne de caractère vide.",
"example": "fonctionnel",
"type": "string",
"constraints": {
"required": false,
"enum": [
"fonctionnel",
"hors_service",
"inconnu"
]
}
}
],
"missingValues": [
""
]
}
================================================
FILE: apps/shared/meta/schema-irve-statique.json
================================================
{
"$schema": "https://frictionlessdata.io/schemas/table-schema.json",
"name": "schema-irve-statique",
"title": "IRVE statique",
"description": "Spécification du fichier d'échange relatif aux données concernant la localisation géographique et les caractéristiques techniques des stations et des points de recharge pour véhicules électriques",
"countryCode": "FR",
"homepage": "https://github.com/etalab/schema-irve",
"path": "https://raw.githubusercontent.com/etalab/schema-irve/v2.3.0/statique/schema-statique.json",
"resources": [
{
"title": "Exemple de fichier IRVE valide",
"path": "https://raw.githubusercontent.com/etalab/schema-irve/v2.3.0/statique/exemple-valide-statique.csv"
}
],
"sources": [
{
"title": "Décret n° 2017-26 du 12 janvier 2017 relatif aux infrastructures de recharge pour véhicules électriques et portant diverses mesures de transposition de la directive 2014/94/UE du Parlement européen et du Conseil du 22 octobre 2014 sur le déploiement d’une infrastructure pour carburants alternatifs",
"path": "https://www.legifrance.gouv.fr/jo_pdf.do?id=JORFTEXT000033860620"
},
{
"title": "Arrêté du 12 janvier 2017 relatif aux données concernant la localisation géographique et les caractéristiques techniques des stations et des points de recharge pour véhicules électriques",
"path": "https://www.legifrance.gouv.fr/jo_pdf.do?id=JORFTEXT000033860733"
},
{
"title": "Arrêté du 12 janvier 2017 précisant les dispositions relatives aux identifiants des unités d’exploitation pour la recharge des véhicules électriques",
"path": "https://www.legifrance.gouv.fr/jo_pdf.do?id=JORFTEXT000033860743"
},
{
"title": "Définition et structure des identifiants attribués par l'Association Française pour l'Itinérance de la Recharge Electrique des Véhicules (AFIREV)",
"path": "http://www.afirev.fr/fr/informations-generales/"
}
],
"created": "2018-06-29",
"lastModified": "2022-10-10",
"version": "v2.3.0",
"contributors": [
{
"title": "Alexandre Bulté",
"email": "validation@data.gouv.fr",
"organisation": "Etalab",
"role": "author"
},
{
"title": "Charles Nepote",
"email": "charles.nepote@fing.org",
"role": "contributor"
},
{
"title": "Pierre Dittgen",
"email": "pierre.dittgen@jailbreak.paris",
"organisation": "Jailbreak",
"role": "contributor"
},
{
"title": "Johan Richer",
"email": "johan.richer@jailbreak.paris",
"organisation": "Jailbreak",
"role": "contributor"
},
{
"title": "Geoffrey Aldebert",
"email": "geoffrey.aldebert@data.gouv.fr",
"organisation": "Etalab",
"role": "contributor"
},
{
"title": "Julien Barreteau",
"email": "julien.barreteau@developpement-durable.gouv.fr",
"organisation": "DGEC",
"role": "contributor"
},
{
"title": "Antoine Augusti",
"email": "antoine.augusti@transport.data.gouv.fr",
"organisation": "DINUM",
"role": "contributor"
},
{
"title": "Christina Laumond",
"email": "christina.laumond@transport.data.gouv.fr",
"organisation": "transport.data.gouv.fr",
"role": "contributor"
},
{
"title": "Thibaut Barrère",
"email": "thibaut.barrere@transport.data.gouv.fr",
"organisation": "transport.data.gouv.fr",
"role": "contributor"
},
{
"title": "Francis Chabouis",
"email": "francis.chabouis@beta.gouv.fr",
"organisation": "transport.data.gouv.fr",
"role": "contributor"
}
],
"fields": [
{
"name": "nom_amenageur",
"description": "La dénomination sociale du nom de l'aménageur, c'est à dire de l'entité publique ou privée propriétaire des infrastructures. Vous pouvez accéder à cette dénomination exacte sur le site annuaire-entreprises.data.gouv.fr. Ce champs n'est pas obligatoire car il sera automatiquement renseigné lors de la constitution du fichier global de consolidation des IRVE.",
"example": "Société X, Entité Y",
"type": "string",
"constraints": {
"required": false
}
},
{
"name": "siren_amenageur",
"description": "Le numero SIREN de l'aménageur issue de la base SIRENE des entreprises. Vous pouvez récupérer cet identifiant sur le site annuaire-entreprises.data.gouv.fr.",
"example": "130025265",
"type": "string",
"constraints": {
"required": false,
"pattern": "^\\d{9}$"
}
},
{
"name": "contact_amenageur",
"description": "Adresse courriel de l'aménageur. Favoriser les adresses génériques de contact. Cette adresse sera utilisée par les services de l'Etat en cas d'anomalie ou de besoin de mise à jour des données.",
"example": "contact@societe-amenageur.com",
"type": "string",
"format": "email",
"constraints": {
"required": false
}
},
{
"name": "nom_operateur",
"description": "La dénomination sociale de l'opérateur. L'opérateur est la personne qui exploite l'infrastructure de recharger pour le compte d'un aménageur dans le cadre d'un contrat ou pour son propre compte s'il est l'aménageur. Vous pouvez accéder à cette dénomination exacte sur le site annuaire-entreprises.data.gouv.fr.",
"example": "Société X, Entité Y",
"type": "string",
"constraints": {
"required": false
}
},
{
"name": "contact_operateur",
"description": "Adresse courriel de l'opérateur. Favoriser les adresses génériques de contact.",
"example": "contact@societe-operateur.com",
"type": "string",
"format": "email",
"constraints": {
"required": true
}
},
{
"name": "telephone_operateur",
"description": "Numéro de téléphone permettant de contacter l'opérateur.",
"example": "0111111111",
"type": "string",
"constraints": {
"required": false
}
},
{
"name": "nom_enseigne",
"description": "Le nom commercial du réseau.",
"example": "Réseau de recharge ABC",
"type": "string",
"constraints": {
"required": true
}
},
{
"name": "id_station_itinerance",
"description": "L'identifiant de la station délivré selon les modalités définies à l'article 10 du décret n° 2017-26 du 12 janvier 2017. Cet ID débute par FR suivi de 3 caractères délivrés par l'AFIREV, suivi de \"P\" pour \"pool\" qui veut dire \"station\" en anglais (https://afirev.fr/fr/informations-generales/). Ne pas ajouter les séparateurs *. Si la station n'est pas en itinérance, merci d'indiquer \"Non concerné\".",
"example": "FRA68P68021001",
"type": "string",
"constraints": {
"required": true,
"pattern": "(?:(?:^|,)(^[A-Z]{2}[A-Z0-9]{4,33}$|Non concerné))+$"
}
},
{
"name": "id_station_local",
"description": "Identifiant de la station utilisé localement. Si vous n'avez pas d'identifiant unique et que vous souhaitez en récupérer un, vous pouvez vous rendre sur https://heidi.app.etalab.studio. En cas de station qui n'est pas ouverte à l'itinérance, cet identifiant est indispensable.",
"example": "01F2KMMRZVQ5FQY882PCJQAPQ0",
"type": "string",
"constraints": {
"required": false
}
},
{
"name": "nom_station",
"description": "Le nom de la station.",
"example": "Picpus, Belleville, Villiers",
"type": "string",
"constraints": {
"required": true
}
},
{
"name": "implantation_station",
"description": "Le type d'implantation de la station. \nPour une station sur voirie, sélectionner \"Voirie\". \nPour un parking public, (en ouvrage, en enclos, etc.), sélectionner \"Parking public\". Ces parkings sont généralement en gestion directe ou déléguée de la collectivité territoriale. \nPour un parking privé à usage public / parking ouvert (ex: zone commerciale), sélectionner \"Parking privé à usage public\". \nPour un parking réservé à la clientèle, aux usagers, sélectionnez \"Parking privé réservé à la clientèle\" (il s'agit d'un choix de l'aménageur public ou privé sur l'utilisation de son parking. Ex: hôtel, commerce, éventuellement certains bâtiments publics). \nPour une station dédiée à la recharge rapide dont la durée d'utilisation est généralement limitée à la recharge principalement rapide et dont la première fonction de la station est la recharge et non le stationnement, sélectionner \"Station dédiée à la recharge rapide\" (ex : aires de service sur axes routiers, stations services, etc.).",
"example": "Parking public",
"type": "string",
"constraints": {
"required": true,
"enum": [
"Voirie",
"Parking public",
"Parking privé à usage public",
"Parking privé réservé à la clientèle",
"Station dédiée à la recharge rapide"
]
}
},
{
"name": "adresse_station",
"description": "L'adresse complète de la station : [numéro] [rue] [code postal] [ville].",
"example": "1 avenue de la Paix 75001 Paris",
"type": "string",
"constraints": {
"required": true
}
},
{
"name": "code_insee_commune",
"description": "Le code INSEE de la commune d'implantation.",
"example": "21231",
"type": "string",
"constraints": {
"pattern": "^([013-9]\\d|2[AB1-9])\\d{3}$",
"required": false
}
},
{
"name": "coordonneesXY",
"description": "La longitude suivie de la latitude en degrés décimaux (point comme séparateur décimal) de la localisation de la station exprimée dans le système de coordonnées WGS84 au format [lon,lat].",
"example": "[7.48710500,48.345345]",
"type": "geopoint",
"format": "array",
"constraints": {
"required": true
}
},
{
"name": "nbre_pdc",
"description": "Le nombre de points de recharge sur la station.",
"example": "3",
"type": "integer",
"constraints": {
"required": true,
"minimum": 0
}
},
{
"name": "id_pdc_itinerance",
"description": "L'identifiant du point de recharge délivré selon les modalités définies à l'article 10 du décret n° 2017-26 du 12 janvier 2017. Cet ID débute par FR suivi de 3 caractères délivrés par l'AFIREV, suivi de \"E\" pour l'équivalent du point de recharge en anglais EVSE - Electric Vehicule Supply Equipment (https://afirev.fr/fr/informations-generales/). Ne pas mettre de séparateur * ou -. Si le point de recharge n'est pas en itinérance, merci d'indiquer \"Non concerné\".",
"example": "FRA68E680210015",
"type": "string",
"constraints": {
"required": true,
"pattern": "(?:(?:^|,)(^[A-Z]{2}[A-Z0-9]{4,33}$|Non concerné))+$"
}
},
{
"name": "id_pdc_local",
"description": "Identifiant du point de recharge utilisé localement. Si vous n'avez pas d'identifiant unique et que vous souhaitez en récupérer un, vous pouvez vous rendre sur https://heidi.app.etalab.studio. En cas de point de recharge qui n'est pas ouvert à l'itinérance, cet identifiant est indispensable.",
"example": "01F2KNFARDSJG7KEH1YHG4033M",
"type": "string",
"constraints": {
"required": false
}
},
{
"name": "puissance_nominale",
"description": "Puissance maximale en kW que peut recevoir un véhicule connecté au point de recharge, déterminée en prenant en compte les capacités techniques propres du point, la puissance souscrite au réseau de distribution et les caractéristiques de l'installation comme le câblage par exemple, mais sans prendre en compte ni les limitations du connecteur ni celles du véhicule.",
"example": "22.00",
"type": "number",
"constraints": {
"required": true,
"minimum": 0
}
},
{
"name": "prise_type_ef",
"description": "Disponibilité d'une prise de type E/F sur le point de recharge. Indiquer \"true\" si vrai, \"false\" si faux.",
"example": false,
"type": "boolean",
"constraints": {
"required": true
}
},
{
"name": "prise_type_2",
"description": "Disponibilité d'une prise de type 2 sur le point de recharge. Indiquer \"true\" si vrai, \"false\" si faux.",
"example": false,
"type": "boolean",
"constraints": {
"required": true
}
},
{
"name": "prise_type_combo_ccs",
"description": "Disponibilité d'une prise de type Combo / CCS sur le point de recharge. Indiquer \"true\" si vrai, \"false\" si faux.",
"example": false,
"type": "boolean",
"constraints": {
"required": true
}
},
{
"name": "prise_type_chademo",
"description": "Disponibilité d'une prise de type Chademo sur le point de recharge. Indiquer \"true\" si vrai, \"false\" si faux.",
"example": false,
"type": "boolean",
"constraints": {
"required": true
}
},
{
"name": "prise_type_autre",
"description": "Disponibilité d'une prise d'un autre type sur le point de recharge. Indiquer \"true\" si vrai, \"false\" si faux. Le ou les types de prises sont à préciser dans le champs \"observations\".",
"example": false,
"type": "boolean",
"constraints": {
"required": true
}
},
{
"name": "gratuit",
"description": "Gratuité de la recharge. Indiquer \"true\" si le point de recharge est gratuit sans condition d'utilisation, \"false\" si faux.",
"example": false,
"type": "boolean",
"constraints": {
"required": false
}
},
{
"name": "paiement_acte",
"description": "Possibilité de paiement à l'acte (sans identification ni abonnement). Indiquer \"true\" si vrai, \"false\" si faux.",
"example": false,
"type": "boolean",
"constraints": {
"required": true
}
},
{
"name": "paiement_cb",
"description": "Possibilité de paiement par carte bancaire (présence d'un terminal de paiement avec une CB). Indiquer \"true\" si vrai, \"false\" si faux.",
"example": false,
"type": "boolean",
"constraints": {
"required": false
}
},
{
"name": "paiement_autre",
"description": "Possibilité de paiement par un autre moyen (qui peut être précisé dans le champs \"observation\". Indiquer \"true\" si vrai, \"false\" si faux.",
"example": false,
"type": "boolean",
"constraints": {
"required": false
}
},
{
"name": "tarification",
"description": "Toutes informations pouvant être apportées concernant les tarification(s) pratiquée(s).",
"example": "0,40€ / kwh pour les non abonnés.",
"type": "string",
"constraints": {
"required": false
}
},
{
"name": "condition_acces",
"description": "Éventuelles conditions d’accès à la station, hors gabarit. Dans le cas d'un accès libre sans contrainte matérielle physique (ex : absence de barrière) ni restriction d'usager (ex : borne accessible pour n'importe quel type et modèle de voiture électrique), indiquer \"Accès libre\". \nDans le cas d'un accès limité / réservé qui nécessite une identification ou passage d'une barrière, indiquer \"Accès réservé\" (ce type d'accès inclut les IRVE sur le réseau autoroutier payant - passage péage).",
"example": "Accès libre",
"type": "string",
"constraints": {
"required": true,
"enum": [
"Accès libre",
"Accès réservé"
]
}
},
{
"name": "reservation",
"description": "Possibilité de réservation à l'avance d'un point de recharge. Indiquer \"true\" si vrai, \"false\" si faux.",
"example": false,
"type": "boolean",
"constraints": {
"required": true
}
},
{
"name": "horaires",
"description": "Amplitude d’ouverture de la station. Si ouverte sans interruption indiquer « 24/7» sinon indiquer les horaires sous cette forme : Mo-Fr 08:00-12:00,Mo-Fr 14:00-18:00,Th 08:00-18:00.",
"example": "Mo-Fr 08:00-12:00,Mo-Fr 14:00-18:00,Th 08:00-18:00",
"type": "string",
"constraints": {
"required": true,
"pattern": "(.*?)((\\d{1,2}:\\d{2})-(\\d{1,2}:\\d{2})|24/7)"
}
},
{
"name": "accessibilite_pmr",
"description": "Accessibilité du point de recharge aux personnes à mobilité réduite. Dans le cas d'un point de recharge signalisé et réservé PMR, indiquer \"Réservé PMR\". \nDans le cas d'une point de recharge non réservé PMR mais accessible PMR, indiquer \"Accessible mais non réservé PMR\". \nDans le cas d'un point de recharge non accessible PMR, indiquer \"Non accessible\"",
"example": "Accessible mais non réservé PMR",
"type": "string",
"constraints": {
"required": true,
"enum": [
"Réservé PMR",
"Accessible mais non réservé PMR",
"Non accessible",
"Accessibilité inconnue"
]
}
},
{
"name": "restriction_gabarit",
"description": "Toutes informations relatives aux restrictions d’accès liées au gabarit des véhicules.",
"example": "Hauteur maximale 2m",
"type": "string",
"constraints": {
"required": true
}
},
{
"name": "station_deux_roues",
"description": "La station est-elle réservée aux véhicules à deux roues ? Indiquer \"true\" si vrai, \"false\" si faux.",
"example": false,
"type": "boolean",
"constraints": {
"required": true
}
},
{
"name": "raccordement",
"description": "Type de raccordement de la station au réseau de distribution d'électricité : direct (point de livraison exclusivement dédié à la station) ou indirect.",
"example": "Direct",
"type": "string",
"constraints": {
"required": false,
"enum": [
"Direct",
"Indirect"
]
}
},
{
"name": "num_pdl",
"description": "Numéro du point de livraison d'électricité, y compris en cas de raccordement indirect. Dans le cas d'un territoire desservi par ENEDIS, ce numéro doit compoter 14 chiffres.",
"example": "12345678912345",
"type": "string",
"constraints": {
"required": false
}
},
{
"name": "date_mise_en_service",
"description": "Date de mise en service de la station",
"example": "2021-12-30",
"type": "date",
"format": "%Y-%m-%d",
"constraints": {
"required": false
}
},
{
"name": "observations",
"description": "Champ destiné à préciser les modalités d'accès à la recharge, l'accessibilité, le tarif, les horaires d'ouverture, ...",
"example": "Recharge uniquement disponible pendant les horaires d'ouverture du Centre Commercial XY",
"type": "string",
"constraints": {
"required": false
}
},
{
"name": "date_maj",
"description": "Date de mise à jour des données",
"example": "2021-12-30",
"type": "date",
"format": "%Y-%m-%d",
"constraints": {
"required": true
}
},
{
"name": "cable_t2_attache",
"description": "Champ destiné à préciser si un câble T2 est attaché au point de recharge ou non. Indiquer \"true\" si vrai, \"false\" si faux",
"example": "false",
"type": "boolean",
"constraints": {
"required": false
}
}
],
"missingValues": [
""
]
}
================================================
FILE: apps/shared/mix.exs
================================================
defmodule Shared.MixProject do
use Mix.Project
def project do
[
app: :shared,
version: "0.1.0",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.8",
start_permanent: Mix.env() == :prod,
deps: deps(),
elixirc_paths: elixirc_paths(Mix.env()),
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
coveralls: :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test
],
listeners: [Phoenix.CodeReloader]
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
def application do
[
mod: {Shared.Application, []},
extra_applications: [:logger]
]
end
defp deps do
[
{:timex, ">= 0.0.0"},
{:httpoison, ">= 0.0.0"},
{:req, "~> 0.5"},
{:bypass, "~> 2.1", only: :test},
{:mox, "~> 1.1", only: :test},
# Mint is used by our HttpStream shared component, so we add an explicit dependency
{:mint, "~> 1.2"},
# Finch is used for built-in streaming
{:finch, "~> 0.8"},
# Required for the ConditionalJSONEncoder shared component, but
# there is probably a way to avoid that?
{:phoenix, "~> 1.8.3"},
# The global app config references Sentry.LoggerBackend. We add it in "shared"
# as an implicit dependency, to ensure `Sentry.LoggerBackend` is always defined,
# otherwise running tests for an individual umbrella sub-app would raise an error.
# A better way to achieve this would be to configure it at runtime, as described
# in https://github.com/getsentry/sentry-elixir/pull/472.
{:sentry, "~> 11.0"},
# Similarly, Jason is configured as `json_library` by the main app, so it will
# be required no matter what.
{:jason, ">= 0.0.0"},
{:ex_cldr_numbers, "~> 2.0"},
{:ex_cldr_calendars, "~> 2.4"},
{:ex_cldr_lists, "~> 2.11"},
{:ex_cldr_units, "~> 3.17"},
{:ex_cldr_dates_times, "~> 2.0"},
{:ex_aws, "~> 2.1"},
{:ex_aws_s3, "~> 2.0"},
{:cachex, "~> 4.1"},
{:ex_json_schema, "~> 0.10"},
# added because of `TransportWeb.Plugs.AppSignalFilter`
{:appsignal, "~> 2.0"},
{:appsignal_phoenix, "~> 2.0"},
{:dialyxir, "~> 1.2", only: [:dev, :test], runtime: false},
{:excoveralls, "~> 0.10", only: :test}
]
end
end
================================================
FILE: apps/shared/test/data_visualization_test.exs
================================================
defmodule Transport.DataVisualizationTest do
use ExUnit.Case
alias Transport.DataVisualization
doctest Transport.DataVisualization
setup do
Mox.stub_with(Transport.DataVisualization.Mock, Transport.DataVisualization.Impl)
:ok
end
@validations %{
"ExcessiveSpeed" => [
%{
"details" => "computed speed between the stops is 325858.52 km/h (5430975 m travelled in 60 seconds)",
"geojson" => %{
"features" => [
%{
"geometry" => %{
"coordinates" => [2.449186, 48.796058],
"type" => "Point"
},
"properties" => %{"id" => "near1", "name" => "Near1"},
"type" => "Feature"
},
%{
"geometry" => %{"coordinates" => [0.0, 0.0], "type" => "Point"},
"properties" => %{"id" => "null", "name" => "Null Island"},
"type" => "Feature"
},
%{
"geometry" => %{
"coordinates" => [[2.449186, 48.796058], [0.0, 0.0]],
"type" => "LineString"
},
"properties" => %{
"details" => "computed speed between the stops is 325858.52 km/h (5430975 m travelled in 60 seconds)"
},
"type" => "Feature"
}
],
"type" => "FeatureCollection"
},
"issue_type" => "ExcessiveSpeed",
"object_id" => "near1",
"object_name" => "Near1",
"object_type" => "Stop",
"related_objects" => [
%{"id" => "null", "name" => "Null Island", "object_type" => "Stop"},
%{"id" => "route1", "name" => "100", "object_type" => "Route"}
],
"severity" => "Information"
}
]
}
describe "test the data visualization creation" do
test "simple data vis from validation only" do
# We want to make sure the new method to create the data_vis
# from the validation only has an output similar to the previous method
data_vis = DataVisualization.validation_data_vis(@validations)
assert data_vis |> Map.keys() == ["ExcessiveSpeed"]
excessive_speed = data_vis |> Map.fetch!("ExcessiveSpeed")
assert excessive_speed |> Map.keys() == ["geojson", "severity"]
features = excessive_speed |> Map.fetch!("geojson") |> Map.fetch!("features")
assert features |> Enum.frequencies_by(fn %{"geometry" => %{"type" => type}} -> type end) == %{
"Point" => 2,
"LineString" => 1
}
[line_string] = features |> Enum.filter(fn %{"geometry" => %{"type" => type}} -> type == "LineString" end)
assert line_string["properties"]["details"] |> String.contains?("computed speed between the stops")
end
end
end
================================================
FILE: apps/shared/test/date_time_display_test.exs
================================================
defmodule Shared.DateTimeDisplayTest do
use ExUnit.Case
doctest Shared.DateTimeDisplay, import: true
end
================================================
FILE: apps/shared/test/hasher_test.exs
================================================
defmodule HasherTest do
use ExUnit.Case, async: true
import Transport.Test.TestUtils, only: [zip_metadata: 0]
doctest Hasher, import: true
@expected_hash zip_metadata()
|> Enum.map(&(&1["file_name"] <> &1["sha256"]))
|> Enum.sort()
|> Hasher.compute_checksum(:sha256)
test "test data is stable" do
assert @expected_hash == "5609a4551776836f9402cf8f37cc75a8fc115999fc038dad0b967cf9c5bd134f"
end
test "hash by streaming a local file" do
content = "coucou"
hash = :sha256 |> :crypto.hash(content) |> Base.encode16() |> String.downcase()
file_path = System.tmp_dir!() |> Path.join("coucou_file")
File.write!(file_path, content)
assert Hasher.get_file_hash(file_path) == hash
end
describe "hashing zip metadata" do
test "it works" do
assert Hasher.zip_hash(zip_metadata()) == @expected_hash
end
test "can shuffle zip files" do
assert Hasher.zip_hash(Enum.shuffle(zip_metadata())) == @expected_hash
end
test "can handle keys with atoms or strings" do
assert Map.has_key?(zip_metadata() |> Enum.random(), "sha256")
zip_metadata_atom_keys = zip_metadata() |> Enum.map(&to_atom_keys(&1))
assert Map.has_key?(zip_metadata_atom_keys |> Enum.random(), :sha256)
assert Hasher.zip_hash(zip_metadata_atom_keys) == @expected_hash
end
test "hash changes" do
refute Hasher.zip_hash(zip_metadata() |> Enum.take(2)) == @expected_hash
end
end
defp to_atom_keys(map) do
map |> Map.new(fn {k, v} -> {String.to_atom(k), v} end)
end
end
================================================
FILE: apps/shared/test/helpers/helpers_test.exs
================================================
defmodule Helpers.HelpersTest do
use ExUnit.Case
doctest Helpers, import: true
test "last_updated" do
assert nil == Helpers.last_updated([])
assert "2023-01-15T05:33:47Z" ==
Helpers.last_updated([
%DB.Resource{last_update: ~U[2023-01-15 05:33:47Z]},
%DB.Resource{last_update: ~U[2022-03-30 05:33:47Z]}
])
end
end
================================================
FILE: apps/shared/test/http_stream_v2_test.exs
================================================
defmodule HTTPStreamV2.Test do
use ExUnit.Case, async: true
setup do
bypass = Bypass.open()
{:ok, bypass: bypass}
end
test "streams the content and compute expected information", %{bypass: bypass} do
Bypass.expect_once(bypass, "GET", "/", fn conn ->
conn
|> Plug.Conn.put_resp_header("hello", "header")
|> Plug.Conn.resp(200, "Contenu classique")
end)
url = "http://localhost:#{bypass.port}/"
{:ok, result} = HTTPStreamV2.fetch_status_and_hash(url)
assert result.status == 200
assert result.hash == :sha256 |> :crypto.hash("Contenu classique") |> Base.encode16() |> String.downcase()
assert result.body_byte_size == "Contenu classique" |> byte_size()
headers =
result.headers
|> Enum.filter(fn {key, _val} -> key == "hello" end)
assert headers == [{"hello", "header"}]
end
test "streams the content and compute expected information after a redirect", %{bypass: bypass} do
url = "http://localhost:#{bypass.port}/"
# the content is accessible after 2 successive redirects
Bypass.expect(bypass, "GET", "/", fn conn ->
conn
|> Plug.Conn.put_resp_header("location", "#{url}page1")
|> Plug.Conn.resp(301, "")
end)
Bypass.expect(bypass, "GET", "/page1", fn conn ->
conn
|> Plug.Conn.put_resp_header("location", "#{url}page2")
|> Plug.Conn.resp(301, "")
end)
Bypass.expect(bypass, "GET", "/page2", fn conn ->
conn
|> Plug.Conn.put_resp_header("hello", "header")
|> Plug.Conn.resp(200, "hello world")
end)
{:ok, result} = HTTPStreamV2.fetch_status_and_hash(url)
assert result.status == 200
assert result.hash == :sha256 |> :crypto.hash("hello world") |> Base.encode16() |> String.downcase()
assert result.body_byte_size == "hello world" |> byte_size()
headers =
result.headers
|> Enum.filter(fn {key, _val} -> key == "hello" end)
assert headers == [{"hello", "header"}]
# we get an error if there are too many redirects
{:error, "maximum number of redirect reached"} = HTTPStreamV2.fetch_status_and_hash(url, _max_redirect = 1)
end
test "hashing works with url containing caracaters that need to be encoded", %{bypass: bypass} do
# "|" needs to be encoded or we'll get a %Mint.HTTPError
url = "http://localhost:#{bypass.port}/?ville=paris|berlin"
Bypass.expect(bypass, "GET", "/", fn conn ->
conn
|> Plug.Conn.resp(200, "2 belles villes")
end)
{:ok, result} = HTTPStreamV2.fetch_status_and_hash(url)
assert result.status == 200
end
describe "get a request status by streaming it" do
test "simple 200 response", %{bypass: bypass} do
Bypass.expect(bypass, "GET", "/", fn conn ->
conn
|> Plug.Conn.put_resp_header("hello", "header")
|> Plug.Conn.resp(200, "Contenu de la réponse")
end)
url = "http://localhost:#{bypass.port}/"
result = HTTPStreamV2.fetch_status(url)
assert result == {:ok, %{status: 200}}
result_follow_redirect = HTTPStreamV2.fetch_status_follow_redirect(url)
assert result_follow_redirect == {:ok, 200}
end
test "redirect response", %{bypass: bypass} do
url = "http://localhost:#{bypass.port}/"
Bypass.expect(bypass, "GET", "/", fn conn ->
conn
|> Plug.Conn.put_resp_header("Location", "#{url}here")
|> Plug.Conn.resp(301, "va voir ailleurs si j'y suis")
end)
Bypass.expect_once(bypass, "GET", "/here", fn conn ->
conn
|> Plug.Conn.resp(200, "gagné")
end)
result = HTTPStreamV2.fetch_status(url)
assert result == {:ok, %{status: 301, location: "#{url}here"}}
# get the status following redirection
result_follow_redirect = HTTPStreamV2.fetch_status_follow_redirect(url)
assert result_follow_redirect == {:ok, 200}
end
test "more redirects than allowed", %{bypass: bypass} do
url = "http://localhost:#{bypass.port}/"
# setup a test with 2 successive redirects
Bypass.expect(bypass, "GET", "/", fn conn ->
conn
|> Plug.Conn.put_resp_header("Location", "#{url}1")
|> Plug.Conn.resp(301, "")
end)
Bypass.expect(bypass, "GET", "/1", fn conn ->
conn
|> Plug.Conn.put_resp_header("Location", "#{url}2")
|> Plug.Conn.resp(301, "")
end)
Bypass.expect(bypass, "GET", "/2", fn conn ->
conn
|> Plug.Conn.resp(404, "")
end)
# test with 1 redirect allowed
assert {:error, "maximum number of redirect reached"} == HTTPStreamV2.fetch_status_follow_redirect(url, 1)
# test with 2 redirects allowed
assert {:ok, 404} == HTTPStreamV2.fetch_status_follow_redirect(url, 2)
end
test "redirect response, but location header not provided", %{bypass: bypass} do
Bypass.expect(bypass, "GET", "/", fn conn ->
conn
|> Plug.Conn.resp(301, "Mais ou est le header ?")
end)
url = "http://localhost:#{bypass.port}/"
assert {:ok, 301} == HTTPStreamV2.fetch_status_follow_redirect(url)
end
end
end
================================================
FILE: apps/shared/test/resource_schema_test.exs
================================================
defmodule Transport.Shared.ResourceSchemaTest do
use ExUnit.Case, async: true
doctest Transport.Shared.ResourceSchema, import: true
end
================================================
FILE: apps/shared/test/s3_test.exs
================================================
defmodule Transport.S3Test do
use ExUnit.Case, async: true
import Mox
setup :verify_on_exit!
test "bucket_name" do
expected = "transport-data-gouv-fr-resource-history-test"
assert expected == Transport.S3.bucket_name(:history)
assert_raise KeyError, fn ->
Transport.S3.bucket_name(:foo)
end
end
describe "permanent_url" do
@bucket_name Transport.S3.bucket_name(:history)
test "no path" do
assert "https://#{@bucket_name}.cellar-c2.services.clever-cloud.com" == Transport.S3.permanent_url(:history)
end
test "with path" do
assert "https://#{@bucket_name}.cellar-c2.services.clever-cloud.com/foo/bar.zip" ==
Transport.S3.permanent_url(:history, "foo/bar.zip")
end
end
end
================================================
FILE: apps/shared/test/support/cache_case.ex
================================================
defmodule Shared.CacheCase do
@moduledoc """
This module defines the test case to be used by
tests that require access to the Cachex cache.
"""
use ExUnit.CaseTemplate
using do
quote do
use ExUnit.Case, async: false
import Shared.Application, only: [cache_name: 0]
import Mox
setup :verify_on_exit!
setup :set_mox_from_context
setup do
Cachex.clear(cache_name())
on_exit(fn -> Cachex.clear(cache_name()) end)
end
def assert_cache_key_has_ttl(cache_key, expected_ttl \\ 300) do
assert_in_delta Cachex.ttl!(cache_name(), cache_key), :timer.seconds(expected_ttl), :timer.seconds(1)
end
end
end
end
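# Illustrative usage sketch (MyCachedFeatureTest is a made-up module name):
#
#     defmodule MyCachedFeatureTest do
#       use Shared.CacheCase
#
#       test "starts from an empty cache" do
#         assert {:ok, 0} == Cachex.size(cache_name())
#       end
#     end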
================================================
FILE: apps/shared/test/support/mocks.ex
================================================
Mox.defmock(Transport.HTTPoison.Mock, for: HTTPoison.Base)
Mox.defmock(Transport.Req.Mock, for: Transport.Req.Behaviour)
Mox.defmock(Shared.Validation.GBFSValidator.Mock, for: Shared.Validation.GBFSValidator.Wrapper)
Mox.defmock(Transport.DataVisualization.Mock, for: Transport.DataVisualization)
================================================
FILE: apps/shared/test/support/s3_test_utils.ex
================================================
defmodule Transport.Test.S3TestUtils do
@moduledoc """
Some utility functions for S3 mocks
"""
import Mox
import ExUnit.Assertions
@doc """
Sets up a mock expectation so that listing S3 buckets returns the given bucket names
"""
def s3_mock_list_buckets(bucket_names \\ []) do
Transport.ExAWS.Mock
|> expect(:request!, fn request ->
assert(request.service == :s3)
assert(request.http_method == :get)
assert(request.path == "/")
%{body: %{buckets: bucket_names |> Enum.map(&%{name: &1})}}
end)
end
def s3_mock_stream_file(start_path: expected_start_path, bucket: expected_bucket) do
Transport.ExAWS.Mock
|> expect(:request!, fn %ExAws.S3.Upload{
src: %File.Stream{},
bucket: ^expected_bucket,
path: path,
opts: [acl: :public_read],
service: :s3
} ->
assert String.starts_with?(path, expected_start_path)
end)
end
def s3_mock_stream_file(
start_path: expected_start_path,
bucket: expected_bucket,
acl: expected_acl,
file_content: expected_file_content
) do
Transport.ExAWS.Mock
|> expect(:request!, fn %ExAws.S3.Upload{
src: src = %File.Stream{},
bucket: ^expected_bucket,
path: path,
opts: [acl: ^expected_acl],
service: :s3
} ->
assert String.starts_with?(path, expected_start_path)
assert src |> Enum.join("\n") == expected_file_content
end)
end
def s3_mock_stream_file(
path: expected_path,
bucket: expected_bucket,
acl: expected_acl,
file_content: expected_file_content
) do
Transport.ExAWS.Mock
|> expect(:request!, fn %ExAws.S3.Upload{
src: src = %File.Stream{},
bucket: ^expected_bucket,
path: ^expected_path,
opts: [acl: ^expected_acl],
service: :s3
} ->
assert src |> Enum.join("\n") == expected_file_content
:ok
end)
end
def s3_mocks_delete_object(expected_bucket, expected_path) do
Transport.ExAWS.Mock
|> expect(:request!, fn %ExAws.Operation.S3{
bucket: ^expected_bucket,
path: ^expected_path,
http_method: :delete,
service: :s3
} ->
:ok
end)
end
def s3_mocks_remote_copy_file(expected_bucket, expected_src_path, expected_dest_path) do
Transport.ExAWS.Mock
|> expect(:request!, fn %ExAws.Operation.S3{
bucket: ^expected_bucket,
path: ^expected_dest_path,
http_method: :put,
service: :s3,
headers: headers
} ->
assert Map.get(headers, "x-amz-copy-source") =~ "/#{expected_bucket}/#{expected_src_path}"
%{body: %{}}
end)
end
end
================================================
FILE: apps/shared/test/support/test_utils.ex
================================================
defmodule Transport.Test.TestUtils do
@moduledoc """
Some useful functions for testing
"""
@doc """
Polls `fun` until it returns a truthy value, or fails the test after `timeout_ms`.
Useful as a sync barrier in tests where work happens in async tasks.
"""
def wait_until(fun, timeout_ms \\ 2_000, interval_ms \\ 10) do
deadline = System.monotonic_time(:millisecond) + timeout_ms
do_wait_until(fun, deadline, interval_ms, timeout_ms)
end
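# Illustrative usage sketch, in a test module that imports Transport.Test.TestUtils:
#
#     {:ok, agent} = Agent.start_link(fn -> 0 end)
#     spawn(fn -> Agent.update(agent, &(&1 + 1)) end)
#     wait_until(fn -> Agent.get(agent, & &1) == 1 end)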
defp do_wait_until(fun, deadline, interval_ms, timeout_ms) do
if fun.() do
:ok
else
if System.monotonic_time(:millisecond) > deadline do
ExUnit.Assertions.flunk("wait_until: condition not met within #{timeout_ms}ms")
else
Process.sleep(interval_ms)
do_wait_until(fun, deadline, interval_ms, timeout_ms)
end
end
end
def ensure_no_tmp_files!(file_prefix) do
tmp_files = System.tmp_dir!() |> File.ls!()
ExUnit.Assertions.assert(
tmp_files |> Enum.filter(fn f -> String.starts_with?(f, file_prefix) end) |> Enum.empty?(),
"tmp files found in #{System.tmp_dir!()}"
)
end
def zip_metadata do
# Metadata for shared/test/validation/gtfs.zip
[
%{
"compressed_size" => 41,
"file_name" => "ExportService.checksum.md5",
"last_modified_datetime" => "2017-02-16T05:01:12",
"sha256" => "f0c7216411dec821330ffbebf939bfe73a50707f5e443795a122ec7bef37aa16",
"uncompressed_size" => 47
},
%{
"compressed_size" => 115,
"file_name" => "agency.txt",
"last_modified_datetime" => "2017-02-16T05:01:12",
"sha256" => "548de694a86ab7d6ac0cd3535b0c3b8bffbabcc818e8d7f5a4b8f17030adf617",
"uncompressed_size" => 143
},
%{
"compressed_size" => 179,
"file_name" => "calendar.txt",
"last_modified_datetime" => "2017-02-16T05:01:12",
"sha256" => "390c446ee520bc63c49f69da16d4fe08bceb0511ff19f8491315b739a60f61d6",
"uncompressed_size" => 495
},
%{
"compressed_size" => 215,
"file_name" => "calendar_dates.txt",
"last_modified_datetime" => "2017-02-16T05:01:12",
"sha256" => "4779cd26ddc1d44c8544cb1be449b0f6b48b65fe8344861ee46bcfa3787f9ba7",
"uncompressed_size" => 1197
},
%{
"compressed_size" => 82,
"file_name" => "routes.txt",
"last_modified_datetime" => "2017-02-16T05:01:12",
"sha256" => "27eadc95f783e85c352c9b6b75cc896d9afd236c58c332597a1fac1c14c1f855",
"uncompressed_size" => 102
},
%{
"compressed_size" => 1038,
"file_name" => "stop_times.txt",
"last_modified_datetime" => "2017-02-16T05:01:12",
"sha256" => "dc452a69b86b07841d5de49705ceea22340d639eebfd6589b379d1b38b9b9da1",
"uncompressed_size" => 5128
},
%{
"compressed_size" => 251,
"file_name" => "stops.txt",
"last_modified_datetime" => "2017-02-16T05:01:12",
"sha256" => "2685fb16434b396f277c7ad593b609574ed01592b48de7001c53beb36b926eca",
"uncompressed_size" => 607
},
%{
"compressed_size" => 71,
"file_name" => "transfers.txt",
"last_modified_datetime" => "2017-02-16T05:01:12",
"sha256" => "269d48635624c4b46968cb649fc5a5a1c2224c2dac1670aa6082516ca0c50f59",
"uncompressed_size" => 102
},
%{
"compressed_size" => 378,
"file_name" => "trips.txt",
"last_modified_datetime" => "2017-02-16T05:01:12",
"sha256" => "dd79f0fb8d2fd0a70cc75f49c5f2cae56b9b2ef83670992d6b195e9806393c24",
"uncompressed_size" => 2864
}
]
end
end
================================================
FILE: apps/shared/test/test_helper.exs
================================================
ExUnit.start()
================================================
FILE: apps/shared/test/time_wrapper_test.exs
================================================
defmodule TimeWrapperTest do
use ExUnit.Case, async: true
doctest TimeWrapper
end
================================================
FILE: apps/shared/test/validation/gbfs_validator_test.exs
================================================
defmodule GBFSValidatorTest do
use ExUnit.Case, async: true
alias Shared.Validation.GBFSValidator.{HTTPValidatorClient, Summary}
import Mox
setup :verify_on_exit!
test "validate GBFS feed" do
Transport.HTTPoison.Mock
|> expect(:post, fn url, body, headers, [recv_timeout: 15_000] ->
assert %{"url" => "https://example.com/gbfs.json"} = Jason.decode!(body)
assert [
{"content-type", "application/json"},
{"user-agent", "contact@transport.data.gouv.fr"}
] == headers
assert String.starts_with?(url, "https://gbfs-validator.netlify.app")
{:ok,
%HTTPoison.Response{
status_code: 200,
body: """
{"summary":{"version":{"detected":"1.1","validated":"1.1"},"hasErrors":false,"errorsCount":0,"validatorVersion":"31c5325"}}
""",
headers: [{"Content-Type", "application/json"}]
}}
end)
expected = %Summary{
errors_count: 0,
has_errors: false,
version_detected: "1.1",
version_validated: "1.1",
validator_version: "31c5325",
validator: Shared.Validation.GBFSValidator.HTTPValidatorClient
}
assert {:ok, ^expected} = HTTPValidatorClient.validate("https://example.com/gbfs.json")
end
test "on invalid server response" do
Transport.HTTPoison.Mock
|> expect(:post, fn _url, _, _, [recv_timeout: 15_000] -> {:ok, %HTTPoison.Response{status_code: 500}} end)
{{:error, error}, logs} =
ExUnit.CaptureLog.with_log(fn -> HTTPValidatorClient.validate("https://example.com/gbfs.json") end)
assert String.starts_with?(error, "impossible to query GBFS Validator")
assert logs =~ "impossible to query GBFS Validator"
end
test "validators send nil validation result" do
Transport.HTTPoison.Mock
|> expect(:post, fn url, body, headers, [recv_timeout: 15_000] ->
assert %{"url" => "https://example.com/gbfs.json"} = Jason.decode!(body)
assert [
{"content-type", "application/json"},
{"user-agent", "contact@transport.data.gouv.fr"}
] == headers
assert String.starts_with?(url, "https://gbfs-validator.netlify.app")
{:ok,
%HTTPoison.Response{
status_code: 200,
body: Jason.encode!(%{summary: %{errorsCount: nil}}),
headers: [{"Content-Type", "application/json"}]
}}
end)
{{:error, "impossible to query GBFS Validator: {:has_errors_count, false}"}, logs} =
ExUnit.CaptureLog.with_log(fn -> HTTPValidatorClient.validate("https://example.com/gbfs.json") end)
assert logs =~ "[error] impossible to query GBFS Validator: {:has_errors_count, false}"
end
test "can encode and decode summary" do
encoded_summary =
Jason.encode!(%Summary{
errors_count: 0,
has_errors: false,
version_detected: "1.1",
version_validated: "1.1",
validator_version: "31c5325",
validator: :validator_module
})
assert Jason.decode!(encoded_summary) == %{
"errors_count" => 0,
"has_errors" => false,
# NOTE: the serialized atom does not come back as an atom
"validator" => "validator_module",
"validator_version" => "31c5325",
"version_detected" => "1.1",
"version_validated" => "1.1"
}
end
end
================================================
FILE: apps/shared/test/validation/gtfs_validator_test.exs
================================================
defmodule GtfsValidatorTest do
use ExUnit.Case, async: true
doctest Shared.Validation.GtfsValidator
import Mox
alias Shared.Validation.GtfsValidator
setup :verify_on_exit!
test "validate gtfs zip file" do
expected_validation_report = %{"text" => "GTFS is great"}
create_gtfs()
|> tap(&expect_validator_called_with_gtfs_and_return_report(&1, expected_validation_report))
|> GtfsValidator.validate()
|> assert_validation_report_is(expected_validation_report)
end
test "validate gtfs url" do
gtfs_url = "http://my-domain.com/gtfs.zip"
expected_validation_report = %{"text" => "GTFS is great"}
expect_validator_called_with_gtfs_url_and_return_report(gtfs_url, expected_validation_report)
gtfs_url
|> GtfsValidator.validate_from_url()
|> assert_validation_report_is(expected_validation_report)
end
test "with a timeout" do
gtfs_url = "http://example.com/gtfs.zip"
expected_url = "https://validation.transport.data.gouv.fr/validate?url=#{URI.encode_www_form(gtfs_url)}"
Transport.HTTPoison.Mock
|> expect(:get, fn ^expected_url, [], [recv_timeout: 180_000] ->
{:error, %HTTPoison.Error{reason: :timeout}}
end)
assert {:error, "Error while requesting GTFS validator"} == GtfsValidator.validate_from_url(gtfs_url)
end
defp assert_validation_report_is({:ok, obtained_validation_report}, expected_validation_report),
do: assert(obtained_validation_report == expected_validation_report)
defp create_gtfs, do: File.read!("#{__DIR__}/gtfs.zip")
defp expect_validator_called_with_gtfs_and_return_report(_gtfs, expected_validation_report),
do:
Transport.HTTPoison.Mock
|> expect(
:post,
fn "https://validation.transport.data.gouv.fr/validate", _gtfs, _, _ ->
{:ok, %{status_code: 200, body: Jason.encode!(expected_validation_report)}}
end
)
defp expect_validator_called_with_gtfs_url_and_return_report(gtfs_url, expected_validation_report),
do:
Transport.HTTPoison.Mock
|> expect(
:get,
fn obtained_validator_url, _, _ ->
# The validator must be called with the download link of the GTFS file
expected_validator_url =
"https://validation.transport.data.gouv.fr/validate?url=#{URI.encode_www_form(gtfs_url)}"
assert obtained_validator_url == expected_validator_url
{:ok, %{status_code: 200, body: Jason.encode!(expected_validation_report)}}
end
)
end
================================================
FILE: apps/transport/client/.prettierignore
================================================
node_modules/
priv/
build/
dist/
yarn.lock
package.json
# styles handled by stylelint, not prettier
*.css
*.scss
# build configs: keep diffs readable, not worth the formatting churn
webpack.*.js
================================================
FILE: apps/transport/client/.prettierrc.json
================================================
{
"tabWidth": 4,
"semi": false,
"singleQuote": true,
"trailingComma": "none",
"arrowParens": "avoid",
"printWidth": 120
}
================================================
FILE: apps/transport/client/eslint.config.mjs
================================================
import js from '@eslint/js'
import globals from 'globals'
import prettier from 'eslint-config-prettier/flat'
export default [
js.configs.recommended,
{
languageOptions: {
ecmaVersion: 'latest',
sourceType: 'module',
globals: {
...globals.browser,
...globals.node,
opts: 'readonly'
}
},
rules: {
'no-unused-vars': [
'error',
{
args: 'after-used',
argsIgnorePattern: '^_',
caughtErrorsIgnorePattern: '^_',
varsIgnorePattern: '^_'
}
]
}
},
prettier,
{
ignores: ['node_modules/', 'priv/', 'build/', 'dist/']
}
]
================================================
FILE: apps/transport/client/javascripts/app.js
================================================
import { Socket } from 'phoenix'
import { LiveSocket } from 'phoenix_live_view'
import Prism from 'prismjs'
import format from 'xml-formatter'
const Hooks = {}
Hooks.SyntaxColoring = {
mounted() {
this.updated()
},
updated() {
const element = this.el
const target = document.getElementById(element.dataset.code)
try {
target.textContent = format(element.value, {
indentation: ' ',
filter: node => node.type !== 'Comment',
collapseContent: true,
lineSeparator: '\n'
})
} catch (_) {
/* in some cases, the returned content is not XML, in which case the
attempt to format fails. We use a catch-all exception to make sure
we still display the response properly */
target.textContent = element.value
}
Prism.highlightElement(target)
}
}
Hooks.TextareaAutoexpand = {
mounted() {
this.el.addEventListener('input', event => {
event.target.parentNode.dataset.replicatedValue = event.target.value
})
}
}
window.addEventListener('phx:backoffice-form-reset', () => {
document.getElementById('custom_tag').value = ''
})
window.addEventListener('phx:backoffice-form-owner-reset', () => {
document.getElementById('js-owner-input').value = ''
})
window.addEventListener('phx:backoffice-form-spatial-areas-reset', () => {
document.getElementById('spatial_areas_search_input').value = ''
})
window.addEventListener('phx:backoffice-form-offer-reset', () => {
document.getElementById('js-offer-input').value = ''
})
window.addEventListener('phx:backoffice-form-dataset-subtypes-reset', () => {
document.getElementById('js-dataset-subtype-input').value = ''
})
window.addEventListener('phx:gtfs-diff:scroll-to-steps', () => {
document.getElementById('gtfs-diff-steps').parentElement.scrollIntoView({ behavior: 'smooth' })
})
const csrfToken = document.querySelector("meta[name='csrf']").getAttribute('content')
const liveSocket = new LiveSocket('/live', Socket, { hooks: Hooks, params: { _csrf_token: csrfToken } })
liveSocket.connect()
// Track analytics events for DOM elements carrying a `data-tracking-category` attribute.
// The event is recorded when the element is clicked.
// See https://matomo.org/faq/reports/implement-event-tracking-with-matomo/#how-to-set-up-matomo-event-tracking-with-javascript
document.querySelectorAll('[data-tracking-category]').forEach(el => {
el.addEventListener('click', function (event) {
const target = event.target
const name = target.dataset.trackingName || ''
window._paq.push(['trackEvent', target.dataset.trackingCategory, target.dataset.trackingAction, name])
})
})
// expose liveSocket on window for web console debug logs and latency simulation:
// >> liveSocket.enableDebug()
// >> liveSocket.enableLatencySim(1000) // enabled for duration of browser session
// >> liveSocket.disableLatencySim()
// window.liveSocket = liveSocket
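
The Matomo listener above is driven purely by data attributes on server-rendered markup. A hedged sketch of the kind of element it picks up and the event it pushes; the category, action and name values here are invented for illustration.

// Illustrative only: markup the click listener above looks for when app.js runs.
// The category/action/name values are made up for this example:
//   <a href="/datasets"
//      data-tracking-category="home"
//      data-tracking-action="browse-datasets"
//      data-tracking-name="header-link">Voir les jeux de données</a>
// A direct click on that element results in the equivalent of:
window._paq = window._paq || []
window._paq.push(['trackEvent', 'home', 'browse-datasets', 'header-link'])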
================================================
FILE: apps/transport/client/javascripts/autocomplete.js
================================================
/* eslint no-unused-vars: [2, {"args": "after-used", "varsIgnorePattern": "autoCompletejs"}] */
/* global contactId, labels */
// https://github.com/babel/babel/issues/9849
require('regenerator-runtime')
const AutoComplete = require('@tarekraafat/autocomplete.js/dist/autoComplete')
document.onkeydown = function (evt) {
evt = evt || window.event
if (evt.key === 'Escape' || evt.key === 'Esc') {
document.querySelector('#autoComplete').value = ''
document.querySelector('#autoComplete_list').innerHTML = ''
}
}
const autoCompletejs = new AutoComplete({
data: {
src: async () => {
const query = document.querySelector('#autoComplete').value
const source = await fetch(`/api/autocomplete?q=${query}`)
let data = await source.json()
data = [
{
name: labels['search-description'].replace('$query', query),
value: query,
type: 'description',
position: 1,
url: `/datasets?q=${query}`
},
...data.map((el, index) => {
return {
...el,
value: el.name,
position: index + 2
}
})
]
return data
},
key: ['name'],
cache: false
},
selector: '#autoComplete',
threshold: 1,
debounce: 200,
highlight: true,
searchEngine: (query, record) => {
record = record.name
// inspired by the built-in 'loose' searchEngine, except that this one always returns a match
query = query
.replace(/ /g, '')
.normalize('NFD')
.replace(/[\u0300-\u036f]/g, '')
const recordLowerCase = record
.toLowerCase()
.normalize('NFD')
.replace(/[\u0300-\u036f]/g, '')
const fullMatchPos = recordLowerCase.indexOf(query)
if (fullMatchPos >= 0) {
// full query match has priority
return `${record.slice(0, fullMatchPos)}<span class="autoComplete_highlighted">${record.slice(fullMatchPos, fullMatchPos + query.length)}</span>${record.slice(fullMatchPos + query.length)}`
} else {
const match = []
let searchPosition = 0
for (let number = 0; number < recordLowerCase.length; number++) {
let recordChar = record[number]
if (searchPosition < query.length && recordLowerCase[number] === query[searchPosition]) {
recordChar = `<span class="autoComplete_highlighted">${recordChar}</span>`
searchPosition++
}
match.push(recordChar)
}
return match.join('')
}
},
events: {
input: {
keydown(event) {
switch (event.keyCode) {
// Down/Up arrow
case 40:
case 38:
event.preventDefault()
event.keyCode === 40 ? autoCompletejs.next() : autoCompletejs.previous()
break
// Enter
case 13:
if (autoCompletejs.cursor >= 0) {
event.preventDefault()
autoCompletejs.select(event)
}
break
}
}
}
},
resultsList: {
maxResults: 7,
id: 'autoComplete_list',
destination: '#autoCompleteResults',
position: 'beforeend',
tag: 'ul'
},
resultItem: {
element: (source, data) => {
source.innerHTML = `<div><span class="autocomplete_name">${data.match}</span><span class="autocomplete_type">${labels[data.value.type] || ''}</span></div>`
},
tag: 'li',
highlight: 'autoComplete_highlighted',
selected: 'autoComplete_selected'
}
})
document.addEventListener('keydown', function (event) {
if (event.key === '/' && !['INPUT', 'TEXTAREA'].includes(document.activeElement.tagName)) {
const searchInput = document.getElementById('autoComplete')
if (searchInput) {
event.preventDefault()
searchInput.focus()
}
}
})
document.querySelector('#autoComplete').addEventListener('selection', function (event) {
const selection = event.detail.selection.value
// Log the selected value
fetch('/api/features/autocomplete', {
method: 'POST',
headers: {
'content-type': 'application/json'
},
body: JSON.stringify({
...selection,
contact_id: contactId
})
})
// Redirect to the target URL
window.location = selection.url
})
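
The custom searchEngine above matches accented and unaccented input interchangeably by decomposing strings (NFD) and stripping combining marks. A standalone sketch of that folding step, outside the widget:

// Illustrative only: the accent-insensitive folding used by the searchEngine above.
const fold = text =>
    text
        .toLowerCase()
        .normalize('NFD')
        .replace(/[\u0300-\u036f]/g, '')
console.log(fold('Sète')) // 'sete'
console.log(fold('Orléans').startsWith(fold('orlé'))) // true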
================================================
FILE: apps/transport/client/javascripts/autocomplete_address.js
================================================
const AutoComplete = require('@tarekraafat/autocomplete.js/dist/autoComplete')
new AutoComplete({
data: {
src: async () => {
const query = document.querySelector('#autoComplete').value
// See https://geoservices.ign.fr/documentation/services/services-geoplateforme/autocompletion
const source = await fetch(
`https://data.geopf.fr/geocodage/completion/?text=${query}&poiType=administratif&type=StreetAddress&maximumResponses=5`
)
const data = await source.json()
return data.results
},
keys: ['fulltext'],
cache: false
},
selector: '#autoComplete',
threshold: 3,
debounce: 200,
highlight: true,
submit: false,
resultsList: {
maxResults: 5,
id: 'autoComplete_list',
class: 'no_legend',
destination: '#autoCompleteResults',
position: 'beforeend',
tag: 'ul',
noResults: true,
element: (list, data) => {
if (!data.results.length) {
const message = document.createElement('li')
message.innerHTML = `Pas de résultats pour "<span class="autoComplete_highlighted">${data.query}</span>"`
list.prepend(message)
}
}
},
resultItem: {
element: (source, data) => {
source.innerHTML = `<div><span class="autocomplete_name">${data.match}</span><span class="autocomplete_type">adresse</span></div>`
},
tag: 'li',
highlight: 'autoComplete_highlighted',
selected: 'autoComplete_selected'
}
})
document.addEventListener('keydown', function (event) {
if (event.key === '/' && !['INPUT', 'TEXTAREA'].includes(document.activeElement.tagName)) {
const searchInput = document.getElementById('autoComplete')
if (searchInput) {
event.preventDefault()
searchInput.focus()
}
}
})
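
For reference, a minimal sketch of querying the same IGN completion endpoint outside the widget; the example text is arbitrary, and only the `results[].fulltext` field relied on above is read back.

// Illustrative only: direct call to the completion endpoint configured above.
async function suggestAddresses(text) {
    const params = new URLSearchParams({
        text,
        poiType: 'administratif',
        type: 'StreetAddress',
        maximumResponses: '5'
    })
    const response = await fetch(`https://data.geopf.fr/geocodage/completion/?${params}`)
    const data = await response.json()
    return data.results.map(result => result.fulltext)
}
suggestAddresses('1 rue de la paix, Paris').then(suggestions => console.log(suggestions))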
================================================
FILE: apps/transport/client/javascripts/clipboard.js
================================================
import Clipboard from 'clipboard'
const clipboard = new Clipboard('.button')
clipboard.on('success', e => {
e.trigger.textContent = 'Ok!'
e.clearSelection()
})
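
The '.button' selector above turns every matching element into a clipboard trigger; with the clipboard package, the text to copy is normally supplied through a data attribute. A hedged sketch of a compatible element (the class and copied value below are invented for this example):

// Illustrative only: a trigger element the Clipboard instance above would handle.
const trigger = document.createElement('button')
trigger.className = 'button'
trigger.dataset.clipboardText = 'https://transport.data.gouv.fr/datasets'
trigger.textContent = 'Copier'
document.body.appendChild(trigger)
// After a successful copy, the 'success' handler above replaces the button label with 'Ok!'.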
================================================
FILE: apps/transport/client/javascripts/dataset-map.js
================================================
import L from 'leaflet'
import { IGN } from './map-config'
function initializeMap(id) {
const map = L.map(id, { renderer: L.canvas() }).setView([46.505, 2], 5)
L.tileLayer(IGN.url, IGN.config).addTo(map)
const fg = L.featureGroup().addTo(map)
return { map, fg }
}
function createDatasetMap(divId, datasetDatagouvId) {
const { map, fg } = initializeMap(divId)
fetch(`/api/datasets/${datasetDatagouvId}/geojson`)
.then(data => data.json())
.then(geojson => {
const gs = L.geoJSON(geojson).addTo(fg)
gs.bindPopup(layer => {
return layer.feature.properties.name
})
const bounds = fg.getBounds()
if (bounds.isValid()) {
map.fitBounds(bounds)
}
})
.catch(_ => console.log('invalid geojson'))
}
window.createDatasetMap = createDatasetMap
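
createDatasetMap is attached to window so that server-rendered templates can call it inline. A minimal usage sketch; both the container id and the data.gouv.fr dataset id below are hypothetical.

// Illustrative only: invoking the globally exposed helper from a page script.
document.addEventListener('DOMContentLoaded', () => {
    window.createDatasetMap('dataset-covered-area-map', '5b9c2f4d8b4c4d1a2e000000')
})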
================================================
FILE: apps/transport/client/javascripts/explore.js
================================================
import { Socket } from 'phoenix'
import Leaflet from 'leaflet'
import { LeafletLayer } from 'deck.gl-leaflet'
import { ScatterplotLayer, GeoJsonLayer } from '@deck.gl/layers'
import { MapView } from '@deck.gl/core'
import { Mapbox } from './map-config'
const socket = new Socket('/socket', { params: { token: window.userToken } })
socket.connect()
const channel = socket.channel('explore', {})
channel
.join()
.receive('ok', resp => {
console.log('Joined successfully', resp)
})
.receive('error', resp => {
console.log('Unable to join', resp)
})
let gtfsChannelRef
// Default location is Paris
const DEFAULT_LAT = 48.8575
const DEFAULT_LNG = 2.3514
const DEFAULT_ZOOM = 6
function getMapParamsFromUrlPath() {
// Example Path: /explore?@34.0522,-118.2437,10
const path = decodeURIComponent(window.location.search)
const parts = path.split('@')
// If there is no '@' segment, return defaults
if (parts.length < 2) {
return { lat: DEFAULT_LAT, lng: DEFAULT_LNG, zoom: DEFAULT_ZOOM }
}
const coordsStr = parts[1]
const [latStr, lngStr, zoomStr] = coordsStr.split(',')
const lat = parseFloat(latStr) || DEFAULT_LAT
const lng = parseFloat(lngStr) || DEFAULT_LNG
const zoom = parseInt(zoomStr, 10) || DEFAULT_ZOOM
return { lat, lng, zoom }
}
const { lat, lng, zoom } = getMapParamsFromUrlPath()
const map = Leaflet.map('map', { renderer: Leaflet.canvas() }).setView([lat, lng], zoom)
Leaflet.tileLayer(Mapbox.url, {
accessToken: Mapbox.accessToken,
attribution: Mapbox.attribution,
maxZoom: Mapbox.maxZoom,
tileSize: Mapbox.tileSize,
zoomOffset: Mapbox.zoomOffset
}).addTo(map)
const visibility = { gtfsrt: document.getElementById('gtfs-rt-check').checked }
function prepareGTFSRTLayer(layerData) {
return new ScatterplotLayer({
id: 'gtfs-rt',
data: layerData,
pickable: true,
opacity: 1,
stroked: false,
filled: true,
radiusMinPixels: 4,
radiusMaxPixels: 10,
lineWidthMinPixels: 1,
visible: visibility.gtfsrt,
getPosition: d => {
return [d.position.longitude, d.position.latitude]
},
getRadius: _d => 1000,
getFillColor: _d => [0, 150, 136, 150],
getLineColor: _d => [0, 150, 136]
})
}
const deckGLLayer = new LeafletLayer({
views: [new MapView({ repeat: true })],
layers: [],
getTooltip
})
map.addLayer(deckGLLayer)
function getTooltip({ object, layer }) {
if (object) {
if (layer.id === 'bnlc-layer') {
return { html: `<strong>Aire de covoiturage</strong><br>${object.properties.nom_lieu}` }
} else if (layer.id === 'zfe-layer') {
return { html: '<strong>Zone à Faible Émission</strong>' }
} else if (layer.id === 'gbfs_stations-layer') {
return {
html: `<strong>Station GBFS</strong><br>
${object.properties.name}<br>
Capacité : ${object.properties.capacity}`
}
} else if (layer.id === 'irve-layer') {
return {
html: `<strong>Infrastructure de recharge</strong><br>
${object.properties.nom_station}<br>
Enseigne : ${object.properties.nom_enseigne}<br>
Identifiant station en itinérance : ${object.properties.id_station_itinerance}<br>
Nombre de points de charge : ${object.properties.nbre_pdc}`
}
} else {
return {
│ │ │ ├── datasets_switching_climate_resilience_bill_job_test.exs
│ │ │ ├── datasets_without_gtfs_rt_related_resources_notification_job_test.exs
│ │ │ ├── dedupe_history_job_test.exs
│ │ │ ├── default_token_job_test.exs
│ │ │ ├── expiration_notification_job_test.exs
│ │ │ ├── gbfs_multi_validation_job_test.exs
│ │ │ ├── gbfs_operators_notification_job_test.exs
│ │ │ ├── geo_data/
│ │ │ │ ├── bnlc_to_geodata_test.exs
│ │ │ │ ├── gbfs_stations_to_geo_data_test.exs
│ │ │ │ ├── irve_to_geodata_test.exs
│ │ │ │ └── lez_to_geo_data_test.exs
│ │ │ ├── gtfs_diff_job_test.exs
│ │ │ ├── gtfs_import_stops_job_test.exs
│ │ │ ├── gtfs_import_stops_test.exs
│ │ │ ├── gtfs_rt_metadata_test.exs
│ │ │ ├── gtfs_rt_multi_validation_job_test.exs
│ │ │ ├── gtfs_to_db_test.exs
│ │ │ ├── import_companies_job_test.exs
│ │ │ ├── import_dataset_contact_points_job_test.exs
│ │ │ ├── import_dataset_follower_reuser_improved_data_job_test.exs
│ │ │ ├── import_dataset_followers_job_test.exs
│ │ │ ├── import_dataset_monthly_metrics_job_test.exs
│ │ │ ├── import_gbfs_feed_contact_point_job_test.exs
│ │ │ ├── import_resource_monthly_metrics_job_test.exs
│ │ │ ├── import_reuses_job_test.exs
│ │ │ ├── multi_validation_with_error_notification_job_test.exs
│ │ │ ├── netex_poller_job_test.exs
│ │ │ ├── new_comments_notification_job_test.exs
│ │ │ ├── new_datagouv_datasets_job_test.exs
│ │ │ ├── new_dataset_notifications_job_test.exs
│ │ │ ├── notification_subscription_producer_job_test.exs
│ │ │ ├── oban_logger_test.exs
│ │ │ ├── on_demand_netex_poller_job_test.exs
│ │ │ ├── on_demand_validation_job_test.exs
│ │ │ ├── periodic_reminder_producers_notification_job_test.exs
│ │ │ ├── promote_producer_space_job_test.exs
│ │ │ ├── promote_reuser_space_job_test.exs
│ │ │ ├── refresh_autocomplete_job_test.exs
│ │ │ ├── remove_history_job_test.exs
│ │ │ ├── resource_history_job_test.exs
│ │ │ ├── resource_history_jsonschema_validation_job_test.exs
│ │ │ ├── resource_history_tableschema_validation_job_test.exs
│ │ │ ├── resource_history_validation_job_test.exs
│ │ │ ├── resource_unavailable_job_test.exs
│ │ │ ├── resource_unavailable_notification_job_test.exs
│ │ │ ├── resource_validation_job_test.exs
│ │ │ ├── resources_changed_notification_job_test.exs
│ │ │ ├── table_size_history_job_test.exs
│ │ │ ├── transport_tools_test.exs
│ │ │ ├── update_contacts_job_test.exs
│ │ │ ├── update_counter_cache_job_test.exs
│ │ │ ├── visit_download_statistics_job_test.exs
│ │ │ ├── visit_proxy_statistics_job_test.exs
│ │ │ ├── visit_statistics_base_test.exs
│ │ │ ├── warn_user_inactivity_job_test.exs
│ │ │ └── workflow_test.exs
│ │ ├── notifiers_test.exs
│ │ ├── schemas_test.exs
│ │ ├── search_communes_test.exs
│ │ ├── siri_queries_test.exs
│ │ ├── stats_handler_test.exs
│ │ ├── telemetry_test.exs
│ │ └── validators/
│ │ ├── enroute_chouette_valid_client_test.exs
│ │ ├── gbfs_validator_test.exs
│ │ ├── gtfs_rt_validator_test.exs
│ │ ├── gtfs_transport_validator_test.exs
│ │ ├── jsonschema_validata_json_test.exs
│ │ ├── jsonschema_validator_test.exs
│ │ ├── mobilitydata_gtfs_validator_client_test.exs
│ │ ├── mobilitydata_gtfs_validator_test.exs
│ │ ├── netex/
│ │ │ ├── metadata_extractor_test.exs
│ │ │ ├── results_adapters/
│ │ │ │ ├── commons_test.exs
│ │ │ │ ├── v0_1_0_test.exs
│ │ │ │ ├── v0_2_0_test.exs
│ │ │ │ └── v0_2_1_test.exs
│ │ │ └── validator_test.exs
│ │ ├── tableschema_validator_test.exs
│ │ └── validator_selection_test.exs
│ ├── transport_web/
│ │ ├── controllers/
│ │ │ ├── aoms_controller_test.exs
│ │ │ ├── api/
│ │ │ │ ├── aom_controller_test.exs
│ │ │ │ ├── autocomplete_controller_test.exs
│ │ │ │ ├── datasets_controller_test.exs
│ │ │ │ ├── features_controller_test.exs
│ │ │ │ ├── geo_query_controller_test.exs
│ │ │ │ ├── gtfs_stops_controller_test.exs
│ │ │ │ ├── schemas_test.exs
│ │ │ │ ├── stats_controller_test.exs
│ │ │ │ └── validators_controller_test.exs
│ │ │ ├── atom_controller_test.exs
│ │ │ ├── backoffice/
│ │ │ │ ├── backoffice_controller_test.exs
│ │ │ │ ├── broken_urls_controller_test.exs
│ │ │ │ ├── contact_controller_test.exs
│ │ │ │ ├── dataset_controller_test.exs
│ │ │ │ ├── notification_subscription_controller_test.exs
│ │ │ │ └── page_controller_test.exs
│ │ │ ├── breaking_news_controller_test.exs
│ │ │ ├── contact_controller_test.exs
│ │ │ ├── conversion_controller_test.exs
│ │ │ ├── dataset_controller_test.exs
│ │ │ ├── dataset_search_test.exs
│ │ │ ├── discussion_controller_test.exs
│ │ │ ├── espace_producteur_controller_test.exs
│ │ │ ├── explore_controller_test.exs
│ │ │ ├── gbfs_analyzer_controller_test.exs
│ │ │ ├── landing_page_controller_test.exs
│ │ │ ├── nav_test.exs
│ │ │ ├── page_controller_test.exs
│ │ │ ├── pagination_helpers_test.exs
│ │ │ ├── resource_controller_test.exs
│ │ │ ├── reuse_controller_test.exs
│ │ │ ├── reuser_space_controller_test.exs
│ │ │ ├── seo_test.exs
│ │ │ ├── session_controller_test.exs
│ │ │ ├── siri_querier_test.exs
│ │ │ └── validation_controller_test.exs
│ │ ├── live_views/
│ │ │ ├── cache_live_test.exs
│ │ │ ├── custom_tags_live_test.exs
│ │ │ ├── dataset_notifications_live_test.exs
│ │ │ ├── discussions_live_test.exs
│ │ │ ├── edit_dataset_live_test.exs
│ │ │ ├── email_preview_live_test.exs
│ │ │ ├── feedback_live_test.exs
│ │ │ ├── follow_dataset_live_test.exs
│ │ │ ├── gtfs_diff_explain_test.exs
│ │ │ ├── gtfs_diff_select_live_test.exs
│ │ │ ├── notifications_live_test.exs
│ │ │ ├── proxy_config_live_test.exs
│ │ │ ├── proxy_requests_count_live_test.exs
│ │ │ ├── rate_limiter_live_test.exs
│ │ │ ├── reuses_live_test.exs
│ │ │ ├── user_space_datasets_live_test.exs
│ │ │ └── validate_resource_live_test.exs
│ │ ├── plugs/
│ │ │ ├── custom_secure_browser_headers_test.exs
│ │ │ ├── producer_data_test.exs
│ │ │ ├── rate_limiter_test.exs
│ │ │ └── worker_healthcheck_test.exs
│ │ ├── routing/
│ │ │ ├── canonical_host_redirect_test.exs
│ │ │ ├── headers_and_cookies_test.exs
│ │ │ ├── health_check_test.exs
│ │ │ ├── proxy_routing_test.exs
│ │ │ ├── put_locale_test.exs
│ │ │ └── router_test.exs
│ │ ├── session_test.exs
│ │ └── views/
│ │ ├── avatar_view_test.exs
│ │ ├── backoffice/
│ │ │ └── page_view_test.exs
│ │ ├── dataset_view_test.exs
│ │ ├── error_view_test.exs
│ │ ├── markdown_handler_test.exs
│ │ ├── no_html_in_markdown_templates_test.exs
│ │ ├── page_view_test.exs
│ │ └── resource_view_test.exs
│ ├── unlock/
│ │ ├── batch_metrics_test.exs
│ │ ├── config_fetcher_test.exs
│ │ ├── controllers/
│ │ │ └── unlock_controller_test.exs
│ │ ├── dynamic_irve/
│ │ │ └── feed_worker_test.exs
│ │ ├── dynamic_irve_integration_test.exs
│ │ ├── enforce_ttl_test.exs
│ │ ├── finch_impl_test.exs
│ │ ├── github_config_test.exs
│ │ ├── plugs/
│ │ │ └── token_auth_test.exs
│ │ ├── shared_test.exs
│ │ ├── siri_test.exs
│ │ └── test_helper.exs
│ ├── with_mock_must_not_use_async_check_test.exs
│ ├── with_mox_verify_on_exit_test.exs
│ └── zip_test.exs
├── blog/
│ └── README.md
├── config/
│ ├── config.exs
│ ├── data_sharing_pilot.exs
│ ├── database.exs
│ ├── datagouvfr.exs
│ ├── dev.exs
│ ├── dev.secret.template.exs
│ ├── gbfs_validator.exs
│ ├── gtfs_validator.exs
│ ├── mail.exs
│ ├── prod.exs
│ ├── proxy-config.sample.yml
│ ├── runtime.exs
│ └── test.exs
├── dialyzer-plt/
│ └── .gitkeep
├── docker/
│ └── database/
│ ├── Dockerfile
│ ├── create_test_db.sh
│ ├── readme.md
│ └── restore_db.sh
├── docker-compose.yml
├── docker_phoenix_startup.sh
├── docs/
│ ├── contacts.md
│ ├── data_model.livemd
│ ├── inventaire_donnees_geographiques_septembre_2023.md
│ ├── scaleway/
│ │ ├── README.md
│ │ ├── bucket_lifecycle_configuration_production.json
│ │ ├── bucket_lifecycle_configuration_staging.json
│ │ ├── bucket_policy_production.json
│ │ └── bucket_policy_staging.json
│ └── upgrade_versions.md
├── generate_deps_changelogs.exs
├── glossary.md
├── learning_track.md
├── livebook/
│ ├── irve-total.livemd
│ └── irve-watts-detection.livemd
├── mix.exs
├── ops_tests/
│ └── ops_tests.exs
├── restore_db.sh
├── screens/
│ ├── screens.exs
│ ├── screens.livemd
│ └── vehicles.livemd
└── scripts/
├── .gitignore
├── README.md
├── api/
│ ├── .gitignore
│ ├── filter_gtfs_rt_by_entity_types.exs
│ └── spec_check.exs
├── api_datasets_check.exs
├── backfill_multi_validation_binary_result.exs
├── backfill_multi_validation_digest.exs
├── backfill_netex_metadata.exs
├── backups-analysis.livemd
├── chouette_valid_rulesets.exs
├── compare-json.sh
├── compare_http.exs
├── contacts/
│ ├── contact_export.exs
│ ├── insert_contacts.exs
│ └── insert_notification_subscriptions.exs
├── debugging.livemd
├── download_resource_history_files.exs
├── drop_and_recreate_database.exs
├── elixir_predicate_upgrade.exs
├── git_diff_experiment.exs
├── gtfs_expiry.exs
├── hash_compute_experiment.exs
├── http_generic_testing.exs
├── ingest_resource_history_files.exs
├── internal_zip_checksum_experiment.exs
├── irve/
│ ├── .gitignore
│ ├── analyze-irve.exs
│ ├── difference.exs
│ ├── dump-simple-consolidation.exs
│ ├── dyn-analysis.exs
│ ├── dynamic-irve.exs
│ ├── horodatage-formats.exs
│ ├── process-one.exs
│ ├── process-raw-static-consolidation.exs
│ ├── process-simple-consolidation.exs
│ ├── report-on-simple-consolidation.exs
│ ├── stats.exs
│ ├── validate.exs
│ └── validate_and_import_local_file.exs
├── irve_diff.livemd
├── memory.exs
├── netex-accessibilite.exs
├── netex_analyzer.exs
├── netex_layout_analyzer.exs
├── notion_link_databases.exs
├── oban/
│ ├── oban_experiments.exs
│ └── oban_suite.exs
├── pagination_example.exs
├── registre-arrets.exs
├── req_httpoison_testing.exs
├── req_stream.exs
├── search_engine.exs
├── siri/
│ ├── siri_check.exs
│ ├── siri_create_doc.exs
│ └── siri_request_parsing.exs
└── subquery.exs
SYMBOL INDEX (6615 symbols across 1066 files)
FILE: apps/shared/lib/application.ex
class Shared.Application (line 1) | defmodule Shared.Application
method start (line 6) | def start(_type, _args) do
method cache_name (line 23) | def cache_name, do: Shared.Cachex
FILE: apps/shared/lib/appsignal_filter.ex
class TransportWeb.Plugs.AppSignalFilter (line 1) | defmodule TransportWeb.Plugs.AppSignalFilter
method init (line 16) | def init(options), do: options
method call (line 18) | def call(%Plug.Conn{} = conn, _opts) do
method must_ignore? (line 29) | defp must_ignore?(%Plug.Conn{} = conn) do
FILE: apps/shared/lib/cldr.ex
class Transport.Cldr (line 1) | defmodule Transport.Cldr
FILE: apps/shared/lib/conditional_json_encoder.ex
class Transport.Shared.ConditionalJSONEncoder (line 1) | defmodule Transport.Shared.ConditionalJSONEncoder
method encode_to_iodata! (line 35) | def encode_to_iodata!(data) do
FILE: apps/shared/lib/data_visualization.ex
class Transport.DataVisualization (line 1) | defmodule Transport.DataVisualization
method impl (line 8) | defp impl, do: Application.get_env(:transport, :data_visualization)
method has_features (line 9) | def has_features(validations), do: impl().has_features(validations)
method validation_data_vis (line 10) | def validation_data_vis(validations), do: impl().validation_data_vis(v...
class Transport.DataVisualization.Impl (line 13) | defmodule Transport.DataVisualization.Impl
method has_features (line 21) | def has_features(nil), do: false
method has_features (line 22) | def has_features(data_visualization), do: not Enum.empty?(data_visua...
method validation_data_vis (line 26) | def validation_data_vis(nil), do: nil
method validation_data_vis (line 28) | def validation_data_vis(validations) do
method data_vis_per_issue_type (line 34) | defp data_vis_per_issue_type(issues) do
FILE: apps/shared/lib/date_time_display.ex
class Shared.DateTimeDisplay (line 1) | defmodule Shared.DateTimeDisplay
method format_date (line 25) | def format_date(%DateTime{} = datetime, locale), do: format_date(DateT...
method format_date (line 26) | def format_date(%Date{} = date, "fr"), do: Calendar.strftime(date, "%d...
method format_date (line 27) | def format_date(%Date{} = date, "en"), do: Calendar.strftime(date, "%Y...
method format_date (line 28) | def format_date(%Date{} = date, _), do: format_date(date, "fr")
method format_date (line 34) | def format_date(nil, _), do: ""
method format_date (line 36) | def format_date(date, locale, iso_extended: true) do
method format_datetime_to_date (line 48) | def format_datetime_to_date(%DateTime{} = dt, locale) do
method format_datetime_to_date (line 52) | def format_datetime_to_date(nil, _), do: ""
method format_datetime_to_paris (line 87) | def format_datetime_to_paris(dt, locale), do: format_datetime_to_paris...
method format_datetime_to_paris (line 89) | def format_datetime_to_paris(%DateTime{} = dt, locale, options) do
method format_datetime_to_paris (line 95) | def format_datetime_to_paris(%NaiveDateTime{} = ndt, locale, options) do
method format_datetime_to_paris (line 107) | def format_datetime_to_paris(nil, _, _), do: ""
method format_time_to_paris (line 142) | def format_time_to_paris(dt, locale) do
method format_time_to_paris (line 146) | def format_time_to_paris(%DateTime{} = dt, locale, options) do
method format_time_to_paris (line 152) | def format_time_to_paris(%NaiveDateTime{} = ndt, locale, options) do
method format_time_to_paris (line 164) | def format_time_to_paris(nil, _, _), do: ""
method format_duration (line 218) | def format_duration(duration_in_seconds, locale) do
method relative_datetime_in_days (line 237) | def relative_datetime_in_days(days, locale) do
method relative_datetime_in_seconds (line 241) | def relative_datetime_in_seconds(seconds, locale) do
method convert_to_paris_time (line 246) | def convert_to_paris_time(%DateTime{} = dt) do
method convert_to_paris_time (line 250) | def convert_to_paris_time(%NaiveDateTime{} = ndt) do
method get_localized_datetime_format (line 254) | defp get_localized_datetime_format("en" = locale, options) do
method get_localized_datetime_format (line 258) | defp get_localized_datetime_format(locale, options) do
method get_localized_time_format (line 262) | defp get_localized_time_format("en", options) do
method get_localized_time_format (line 270) | defp get_localized_time_format(_locale, options) do
FILE: apps/shared/lib/hasher.ex
class Hasher (line 20) | defmodule Hasher
class Hasher.Wrapper (line 1) | defmodule Hasher.Wrapper
method impl (line 7) | def impl, do: Application.get_env(:transport, :hasher_impl)
class Hasher.Dummy (line 10) | defmodule Hasher.Dummy
method get_content_hash (line 17) | def get_content_hash(_url), do: "xxx"
method get_content_hash (line 29) | def get_content_hash(url) do
method get_content_hash_http (line 41) | def get_content_hash_http(url) do
method compute_sha256 (line 57) | def compute_sha256(url) do
method find_etag (line 79) | defp find_etag({"Etag", v}), do: v
method find_etag (line 80) | defp find_etag(_), do: nil
method compute_checksum (line 82) | def compute_checksum(stream, algorithm) do
method get_file_hash (line 90) | def get_file_hash(file_path) do
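The Hasher entries above (compute_checksum/2 over a stream, get_file_hash/1 over a path) suggest checksums are computed incrementally rather than by loading whole files. Below is a hedged, self-contained sketch of that technique using Erlang's :crypto; HasherSketch and its chunk size are illustrative and not the repository's code.

defmodule HasherSketch do
  # Hedged sketch (not the repository's code): stream a file through :crypto
  # to produce a hex digest, the kind of primitive the functions above appear
  # to build on.
  def file_hash(path, algorithm \\ :sha256) do
    path
    # read in 2 KiB chunks so large files are never fully loaded in memory
    |> File.stream!([], 2048)
    |> checksum(algorithm)
  end

  def checksum(stream, algorithm) do
    stream
    |> Enum.reduce(:crypto.hash_init(algorithm), &:crypto.hash_update(&2, &1))
    |> :crypto.hash_final()
    |> Base.encode16(case: :lower)
  end
end

# HasherSketch.file_hash("mix.exs") #=> 64-character lowercase hex string for :sha256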
FILE: apps/shared/lib/helpers.ex
class Helpers (line 1) | defmodule Helpers
method filename_from_url (line 28) | def filename_from_url(_), do: nil
method format_number_maybe_nil (line 62) | def format_number_maybe_nil(nil, options), do: options |> Keyword.fetc...
method format_number_maybe_nil (line 63) | def format_number_maybe_nil(n, options), do: format_number(n, options ...
method last_updated (line 66) | def last_updated(resources) do
FILE: apps/shared/lib/http_stream_v2.ex
class HTTPStreamV2 (line 1) | defmodule HTTPStreamV2
method fetch_status_and_hash (line 29) | def fetch_status_and_hash(url, max_redirect \\ @default_allowed_redire...
method fetch_status_and_hash (line 36) | def fetch_status_and_hash(url, max_redirect, redirect_count) do
method handle_stream_response (line 51) | defp handle_stream_response({:status, status}, acc) do
method handle_stream_response (line 58) | defp handle_stream_response({:headers, headers}, acc) do
method handle_stream_response (line 73) | defp handle_stream_response({:data, data}, acc) do
method compute_final_hash (line 78) | defp compute_final_hash(result) do
method fetch_status (line 90) | def fetch_status(url) do
method location_header (line 99) | defp location_header(headers) do
method handle_stream_status (line 103) | defp handle_stream_status({:status, status}, acc) do
method handle_stream_status (line 114) | defp handle_stream_status({:headers, headers}, acc) do
method fetch_status_follow_redirect (line 126) | def fetch_status_follow_redirect(
method fetch_status_follow_redirect (line 137) | def fetch_status_follow_redirect(url, max_redirect, redirect_count) do
FILE: apps/shared/lib/proxy.ex
class Shared.Proxy (line 1) | defmodule Shared.Proxy
method forwarded_headers_allowlist (line 13) | def forwarded_headers_allowlist do
FILE: apps/shared/lib/req_custom_cache.ex
class Transport.Shared.ReqCustomCache (line 1) | defmodule Transport.Shared.ReqCustomCache
method attach (line 12) | def attach(%Req.Request{} = request, options \\ []) do
method request_local_cache_step (line 20) | def request_local_cache_step(request) do
method response_local_cache_step (line 33) | def response_local_cache_step({request, response}) do
method cache_path (line 47) | def cache_path(cache_dir, %{method: :get} = request) do
method cache_path (line 62) | def cache_path(request) do
method load_cache (line 67) | def load_cache(path) do
method write_cache (line 72) | def write_cache(path, response) do
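ReqCustomCache above attaches itself to a %Req.Request{} and registers a request step plus a response step. The following is a hedged skeleton of that plugin shape using Req's public step API; the module name, step names and the :custom_cache_dir option are illustrative assumptions, not the repository's values.

defmodule ReqDiskCacheSketch do
  # Hedged skeleton of a Req plugin: attach/2 registers one request step
  # (cache lookup) and one response step (cache write).
  def attach(%Req.Request{} = request, options \\ []) do
    request
    |> Req.Request.register_options([:custom_cache_dir])
    |> Req.Request.merge_options(options)
    |> Req.Request.append_request_steps(local_cache: &cache_lookup_step/1)
    |> Req.Request.append_response_steps(local_cache: &cache_write_step/1)
  end

  # Request step: a real implementation would short-circuit with a cached
  # response here; this sketch just passes the request through.
  defp cache_lookup_step(%Req.Request{} = request), do: request

  # Response step: a real implementation would persist the response to disk.
  defp cache_write_step({%Req.Request{} = request, %Req.Response{} = response}),
    do: {request, response}
end

# Req.new(url: "https://example.org") |> ReqDiskCacheSketch.attach() |> Req.request!()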
FILE: apps/shared/lib/resource_schema.ex
class Transport.Shared.ResourceSchema (line 1) | defmodule Transport.Shared.ResourceSchema
method guess_name (line 29) | def guess_name(%{"schema" => %{"name" => schema}}, _dataset_type) do
method guess_name (line 33) | def guess_name(%{"url" => url, "format" => format}, "road-data") do
method guess_name (line 39) | def guess_name(_, _), do: nil
method guess_version (line 53) | def guess_version(%{"schema" => %{"version" => version}}) do
method guess_version (line 57) | def guess_version(_), do: nil
FILE: apps/shared/lib/s3.ex
class Transport.S3 (line 1) | defmodule Transport.S3
method bucket_name (line 9) | def bucket_name(feature) do
method permanent_url (line 15) | def permanent_url(feature, path \\ "") do
method bucket_names (line 26) | def bucket_names do
method delete_object! (line 32) | def delete_object!(feature, path) do
method stream_to_s3! (line 39) | def stream_to_s3!(feature, local_path, upload_path, options \\ []) do
method download_file (line 50) | def download_file(feature, remote_path, local_path) do
method get_object! (line 68) | def get_object!(feature, remote_path) do
method download_file! (line 78) | def download_file!(feature, remote_path, dest) do
method head_object! (line 86) | def head_object!(feature, remote_path) do
method remote_copy_file! (line 94) | def remote_copy_file!(feature, remote_path_src, remote_path_dest) do
FILE: apps/shared/lib/sentry_exception_filter.ex
class Transport.Shared.SentryExceptionFilter (line 1) | defmodule Transport.Shared.SentryExceptionFilter
method exclude_exception? (line 30) | def exclude_exception?(%FunctionClauseError{function: :do_match, arity...
method exclude_exception? (line 32) | def exclude_exception?(_exception, _source), do: false
FILE: apps/shared/lib/syntax_colors.ex
class Transport.Inspect (line 1) | defmodule Transport.Inspect
method syntax_colors (line 26) | def syntax_colors, do: @syntax_colors
method pretty_inspect (line 31) | def pretty_inspect(data) do
FILE: apps/shared/lib/time_wrapper.ex
class TimeWrapper (line 1) | defmodule TimeWrapper
method parse! (line 12) | def parse!(date_as_string, "{ISO:Extended}" = param) do
method parse! (line 16) | def parse!(date_as_string, "{YYYY}{0M}{0D}" = param) do
method parse! (line 22) | def parse!(datetime_as_string, "{WDshort}, {D} {Mshort} {YYYY} {h24}:{...
method diff (line 26) | def diff(first, second, :hours = param) do
method now (line 30) | def now do
method shift (line 34) | def shift(dt, months: months) do
method convert (line 38) | def convert(dt, "UTC") do
method convert_to_paris_time (line 42) | def convert_to_paris_time(dt) do
FILE: apps/shared/lib/validation/gbfs_validator.ex
class Shared.Validation.GBFSValidator (line 1) | defmodule Shared.Validation.GBFSValidator
class Summary (line 6) | defmodule Summary
class Wrapper (line 29) | defmodule Wrapper
method impl (line 33) | defp impl, do: Application.get_env(:transport, :gbfs_validator_impl)
method validate (line 36) | def validate(url), do: impl().validate(url)
class HTTPValidatorClient (line 39) | defmodule HTTPValidatorClient
method validate (line 48) | def validate(url) do
method validator_url (line 69) | defp validator_url, do: Application.fetch_env!(:transport, :gbfs_valid...
method call_api (line 71) | defp call_api(url) do
FILE: apps/shared/lib/validation/gtfs_validator.ex
class Shared.Validation.GtfsValidator (line 9) | defmodule Shared.Validation.GtfsValidator
class Shared.Validation.GtfsValidator.Wrapper (line 1) | defmodule Shared.Validation.GtfsValidator.Wrapper
method impl (line 6) | def impl, do: Application.get_env(:transport, :gtfs_validator, Share...
method validate (line 33) | def validate(gtfs),
method validate_from_url (line 41) | def validate_from_url(gtfs_url),
method build_validate_url (line 48) | defp build_validate_url, do: gtfs_validator_base_url() <> "/validate"
method remote_gtfs_validation_query (line 50) | def remote_gtfs_validation_query(gtfs_url) do
method gtfs_validator_base_url (line 55) | defp gtfs_validator_base_url do
method handle_validation_response (line 62) | defp handle_validation_response({:ok, %{status_code: 200, body: body}}...
method handle_validation_response (line 73) | defp handle_validation_response({_, %{body: body}}) do
method handle_validation_response (line 78) | defp handle_validation_response({:error, _}) do
method http_client (line 82) | defp http_client, do: Application.fetch_env!(:transport, :httpoison_impl)
method send_get_request (line 84) | defp send_get_request(url), do: http_client().get(url, [], recv_timeou...
method send_post_request (line 86) | defp send_post_request(url, body), do: http_client().post(url, body, [...
FILE: apps/shared/lib/validation/validator.ex
class Shared.Validation.Validator (line 1) | defmodule Shared.Validation.Validator
FILE: apps/shared/lib/wrapper/wrapper_httpoison.ex
class Transport.Shared.Wrapper.HTTPoison (line 1) | defmodule Transport.Shared.Wrapper.HTTPoison
method impl (line 9) | def impl, do: Application.get_env(:transport, :httpoison_impl)
FILE: apps/shared/lib/wrapper/wrapper_req.ex
class Transport.Req (line 31) | defmodule Transport.Req
class Transport.Req.Behaviour (line 1) | defmodule Transport.Req.Behaviour
method impl (line 37) | def impl, do: Application.get_env(:transport, :req_impl, __MODULE__)
class Transport.HTTPClient (line 46) | defmodule Transport.HTTPClient
method get! (line 52) | def get!(url, options) do
method get (line 58) | def get(url, options) do
method setup_cache (line 64) | defp setup_cache(options) do
FILE: apps/shared/lib/wrapper_ex_aws.ex
class Transport.Wrapper.ExAWS (line 1) | defmodule Transport.Wrapper.ExAWS
method impl (line 8) | def impl, do: Application.get_env(:transport, :ex_aws_impl)
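Many modules in this index (Hasher.Wrapper, Shared.Validation.GBFSValidator.Wrapper, Transport.Shared.Wrapper.HTTPoison, Transport.Wrapper.ExAWS, ...) share the same convention: a behaviour plus an impl/0 function that reads the concrete module from application config, so tests can swap in a Dummy or Mox mock. A minimal self-contained sketch of that pattern follows; MyApp.Pinger and the :pinger_impl key are hypothetical names, not part of the repository.

defmodule MyApp.Pinger do
  @callback ping(String.t()) :: :ok | {:error, term()}

  # Resolve the implementation at call time from application config,
  # falling back to the real module.
  def impl, do: Application.get_env(:my_app, :pinger_impl, MyApp.Pinger.Real)
  def ping(url), do: impl().ping(url)
end

defmodule MyApp.Pinger.Real do
  @behaviour MyApp.Pinger
  @impl true
  def ping(_url), do: :ok
end

# In test configuration one would point :pinger_impl at a mock or Dummy
# module, mirroring the Dummy/Stub modules visible throughout this index.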
FILE: apps/shared/mix.exs
class Shared.MixProject (line 1) | defmodule Shared.MixProject
method project (line 4) | def project do
method elixirc_paths (line 27) | defp elixirc_paths(:test), do: ["lib", "test/support"]
method elixirc_paths (line 28) | defp elixirc_paths(_), do: ["lib"]
method application (line 30) | def application do
method deps (line 37) | defp deps do
FILE: apps/shared/test/data_visualization_test.exs
class Transport.DataVisualizationTest (line 1) | defmodule Transport.DataVisualizationTest
FILE: apps/shared/test/date_time_display_test.exs
class Shared.DateTimeDisplayTest (line 1) | defmodule Shared.DateTimeDisplayTest
FILE: apps/shared/test/hasher_test.exs
class HasherTest (line 1) | defmodule HasherTest
method to_atom_keys (line 45) | defp to_atom_keys(map) do
FILE: apps/shared/test/helpers/helpers_test.exs
class Helpers.HelpersTest (line 1) | defmodule Helpers.HelpersTest
FILE: apps/shared/test/http_stream_v2_test.exs
class HTTPStreamV2.Test (line 1) | defmodule HTTPStreamV2.Test
FILE: apps/shared/test/resource_schema_test.exs
class Transport.Shared.ResourceSchemaTest (line 1) | defmodule Transport.Shared.ResourceSchemaTest
FILE: apps/shared/test/s3_test.exs
class Transport.S3Test (line 1) | defmodule Transport.S3Test
FILE: apps/shared/test/support/cache_case.ex
class Shared.CacheCase (line 1) | defmodule Shared.CacheCase
FILE: apps/shared/test/support/s3_test_utils.ex
class Transport.Test.S3TestUtils (line 1) | defmodule Transport.Test.S3TestUtils
method s3_mock_list_buckets (line 11) | def s3_mock_list_buckets(bucket_names \\ []) do
method s3_mock_stream_file (line 22) | def s3_mock_stream_file(start_path: expected_start_path, bucket: expec...
method s3_mock_stream_file (line 35) | def s3_mock_stream_file(
method s3_mock_stream_file (line 54) | def s3_mock_stream_file(
method s3_mocks_delete_object (line 73) | def s3_mocks_delete_object(expected_bucket, expected_path) do
method s3_mocks_remote_copy_file (line 85) | def s3_mocks_remote_copy_file(expected_bucket, expected_src_path, expe...
FILE: apps/shared/test/support/test_utils.ex
class Transport.Test.TestUtils (line 1) | defmodule Transport.Test.TestUtils
method wait_until (line 10) | def wait_until(fun, timeout_ms \\ 2_000, interval_ms \\ 10) do
method do_wait_until (line 15) | defp do_wait_until(fun, deadline, interval_ms, timeout_ms) do
method ensure_no_tmp_files! (line 28) | def ensure_no_tmp_files!(file_prefix) do
method zip_metadata (line 37) | def zip_metadata do
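wait_until/3 above looks like a polling helper for eventually-consistent assertions in tests. Here is a hedged, self-contained equivalent with the same signature; the retry-on-exception semantics are an assumption, since the repository's body is not shown in the index.

defmodule WaitUntilSketch do
  # Hedged sketch: retry a zero-arity function until it stops raising,
  # or give up once timeout_ms has elapsed.
  def wait_until(fun, timeout_ms \\ 2_000, interval_ms \\ 10) do
    deadline = System.monotonic_time(:millisecond) + timeout_ms
    attempt(fun, deadline, interval_ms)
  end

  defp attempt(fun, deadline, interval_ms) do
    fun.()
  rescue
    error ->
      if System.monotonic_time(:millisecond) < deadline do
        Process.sleep(interval_ms)
        attempt(fun, deadline, interval_ms)
      else
        # out of time: surface the last failure to the caller
        reraise error, __STACKTRACE__
      end
  end
end

# WaitUntilSketch.wait_until(fn -> true = File.exists?("some/expected/file") end)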
FILE: apps/shared/test/time_wrapper_test.exs
class TimeWrapperTest (line 1) | defmodule TimeWrapperTest
FILE: apps/shared/test/validation/gbfs_validator_test.exs
class GBFSValidatorTest (line 1) | defmodule GBFSValidatorTest
FILE: apps/shared/test/validation/gtfs_validator_test.exs
class GtfsValidatorTest (line 1) | defmodule GtfsValidatorTest
method assert_validation_report_is (line 43) | defp assert_validation_report_is({:ok, obtained_validation_report}, ex...
method create_gtfs (line 46) | defp create_gtfs, do: File.read!("#{__DIR__}/gtfs.zip")
method expect_validator_called_with_gtfs_and_return_report (line 48) | defp expect_validator_called_with_gtfs_and_return_report(_gtfs, expect...
method expect_validator_called_with_gtfs_url_and_return_report (line 58) | defp expect_validator_called_with_gtfs_url_and_return_report(gtfs_url,...
FILE: apps/transport/client/javascripts/app.js
method mounted (line 8) | mounted() {
method updated (line 11) | updated() {
method mounted (line 31) | mounted() {
FILE: apps/transport/client/javascripts/autocomplete.js
method keydown (line 77) | keydown(event) {
FILE: apps/transport/client/javascripts/dataset-map.js
function initilizeMap (line 4) | function initilizeMap(id) {
function createDatasetMap (line 12) | function createDatasetMap(divId, datasetDatagouvId) {
FILE: apps/transport/client/javascripts/explore.js
constant DEFAULT_LAT (line 24) | const DEFAULT_LAT = 48.8575
constant DEFAULT_LNG (line 25) | const DEFAULT_LNG = 2.3514
constant DEFAULT_ZOOM (line 26) | const DEFAULT_ZOOM = 6
function getMapParamsFromUrlPath (line 28) | function getMapParamsFromUrlPath() {
function prepareGTFSRTLayer (line 61) | function prepareGTFSRTLayer(layerData) {
function getTooltip (line 89) | function getTooltip({ object, layer }) {
function getLayers (line 120) | function getLayers(layers) {
function withQueryParams (line 124) | function withQueryParams(alter) {
function setQueryFlag (line 132) | function setQueryFlag(key) {
function setQueryParam (line 136) | function setQueryParam(key, value) {
function unsetQueryFlag (line 140) | function unsetQueryFlag(key) {
function startGTFSRT (line 156) | function startGTFSRT() {
function stopGTFSRT (line 170) | function stopGTFSRT() {
function startBNLC (line 189) | function startBNLC() {
function startZFE (line 205) | function startZFE() {
function startIRVE (line 221) | function startIRVE() {
function startGBFS (line 237) | function startGBFS() {
function updateBNLCLayer (line 259) | function updateBNLCLayer(geojson) {
function updateZFELayer (line 263) | function updateZFELayer(geojson) {
function updateIRVELayer (line 267) | function updateIRVELayer(geojson) {
function updateGBFSStationsLayer (line 271) | function updateGBFSStationsLayer(geojson) {
function trackEvent (line 276) | function trackEvent(layer) {
function createPointsLayer (line 284) | function createPointsLayer(geojson, id) {
function updateUrl (line 310) | function updateUrl() {
FILE: apps/transport/client/javascripts/gtfs.js
constant DEFAULT_LAT (line 9) | const DEFAULT_LAT = 48.8575
constant DEFAULT_LNG (line 10) | const DEFAULT_LNG = 2.3514
constant DEFAULT_ZOOM (line 11) | const DEFAULT_ZOOM = 6
function getMapParamsFromUrlPath (line 13) | function getMapParamsFromUrlPath() {
function showPopup (line 40) | function showPopup(info) {
function updateUrl (line 187) | function updateUrl() {
FILE: apps/transport/client/javascripts/map-config.js
constant IGN (line 11) | const IGN = {
FILE: apps/transport/client/javascripts/map-geojson.js
function initializeMap (line 4) | function initializeMap(id) {
function TransitLinesStyle (line 13) | function TransitLinesStyle(feature) {
function createStopsMarkers (line 21) | function createStopsMarkers(_geoJsonPoint, latlng) {
function setZoomEvents (line 25) | function setZoomEvents(map, fg) {
function GeojsonMap (line 35) | function GeojsonMap(fillMapFunction, mapDivId, infoDivId, geojsonUrl, fi...
function TransitMap (line 58) | function TransitMap(mapDivId, geojsonUrl) {
function GenericLinesStyle (line 106) | function GenericLinesStyle(_feature) {
function createPointsMarkers (line 110) | function createPointsMarkers(_geoJsonPoint, latlng) {
function formatPopupContent (line 114) | function formatPopupContent(content) {
function GenericMap (line 118) | function GenericMap(mapDivId, geojsonUrl) {
function TransitGeojsonMap (line 151) | function TransitGeojsonMap(mapDivId, infoDivId, geojsonUrl, filesize = 0...
function GenericGeojsonMap (line 155) | function GenericGeojsonMap(mapDivId, infoDivId, geojsonUrl, filesize = 0...
FILE: apps/transport/client/javascripts/map.js
function getAomsFG (line 52) | function getAomsFG(featureFunction, style, filter = null) {
function displayVehiclesSharing (line 72) | function displayVehiclesSharing(map, featureFunction) {
function displayQuality (line 93) | function displayQuality(featureFunction, style) {
function searchURL (line 111) | function searchURL(insee) {
function addStaticPTMapAOMS (line 118) | function addStaticPTMapAOMS(id, view) {
function addStaticPTUpToDate (line 166) | function addStaticPTUpToDate(id, view) {
function addStaticPTQuality (line 228) | function addStaticPTQuality(id, view) {
function addRealTimePtFormatMap (line 309) | function addRealTimePtFormatMap(id, view) {
function addPtFormatMap (line 459) | function addPtFormatMap(id, view) {
function addVehiclesSharingMap (line 528) | function addVehiclesSharingMap(id, view) {
FILE: apps/transport/client/javascripts/resource-viz.js
function getLabel (line 9) | function getLabel(obj, labelsList) {
function initilizeMap (line 18) | function initilizeMap(id) {
function coordinatesAreCorrect (line 26) | function coordinatesAreCorrect(lat, lon) {
function displayData (line 30) | function displayData(data, fg, { latField, lonField }) {
function setZoomEvents (line 50) | function setZoomEvents(map, fg) {
function createCSVmap (line 62) | function createCSVmap(id, resourceUrl) {
function setGBFSStationStyle (line 81) | function setGBFSStationStyle(feature, layer, field) {
function setGBFSFreeFloatingStyle (line 107) | function setGBFSFreeFloatingStyle(feature, layer) {
function setGBFSGeofencingStyle (line 135) | function setGBFSGeofencingStyle(feature, layer, globalRules) {
function fillStations (line 171) | function fillStations(stationsGeojson, bikesAvailable, docksAvailable) {
function clearFeatureGroups (line 187) | function clearFeatureGroups(featureGroups) {
function fillFreeFloating (line 193) | function fillFreeFloating(geojson, freeFloating) {
function fillGeofencingZones (line 202) | function fillGeofencingZones(geojson, geoFencingZones) {
function fillGBFSMap (line 215) | function fillGBFSMap(resourceUrl, fg, map, lang, firstCall = false) {
function setGBFSLayersControl (line 255) | function setGBFSLayersControl(fg, map, lang) {
function addCountdownDiv (line 277) | function addCountdownDiv(id, refreshInterval) {
function createGBFSmap (line 288) | function createGBFSmap(id, resourceUrl, lang) {
function createGeojsonMap (line 313) | function createGeojsonMap(id, resourceUrl) {
function removeViz (line 324) | function removeViz(consoleMsg) {
function createMap (line 336) | function createMap(id, resourceUrl, resourceFormat, lang = 'fr') {
FILE: apps/transport/client/javascripts/validation-map.js
function initilizeMap (line 4) | function initilizeMap(id) {
function getColor (line 12) | function getColor(severity) {
function createValidationMap (line 21) | function createValidationMap(divId, dataVis) {
FILE: apps/transport/lib/S3/aggregates_uploader.ex
class Transport.S3.AggregatesUploader (line 1) | defmodule Transport.S3.AggregatesUploader
method upload_aggregate! (line 19) | def upload_aggregate!(file, remote_path, remote_latest_path) do
method with_tmp_file (line 29) | def with_tmp_file(cb) do
method timestamp (line 39) | def timestamp, do: DateTime.utc_now() |> Calendar.strftime("%Y%m%d.%H%...
method mk_tmp_file (line 41) | defp mk_tmp_file do
method sha256! (line 49) | defp sha256!(file, checksum_file) do
method upload_files! (line 64) | defp upload_files!(file, checksum_file, remote_path) do
method update_latest_files! (line 73) | defp update_latest_files!({remote_path, remote_checksum_path}, remote_...
method checksum_filename (line 82) | defp checksum_filename(base_filename) do
method stream_upload! (line 86) | defp stream_upload!(file, filename) do
method copy! (line 90) | defp copy!(s3_path, filename) do
FILE: apps/transport/lib/S3/unzip.ex
class Transport.Unzip.S3 (line 10) | defmodule Transport.Unzip.S3
class Transport.Unzip.S3.Behaviour (line 1) | defmodule Transport.Unzip.S3.Behaviour
method impl (line 11) | def impl, do: Application.get_env(:transport, :unzip_s3_impl, __MODULE__)
method new (line 23) | def new(path, bucket, s3_config) do
method aws_s3_config (line 27) | defp aws_s3_config,
method get_file_stream (line 34) | def get_file_stream(file_name, zip_name, bucket_name) do
method get_unzip (line 39) | def get_unzip(zip_name, bucket_name) do
FILE: apps/transport/lib/converters/converter.ex
class Transport.Converters.Converter (line 1) | defmodule Transport.Converters.Converter
FILE: apps/transport/lib/data_frame/requiredness_processing.ex
class Transport.DataFrame.RequirednessProcessing (line 1) | defmodule Transport.DataFrame.RequirednessProcessing
method wrap_with_requiredness (line 12) | def wrap_with_requiredness(input_values_series, validation_series, req...
method wrap_with_requiredness (line 19) | def wrap_with_requiredness(input_values_series, validation_series, req...
FILE: apps/transport/lib/data_frame/validation_primitives.ex
class Transport.DataFrame.Validation.Primitives (line 1) | defmodule Transport.DataFrame.Validation.Primitives
method has_value (line 42) | def has_value(series) do
method simple_email_pattern (line 71) | def simple_email_pattern, do: @simple_email_pattern
method email? (line 86) | def email?(series) do
method boolean_value? (line 120) | def boolean_value?(series) do
method integer_value? (line 138) | def integer_value?(series) do
method numeric? (line 160) | def numeric?(series) do
method date? (line 207) | def date?(series, "%Y-%m-%d" = _format) do
method geopoint? (line 235) | def geopoint?(series, "array" = _format) do
FILE: apps/transport/lib/data_screens/data_screens.ex
class Transport.Screens (line 1) | defmodule Transport.Screens
method resources (line 7) | def resources do
method resources_with_duplicate_datagouv_id (line 12) | def resources_with_duplicate_datagouv_id do
method resources_with_duplicate_datagouv_id (line 19) | def resources_with_duplicate_datagouv_id(markdown: true) do
FILE: apps/transport/lib/datagouvfr/authentication.ex
class Datagouvfr.Authentication (line 20) | defmodule Datagouvfr.Authentication
class Datagouvfr.Authentication.Wrapper (line 1) | defmodule Datagouvfr.Authentication.Wrapper
method impl (line 7) | def impl, do: Application.get_env(:transport, :authentication_impl)
class Datagouvfr.Authentication.Dummy (line 10) | defmodule Datagouvfr.Authentication.Dummy
method get_token! (line 17) | def get_token!(_), do: %{token: "token"}
method client (line 33) | def client(token \\ nil) do
method authorize_url (line 42) | def authorize_url do
method get_token! (line 49) | def get_token!(params \\ []) do
method authorize_url (line 57) | def authorize_url(client, params) do
method get_token (line 63) | def get_token(client, params, headers) do
FILE: apps/transport/lib/datagouvfr/client.ex
class Datagouvfr.Client (line 1) | defmodule Datagouvfr.Client
FILE: apps/transport/lib/datagouvfr/client/api.ex
class Datagouvfr.Client.API (line 1) | defmodule Datagouvfr.Client.API
method http_client (line 14) | defp http_client, do: Application.fetch_env!(:transport, :httpoison_impl)
method api_key_headers (line 16) | def api_key_headers do
method decode_body (line 21) | def decode_body({:ok, %HTTPoison.Response{body: "", status_code: statu...
method decode_body (line 32) | def decode_body({:error, %HTTPoison.Error{} = error}) do
method post (line 42) | def post(path, body, headers, blank \\ false)
method request (line 79) | def request(method, path, body \\ "", headers \\ [], options \\ []) do
method request_url (line 91) | defp request_url(method, url, body \\ "", headers \\ [], options \\ []...
method perform_request (line 100) | def perform_request(method, url, body, headers, options) do
method perform_request (line 104) | def perform_request(_method, _url, _body, _headers, _options, 0) do
method stream (line 120) | def stream(path, method \\ :get) do
method fetch_all_pages! (line 144) | def fetch_all_pages!(path, method \\ :get) do
method fetch_all_pages (line 166) | def fetch_all_pages(path, method \\ :get) do
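stream/2 and fetch_all_pages!/2 above point at page-by-page traversal of a paginated API. Below is a hedged sketch of that lazy-pagination technique using Stream.unfold/2; the "data" and "next_page" keys, the module name and the fetch_page callback are assumptions made for illustration, not taken from the module.

defmodule PaginationSketch do
  # Hedged sketch of lazy pagination: fetch_page is any 1-arity function
  # returning a decoded page map; iteration stops when there is no next URL.
  def stream_pages(first_url, fetch_page) when is_function(fetch_page, 1) do
    Stream.unfold(first_url, fn
      nil ->
        nil

      url ->
        page = fetch_page.(url)
        {Map.get(page, "data", []), Map.get(page, "next_page")}
    end)
  end

  def fetch_all!(first_url, fetch_page) do
    first_url |> stream_pages(fetch_page) |> Enum.to_list() |> List.flatten()
  end
end

# PaginationSketch.fetch_all!(url, fn url -> Req.get!(url).body end)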
FILE: apps/transport/lib/datagouvfr/client/community_resources.ex
class Datagouvfr.Client.CommunityResources (line 1) | defmodule Datagouvfr.Client.CommunityResources
method impl (line 11) | defp impl, do: Application.get_env(:transport, :community_resources_impl)
method get (line 12) | def get(dataset_id), do: impl().get(dataset_id)
method delete (line 13) | def delete(dataset_id, resource_id), do: impl().delete(dataset_id, res...
class Datagouvfr.Client.CommunityResources.API (line 16) | defmodule Datagouvfr.Client.CommunityResources.API
method delete (line 33) | def delete(dataset_datagouv_id, resource_datagouv_id) do
class Datagouvfr.Client.StubCommunityResources (line 41) | defmodule Datagouvfr.Client.StubCommunityResources
method get (line 47) | def get(_dataset_id) do
method delete (line 51) | def delete(_dataset_id, _resource_id) do
FILE: apps/transport/lib/datagouvfr/client/datasets.ex
class Datagouvfr.Client.Datasets (line 1) | defmodule Datagouvfr.Client.Datasets
method new (line 69) | def new(map), do: impl().new(map)
method get_id_from_url (line 72) | def get_id_from_url(url), do: impl().get_id_from_url(url)
method get_infos_from_url (line 75) | def get_infos_from_url(url), do: impl().get_infos_from_url(url)
method get_followers (line 81) | def get_followers(dataset_id), do: impl().get_followers(dataset_id)
method get (line 88) | def get(id), do: impl().get(id)
method impl (line 90) | defp impl, do: Application.get_env(:transport, :datasets_impl)
class Datagouvfr.Client.Datasets.External (line 93) | defmodule Datagouvfr.Client.Datasets.External
method new (line 107) | def new(%{} = map) do
method get_id_from_url (line 114) | def get_id_from_url(url) do
method get_infos_from_url (line 122) | def get_infos_from_url(url) do
method get_followers (line 135) | def get_followers(dataset_id) do
method get (line 146) | def get(id) do
method accumulator_atomizer (line 153) | def accumulator_atomizer({key, value}, m) do
method keys (line 158) | defp keys do
FILE: apps/transport/lib/datagouvfr/client/discussions.ex
class Datagouvfr.Client.Discussions (line 36) | defmodule Datagouvfr.Client.Discussions
class Datagouvfr.Client.Discussions.Wrapper (line 1) | defmodule Datagouvfr.Client.Discussions.Wrapper
method get (line 8) | def get(id), do: impl().get(id)
method post (line 11) | def post(%Plug.Conn{} = conn, discussion_id, comment, close: close),
method post (line 15) | def post(%Plug.Conn{} = conn, dataset_id, title, comment), do: impl(...
method impl (line 17) | defp impl, do: Application.fetch_env!(:transport, :datagouvfr_discus...
class Datagouvfr.Client.Discussions.Dummy (line 20) | defmodule Datagouvfr.Client.Discussions.Dummy
method get (line 27) | def get(_), do: []
method post (line 30) | def post(%Plug.Conn{}, _, _, close: _), do: {:ok, nil}
method post (line 33) | def post(%Plug.Conn{}, _, _, _), do: {:ok, nil}
method post (line 55) | def post(%Plug.Conn{} = conn, discussion_id, comment) do
method post (line 61) | def post(%Plug.Conn{} = conn, discussion_id, comment, close: close) do
method get (line 90) | def get(id) do
FILE: apps/transport/lib/datagouvfr/client/oauth.ex
class Datagouvfr.Client.OAuth (line 1) | defmodule Datagouvfr.Client.OAuth
method get (line 15) | def get(conn_or_token, path, headers \\ [], opts \\ []) do
method post (line 20) | def post(%Plug.Conn{} = conn, path, body \\ "", headers \\ [], opts \\...
method put (line 27) | def put(%Plug.Conn{} = conn, path, body \\ "", headers \\ [], opts \\ ...
method delete (line 34) | def delete(%Plug.Conn{} = conn, path, headers \\ [], opts \\ []) do
method request (line 49) | def request(method, conn_or_token, path, body, headers, opts) do
method get_client (line 67) | def get_client(%Plug.Conn{} = conn) do
method get_client (line 74) | def get_client(%OAuth2.AccessToken{} = token), do: Authentication.clie...
FILE: apps/transport/lib/datagouvfr/client/organization.ex
class Datagouvfr.Client.Organization (line 13) | defmodule Datagouvfr.Client.Organization
class Datagouvfr.Client.Organization.Wrapper (line 1) | defmodule Datagouvfr.Client.Organization.Wrapper
method get (line 7) | def get(id, opts), do: impl().get(id, opts)
method get (line 8) | def get(id), do: impl().get(id)
method impl (line 10) | defp impl, do: Application.get_env(:transport, :organization_impl)
method get (line 25) | def get(id, opts \\ []) do
FILE: apps/transport/lib/datagouvfr/client/resources.ex
class Datagouvfr.Client.Resources (line 1) | defmodule Datagouvfr.Client.Resources
method update (line 12) | def update(map), do: impl().update(map)
method update (line 18) | def update(conn, map), do: impl().update(conn, map)
method get (line 21) | def get(%{"resource_id" => _id} = params), do: impl().get(params)
method get (line 23) | def get(_), do: %{}
method delete (line 26) | def delete(%Plug.Conn{} = conn, %{"dataset_id" => _, "resource_id" => ...
method impl (line 28) | def impl, do: Application.get_env(:transport, :resources_impl)
class Datagouvfr.Client.Resources.External (line 31) | defmodule Datagouvfr.Client.Resources.External
method update (line 51) | def update(
method update (line 70) | def update(conn, %{"resource_file" => _file} = params) do
method update (line 90) | def update(conn, %{"resource_id" => _} = params) do
method update (line 116) | def update(conn, %{"url" => _url, "dataset_id" => dataset_id} = para...
method get (line 133) | def get(%{"resource_id" => _id} = params) do
method delete (line 143) | def delete(%Plug.Conn{} = conn, %{"dataset_id" => _, "resource_id" =...
method put_mime (line 148) | defp put_mime(payload, params) do
method upload_query (line 157) | defp upload_query(conn, %{"resource_file" => %{path: filepath, filen...
method upload_query (line 166) | defp upload_query(_conn, _), do: {:error, "no file to upload"}
method make_path (line 169) | defp make_path(params, suffix \\ [])
method make_path (line 171) | defp make_path(%{"dataset_id" => d_id, "resource_id" => r_id}, suffix),
method make_path (line 174) | defp make_path(%{"dataset_id" => d_id}, suffix), do: Path.join(["dat...
method multipart_upload (line 176) | defp multipart_upload(filepath, filename) do
method remove_special_characters (line 197) | def remove_special_characters(value) do
FILE: apps/transport/lib/datagouvfr/client/reuses.ex
class Datagouvfr.Client.Reuses (line 22) | defmodule Datagouvfr.Client.Reuses
class Datagouvfr.Client.Reuses.Wrapper (line 1) | defmodule Datagouvfr.Client.Reuses.Wrapper
method impl (line 7) | def impl, do: Application.fetch_env!(:transport, :datagouvfr_reuses)
method get (line 9) | def get(dataset), do: impl().get(dataset)
class Datagouvfr.Client.Reuses.Dummy (line 12) | defmodule Datagouvfr.Client.Reuses.Dummy
method get (line 19) | def get(_), do: {:ok, []}
method get (line 33) | def get(%{datagouv_id: dataset_id}) do
method add_name (line 47) | defp add_name(%{"owner" => nil} = reuse), do: reuse |> Map.put("owner"...
method add_name (line 48) | defp add_name(reuse), do: put_in(reuse, ["owner", "name"], get_name(re...
method get_name (line 51) | defp get_name(%{"owner" => %{"name" => name}}), do: name
method get_name (line 52) | defp get_name(%{"organization" => %{"name" => name}}), do: name
method get_name (line 53) | defp get_name(%{"owner" => %{"first_name" => f_n, "last_name" => l_n}}...
method get_name (line 54) | defp get_name(reuse), do: reuse
FILE: apps/transport/lib/datagouvfr/client/user.ex
class Datagouvfr.Client.User (line 38) | defmodule Datagouvfr.Client.User
class Datagouvfr.Client.User.Wrapper (line 1) | defmodule Datagouvfr.Client.User.Wrapper
method impl (line 7) | def impl, do: Application.get_env(:transport, :user_impl)
class Datagouvfr.Client.User.Dummy (line 10) | defmodule Datagouvfr.Client.User.Dummy
method me (line 17) | def me(_),
method me (line 54) | def me(conn_or_token, exclude_fields \\ []) do
method get (line 63) | def get(id) do
method xfields (line 70) | defp xfields(exclude_fields) do
FILE: apps/transport/lib/db/administrative_division.ex
class DB.AdministrativeDivision (line 1) | defmodule DB.AdministrativeDivision
method changeset (line 55) | def changeset(administrative_division, attrs) do
method validate_type_insee_is_consistent (line 63) | def validate_type_insee_is_consistent(changeset) do
method load_searchable_administrative_divisions (line 82) | def load_searchable_administrative_divisions do
method search (line 86) | def search(territoires, term) do
method names (line 97) | def names(territories) do
method sorted (line 111) | def sorted(territories) do
method display_type (line 131) | def display_type(%DB.AdministrativeDivision{type: :commune}), do: dget...
method display_type (line 133) | def display_type(%DB.AdministrativeDivision{type: :departement}),
method display_type (line 136) | def display_type(%DB.AdministrativeDivision{type: :epci}), do: dgettex...
method display_type (line 137) | def display_type(%DB.AdministrativeDivision{type: :region}), do: dgett...
method display_type (line 138) | def display_type(%DB.AdministrativeDivision{type: :pays}), do: dgettex...
FILE: apps/transport/lib/db/aom.ex
class DB.AOM (line 1) | defmodule DB.AOM
method get (line 30) | def get(insee_commune_principale: nil), do: nil
method get (line 31) | def get(insee_commune_principale: insee), do: Repo.get_by(AOM, insee_c...
FILE: apps/transport/lib/db/api_request.ex
class DB.APIRequest (line 1) | defmodule DB.APIRequest
FILE: apps/transport/lib/db/autocomplete.ex
class DB.Autocomplete (line 1) | defmodule DB.Autocomplete
FILE: apps/transport/lib/db/breaking_news.ex
class DB.BreakingNews (line 1) | defmodule DB.BreakingNews
method get_breaking_news (line 15) | def get_breaking_news do
method set_breaking_news (line 26) | def set_breaking_news(%{msg: ""}) do
method set_breaking_news (line 31) | def set_breaking_news(%{level: level, msg: msg}) do
FILE: apps/transport/lib/db/commune.ex
class DB.Commune (line 1) | defmodule DB.Commune
FILE: apps/transport/lib/db/company.ex
class DB.Company (line 1) | defmodule DB.Company
method changeset (line 40) | def changeset(struct, attrs \\ %{}) do
FILE: apps/transport/lib/db/contact.ex
class DB.Contact (line 1) | defmodule DB.Contact
method base_query (line 57) | def base_query, do: from(c in __MODULE__, as: :contact)
method search (line 59) | def search(%{"q" => q}) do
method search (line 77) | def search(%{}), do: base_query()
method safe_like_pattern (line 86) | def safe_like_pattern(value) do
method insert! (line 90) | def insert!(%{} = fields), do: %__MODULE__{} |> changeset(fields) |> D...
method display_name (line 98) | def display_name(%__MODULE__{first_name: first_name, last_name: last_n...
method human? (line 111) | def human?(%__MODULE__{mailing_list_title: title}), do: is_nil(title)
method mailing_list? (line 119) | def mailing_list?(%__MODULE__{} = object), do: !human?(object)
method changeset (line 121) | def changeset(struct, attrs \\ %{}) do
method save_organizations (line 158) | defp save_organizations(%Ecto.Changeset{} = changeset, %{}), do: chang...
method cast_organization (line 162) | defp cast_organization(%Ecto.Changeset{} = changeset) do
method organization_name (line 182) | def organization_name([]), do: @default_org_name
method organization_name (line 184) | def organization_name(orgs) do
method organizations (line 194) | defp organizations(%{"organizations" => orgs}), do: orgs
method organizations (line 195) | defp organizations(%{organizations: orgs}), do: orgs
method find_org (line 197) | defp find_org(%{"id" => id}), do: DB.Repo.get(DB.Organization, id) || ...
method find_org (line 198) | defp find_org(%{id: id}), do: DB.Repo.get(DB.Organization, id) || %DB....
method find_org (line 199) | defp find_org(%{}), do: %DB.Organization{}
method validate_names_or_mailing_list_title (line 201) | defp validate_names_or_mailing_list_title(%Ecto.Changeset{} = changese...
method trim_fields (line 217) | defp trim_fields(%Ecto.Changeset{} = changeset, fields) do
method capitalize_fields (line 223) | defp capitalize_fields(%Ecto.Changeset{} = changeset, fields) do
method title_case (line 245) | def title_case(string) do
method capitalize_per_word (line 249) | defp capitalize_per_word(string, split_join_char) do
method uppercase_first (line 255) | defp uppercase_first(string) do
method cast_phone_numbers (line 261) | defp cast_phone_numbers(%Ecto.Changeset{} = changeset) do
method parse_phone_number (line 280) | defp parse_phone_number(%Ecto.Changeset{} = changeset, %ExPhoneNumber....
method lowercase_email (line 293) | defp lowercase_email(%Ecto.Changeset{} = changeset) do
method put_hashed_fields (line 297) | defp put_hashed_fields(%Ecto.Changeset{} = changeset) do
method admin_contact_ids (line 310) | def admin_contact_ids do
method admin_datagouv_ids (line 324) | def admin_datagouv_ids do
method regulator_datagouv_ids (line 338) | def regulator_datagouv_ids do
method admin_contacts (line 350) | def admin_contacts do
method regulator_contacts (line 364) | def regulator_contacts do
method list_inactive_contacts (line 378) | def list_inactive_contacts(%DateTime{} = threshold) do
method delete_inactive_contacts (line 389) | def delete_inactive_contacts(%DateTime{} = threshold) do
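The search/1 clauses and safe_like_pattern/1 above illustrate a common Ecto idiom: filters that narrow a query only when a parameter is present, plus escaping of LIKE metacharacters before interpolating user input. A hedged, illustrative sketch follows; the module name, field name and exact escaping rule are assumptions, not the repository's implementation.

defmodule ContactSearchSketch do
  import Ecto.Query

  # Narrow the query only when a non-empty "q" param is given...
  def filter_by_q(query, %{"q" => q}) when q != "" do
    pattern = "%" <> escape_like(q) <> "%"
    where(query, [c], ilike(c.last_name, ^pattern))
  end

  # ...otherwise return it untouched, so filters compose with |>.
  def filter_by_q(query, _params), do: query

  # Escape LIKE metacharacters so user input cannot act as a wildcard
  # (assumption: roughly what safe_like_pattern/1 is for).
  defp escape_like(value), do: String.replace(value, ["\\", "%", "_"], &("\\" <> &1))
end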
FILE: apps/transport/lib/db/data_conversion.ex
class DB.DataConversion (line 1) | defmodule DB.DataConversion
method base_query (line 21) | def base_query, do: from(dc in DB.DataConversion, as: :data_conversion)
method available_conversion_formats (line 26) | def available_conversion_formats,
method converter_to_use (line 44) | def converter_to_use(convert_from, convert_to) do
method join_resource_history_with_data_conversion (line 56) | def join_resource_history_with_data_conversion(%Ecto.Query{} = query, ...
method default_converters (line 70) | defp default_converters(convert_froms, convert_tos) do
method latest_data_conversions (line 79) | def latest_data_conversions(dataset_id, convert_to) do
method delete_data_conversions (line 93) | def delete_data_conversions(conversions) do
FILE: apps/transport/lib/db/data_import.ex
class DB.DataImport (line 1) | defmodule DB.DataImport
FILE: apps/transport/lib/db/data_import_batch.ex
class DB.DataImportBatch (line 1) | defmodule DB.DataImportBatch
FILE: apps/transport/lib/db/dataset.ex
class DB.Dataset (line 1) | defmodule DB.Dataset
method base_query (line 101) | def base_query do
method all_datasets (line 105) | def all_datasets, do: from(d in DB.Dataset, as: :dataset)
method archived (line 106) | def archived, do: base_query() |> where([dataset: d], not is_nil(d.arc...
method inactive (line 107) | def inactive, do: from(d in DB.Dataset, as: :dataset, where: not d.is_...
method hidden (line 108) | def hidden, do: from(d in DB.Dataset, as: :dataset, where: d.is_active...
method include_hidden_datasets (line 109) | def include_hidden_datasets(%Ecto.Query{} = query), do: or_where(query...
method base_with_hidden_datasets (line 110) | def base_with_hidden_datasets, do: base_query() |> include_hidden_data...
method reject_archived_datasets (line 111) | def reject_archived_datasets(%Ecto.Query{} = query), do: where(query, ...
method archived? (line 114) | def archived?(%__MODULE__{archived_at: nil}), do: false
method archived? (line 115) | def archived?(%__MODULE__{archived_at: %DateTime{}}), do: true
method active? (line 118) | def active?(%__MODULE__{is_active: is_active}), do: is_active
method join_from_dataset_to_metadata (line 125) | def join_from_dataset_to_metadata(query, validator_name) do
method last_resource_history (line 136) | def last_resource_history(dataset_id) do
method type_to_str_map (line 155) | def type_to_str_map,
method type_to_str (line 170) | def type_to_str(type), do: type_to_str_map()[type]
method subtype_to_str (line 173) | def subtype_to_str(subtype) do
method types (line 191) | def types, do: Map.keys(type_to_str_map())
method no_validations_query (line 194) | defp no_validations_query do
method preload_without_validations (line 219) | defp preload_without_validations do
method preload_without_validations (line 225) | defp preload_without_validations(query) do
method preload_legal_owners (line 230) | defp preload_legal_owners(query) do
method filter_by_fulltext (line 240) | def filter_by_fulltext(query, %{"q" => ""}), do: query
method filter_by_fulltext (line 242) | def filter_by_fulltext(query, %{"q" => q}) do
method filter_by_fulltext (line 250) | def filter_by_fulltext(query, _), do: query
method filter_by_region (line 253) | defp filter_by_region(query, %{"region" => region}) do
method filter_by_region (line 261) | defp filter_by_region(query, _), do: query
method filter_by_departement (line 264) | def filter_by_departement(query, %{"departement" => insee}) do
method filter_by_departement (line 317) | def filter_by_departement(query, _), do: query
method filter_by_category (line 320) | defp filter_by_category(query, %{"filter" => filter_key}) do
method filter_by_category (line 327) | defp filter_by_category(query, _), do: query
method filter_by_custom_tag (line 334) | def filter_by_custom_tag(%Ecto.Query{} = query, %{"custom_tag" => cust...
method filter_by_custom_tag (line 337) | def filter_by_custom_tag(%Ecto.Query{} = query, _), do: query
method filter_by_feature (line 361) | def filter_by_feature(query, %{"features" => feature}) do
method filter_by_feature (line 367) | def filter_by_feature(query, _), do: query
method filter_by_mode (line 376) | defp filter_by_mode(query, _), do: query
method filter_by_resource_format (line 379) | defp filter_by_resource_format(query, %{"format" => format}) do
method filter_by_resource_format (line 385) | defp filter_by_resource_format(query, _), do: query
method filter_by_type (line 388) | defp filter_by_type(query, %{"type" => type}), do: where(query, [d], d...
method filter_by_type (line 389) | defp filter_by_type(query, _), do: query
method filter_by_subtype (line 392) | defp filter_by_subtype(query, %{"subtype" => subtype}) do
method filter_by_subtype (line 398) | defp filter_by_subtype(query, _), do: query
method filter_by_epci (line 401) | def filter_by_epci(query, %{"epci" => epci}) do
method filter_by_epci (line 456) | def filter_by_epci(query, _), do: query
method filter_by_commune (line 459) | def filter_by_commune(query, %{"commune" => commune}) do
method filter_by_commune (line 511) | def filter_by_commune(query, _), do: query
method filter_by_offer (line 513) | defp filter_by_offer(query, %{"identifiant_offre" => identifiant_offre...
method filter_by_offer (line 519) | defp filter_by_offer(query, _), do: query
method filter_by_licence (line 522) | defp filter_by_licence(query, %{"licence" => "licence-ouverte"}),
method filter_by_licence (line 525) | defp filter_by_licence(query, %{"licence" => licence}), do: where(quer...
method filter_by_licence (line 526) | defp filter_by_licence(query, _), do: query
method filter_by_organization (line 529) | defp filter_by_organization(query, %{"organization_id" => organization...
method filter_by_organization (line 533) | defp filter_by_organization(query, _), do: query
method list_datasets (line 536) | def list_datasets(%{} = params) do
method list_datasets_no_order (line 543) | def list_datasets_no_order(%{} = params) do
method order_datasets (line 571) | def order_datasets(datasets, %{"order_by" => "alpha"}), do: order_by(d...
method order_datasets (line 572) | def order_datasets(datasets, %{"order_by" => "most_recent"}), do: orde...
method order_datasets (line 574) | def order_datasets(datasets, %{"q" => q}),
method order_datasets (line 581) | def order_datasets(datasets, _params) do
method changeset (line 609) | def changeset(_) do
method get_dataset (line 615) | def get_dataset(query, %{"dataset_id" => dataset_id}) do
method apply_changeset (line 623) | defp apply_changeset(%__MODULE__{} = dataset, params) do
method validate_spatial_area_overlap (line 708) | defp validate_spatial_area_overlap(%Ecto.Changeset{} = changeset, admi...
method add_organization (line 724) | defp add_organization(%Ecto.Changeset{} = changeset, %{"organization" ...
method add_organization (line 736) | defp add_organization(%Ecto.Changeset{} = changeset, _), do: changeset
method maybe_set_custom_logo_changed_at (line 738) | defp maybe_set_custom_logo_changed_at(%Ecto.Changeset{} = changeset) do
method get_legal_owners_aom (line 746) | defp get_legal_owners_aom(dataset, params) do
method get_legal_owners_region (line 763) | defp get_legal_owners_region(dataset, params) do
method get_administrative_divisions (line 777) | defp get_administrative_divisions(dataset, params) do
method get_offers (line 791) | defp get_offers(dataset, params) do
method get_dataset_subtypes (line 805) | defp get_dataset_subtypes(dataset, params) do
method format_error (line 820) | defp format_error(changeset), do: "#{inspect(Ecto.Changeset.traverse_e...
method link_to_datagouv (line 823) | def link_to_datagouv(%__MODULE__{} = dataset) do
method datagouv_url (line 833) | def datagouv_url(%__MODULE__{slug: slug}) do
method count_by_mode (line 838) | def count_by_mode(mode) do
method count_coach (line 844) | def count_coach do
method count_by_mode_query (line 851) | defp count_by_mode_query(mode) do
method count_by_type (line 859) | def count_by_type(type) do
method count_by_type (line 867) | def count_by_type, do: for(type <- __MODULE__.types(), into: %{}, do: ...
method count_public_transport_has_realtime (line 870) | def count_public_transport_has_realtime do
method count_by_custom_tag (line 878) | def count_by_custom_tag(custom_tag) do
method get_by_slug (line 883) | def get_by_slug(slug) do
method get_covered_area (line 901) | def get_covered_area(%__MODULE__{declarative_spatial_areas: declarativ...
method get_covered_area_or_nil (line 906) | def get_covered_area_or_nil(%__MODULE__{} = d) do
method official_resources (line 914) | def official_resources(%__MODULE__{resources: resources}),
method official_resources (line 917) | def official_resources(%__MODULE__{}), do: []
method community_resources (line 920) | def community_resources(%__MODULE__{resources: resources}),
method community_resources (line 923) | def community_resources(%__MODULE__{}), do: []
method formats (line 926) | def formats(%__MODULE__{} = dataset) do
method formats (line 935) | def formats(_), do: []
method validate (line 939) | def validate(d), do: validate(d, force_validation: false)
method validate (line 941) | def validate(%__MODULE__{id: id}, opt), do: validate(id, opt)
method datasets_for_user (line 978) | def datasets_for_user(conn_or_token) do
method get_resources_related_files (line 1034) | def get_resources_related_files(_), do: %{}
method target_conversion_formats (line 1042) | def target_conversion_formats(%__MODULE__{}) do
method validate_siren (line 1046) | defp validate_siren(%Ecto.Changeset{} = changeset) do
method validate_organization_type (line 1061) | defp validate_organization_type(changeset) do
method maybe_overwrite_licence (line 1072) | defp maybe_overwrite_licence(%Ecto.Changeset{} = changeset) do
method has_real_time (line 1082) | defp has_real_time(changeset) do
method set_is_hidden (line 1087) | defp set_is_hidden(%Ecto.Changeset{} = changeset) do
method set_population (line 1095) | defp set_population(%Ecto.Changeset{} = changeset, administrative_divi...
method resources_content_updated_at (line 1101) | def resources_content_updated_at(%__MODULE__{id: dataset_id}) do
method should_skip_history? (line 1119) | def should_skip_history?(%__MODULE__{type: type} = dataset) do
method has_licence_ouverte? (line 1123) | def has_licence_ouverte?(%__MODULE__{licence: licence}), do: licence i...
method climate_resilience_bill? (line 1131) | def climate_resilience_bill?(%__MODULE__{} = dataset), do: has_custom_...
method has_custom_tag? (line 1141) | def has_custom_tag?(%__MODULE__{custom_tags: custom_tags}, tag_name), ...
method logo (line 1150) | def logo(%__MODULE__{logo: logo, custom_logo: custom_logo}), do: custo...
method full_logo (line 1159) | def full_logo(%__MODULE__{full_logo: full_logo, custom_full_logo: cust...
method experimental? (line 1168) | def experimental?(%__MODULE__{} = dataset), do: has_custom_tag?(datase...
method reject_experimental_datasets (line 1170) | def reject_experimental_datasets(queryable) do
method has_subtype? (line 1174) | def has_subtype?(%DB.Dataset{} = dataset, slug) do
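The filter_by_* functions in DB.Dataset follow a recurring pattern: each clause matches one key in the params map and narrows the query, with a catch-all clause returning the query untouched, so list_datasets can pipe a base query through every filter unconditionally. The sketch below reproduces that pattern on an in-memory list instead of an Ecto query; the field names and filters chosen are illustrative assumptions.

```elixir
# Minimal sketch of the params-driven filter pipeline, on plain maps.
defmodule DatasetFilterSketch do
  def list_datasets(datasets, %{} = params) do
    datasets
    |> filter_by_type(params)
    |> filter_by_licence(params)
    |> filter_by_fulltext(params)
  end

  defp filter_by_type(datasets, %{"type" => type}),
    do: Enum.filter(datasets, &(&1.type == type))

  defp filter_by_type(datasets, _), do: datasets

  defp filter_by_licence(datasets, %{"licence" => licence}),
    do: Enum.filter(datasets, &(&1.licence == licence))

  defp filter_by_licence(datasets, _), do: datasets

  defp filter_by_fulltext(datasets, %{"q" => ""}), do: datasets

  defp filter_by_fulltext(datasets, %{"q" => q}) do
    q = String.downcase(q)
    Enum.filter(datasets, &String.contains?(String.downcase(&1.title), q))
  end

  defp filter_by_fulltext(datasets, _), do: datasets
end

# DatasetFilterSketch.list_datasets(
#   [%{type: "public-transit", licence: "lov2", title: "Réseau urbain"}],
#   %{"type" => "public-transit", "q" => "urbain"}
# )
```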
FILE: apps/transport/lib/db/dataset_follower.ex
class DB.DatasetFollower (line 1) | defmodule DB.DatasetFollower
method base_query (line 18) | def base_query, do: from(df in __MODULE__, as: :dataset_follower)
method changeset (line 20) | def changeset(%__MODULE__{} = struct, attrs \\ %{}) do
method follows_dataset? (line 30) | def follows_dataset?(nil, %DB.Dataset{}), do: false
method follows_dataset? (line 32) | def follows_dataset?(%DB.Contact{id: contact_id}, %DB.Dataset{id: data...
method follow! (line 39) | def follow!(%DB.Contact{id: contact_id}, %DB.Dataset{id: dataset_id}, ...
method unfollow! (line 49) | def unfollow!(%DB.Contact{id: contact_id}, %DB.Dataset{id: dataset_id}...
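DB.DatasetFollower exposes a small follow/unfollow API between a contact and a dataset. The toy sketch below mirrors that API surface with a MapSet of {contact_id, dataset_id} pairs standing in for the join table; it is an analogue of the shape of the API, not the persistence logic.

```elixir
# In-memory analogue of the follow!/unfollow!/follows_dataset? API surface.
defmodule DatasetFollowerSketch do
  def follow(follows, contact_id, dataset_id),
    do: MapSet.put(follows, {contact_id, dataset_id})

  def unfollow(follows, contact_id, dataset_id),
    do: MapSet.delete(follows, {contact_id, dataset_id})

  def follows_dataset?(follows, contact_id, dataset_id),
    do: MapSet.member?(follows, {contact_id, dataset_id})
end

# follows = DatasetFollowerSketch.follow(MapSet.new(), 1, 42)
# DatasetFollowerSketch.follows_dataset?(follows, 1, 42) => true
```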
FILE: apps/transport/lib/db/dataset_geographic_view.ex
class DB.DatasetGeographicView (line 1) | defmodule DB.DatasetGeographicView
FILE: apps/transport/lib/db/dataset_history.ex
class DB.DatasetHistory (line 1) | defmodule DB.DatasetHistory
method from_old_dataset_slug (line 18) | def from_old_dataset_slug(slug) do
FILE: apps/transport/lib/db/dataset_history_resources.ex
class DB.DatasetHistoryResources (line 1) | defmodule DB.DatasetHistoryResources
FILE: apps/transport/lib/db/dataset_monthly_metric.ex
class DB.DatasetMonthlyMetric (line 1) | defmodule DB.DatasetMonthlyMetric
method changeset (line 28) | def changeset(struct, attrs \\ %{}) do
method downloads_for_year (line 37) | def downloads_for_year(datasets, year) do
method year_months (line 59) | def year_months(year) do
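year_months/1 in DB.DatasetMonthlyMetric suggests building the twelve month keys used when aggregating downloads for a year. A minimal sketch follows; the zero-padded "YYYY-MM" key shape is an assumption about how metrics are stored.

```elixir
# Sketch: the twelve "YYYY-MM" keys for a given year.
defmodule YearMonthsSketch do
  def year_months(year) do
    for month <- 1..12 do
      "#{year}-#{month |> Integer.to_string() |> String.pad_leading(2, "0")}"
    end
  end
end

# YearMonthsSketch.year_months(2024)
# => ["2024-01", "2024-02", ..., "2024-12"]
```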
FILE: apps/transport/lib/db/dataset_score.ex
class DB.DatasetScore (line 1) | defmodule DB.DatasetScore
method changeset (line 19) | def changeset(%DB.DatasetScore{} = dataset_score, attrs) do
method between_0_and_1_if_exists (line 27) | def between_0_and_1_if_exists(:score, nil), do: []
method between_0_and_1_if_exists (line 28) | def between_0_and_1_if_exists(:score, _score), do: [score: "must be be...
method base_query (line 30) | def base_query, do: from(ds in DB.DatasetScore, as: :dataset_score)
method score_for_humans (line 92) | def score_for_humans(%__MODULE__{score: nil}), do: nil
method score_for_humans (line 93) | def score_for_humans(%__MODULE__{score: score}), do: Kernel.round(scor...
FILE: apps/transport/lib/db/dataset_subtype.ex
class DB.DatasetSubtype (line 1) | defmodule DB.DatasetSubtype
method changeset (line 19) | def changeset(%__MODULE__{} = struct, attrs \\ %{}) do
FILE: apps/transport/lib/db/default_token.ex
class DB.DefaultToken (line 1) | defmodule DB.DefaultToken
method base_query (line 16) | def base_query, do: from(df in __MODULE__, as: :default_token)
method changeset (line 18) | def changeset(%__MODULE__{} = struct, attrs \\ %{}) do
FILE: apps/transport/lib/db/departement.ex
class DB.Departement (line 1) | defmodule DB.Departement
FILE: apps/transport/lib/db/encrypted/binary.ex
class DB.Encrypted.Binary (line 1) | defmodule DB.Encrypted.Binary
FILE: apps/transport/lib/db/epci.ex
class DB.EPCI (line 1) | defmodule DB.EPCI
method changeset (line 22) | def changeset(epci, attrs) do
method allowed_types (line 30) | defp allowed_types,
method allowed_mode_financement (line 33) | defp allowed_mode_financement, do: ["Fiscalité professionnelle unique"...
FILE: apps/transport/lib/db/feature_usage.ex
class DB.FeatureUsage (line 1) | defmodule DB.FeatureUsage
method insert! (line 32) | def insert!(feature, contact_id, metadata) do
FILE: apps/transport/lib/db/geo_data/geo_data.ex
class DB.GeoData (line 1) | defmodule DB.GeoData
method geo_data_subquery_init (line 15) | defp geo_data_subquery_init(geo_data_import_id) do
method geo_data_as_geojson (line 22) | def geo_data_as_geojson(%{id: geo_data_import_id}, add_fields_func) do
method count_lines_for_geo_data_import (line 37) | def count_lines_for_geo_data_import(nil), do: 0
method count_lines_for_geo_data_import (line 40) | def count_lines_for_geo_data_import(geo_data_import) do
FILE: apps/transport/lib/db/geo_data/geo_data_import.ex
class DB.GeoDataImport (line 1) | defmodule DB.GeoDataImport
FILE: apps/transport/lib/db/gtfs/gtfs_agency.ex
class DB.GTFS.Agency (line 1) | defmodule DB.GTFS.Agency
FILE: apps/transport/lib/db/gtfs/gtfs_calendar.ex
class DB.GTFS.Calendar (line 1) | defmodule DB.GTFS.Calendar
FILE: apps/transport/lib/db/gtfs/gtfs_calendar_dates.ex
class DB.GTFS.CalendarDates (line 1) | defmodule DB.GTFS.CalendarDates
FILE: apps/transport/lib/db/gtfs/gtfs_stop.ex
class DB.GTFS.Stops (line 1) | defmodule DB.GTFS.Stops
FILE: apps/transport/lib/db/gtfs/gtfs_stop_times.ex
class DB.GTFS.StopTimes (line 1) | defmodule DB.GTFS.StopTimes
FILE: apps/transport/lib/db/gtfs/gtfs_trips.ex
class DB.GTFS.Trips (line 1) | defmodule DB.GTFS.Trips
FILE: apps/transport/lib/db/hidden_reuser_alert.ex
class DB.HiddenReuserAlert (line 1) | defmodule DB.HiddenReuserAlert
method base_query (line 29) | def base_query, do: from(hra in __MODULE__, as: :hidden_reuser_alert)
method changeset (line 31) | def changeset(struct, attrs \\ %{}) do
method hide! (line 47) | def hide!(%DB.Contact{id: contact_id}, %DB.Dataset{id: dataset_id}, ch...
method active_hidden_alerts (line 71) | def active_hidden_alerts(%DB.Contact{id: contact_id}) do
method hidden? (line 83) | def hidden?(hidden_alerts, dataset_id, check_type, opts \\ []) do
FILE: apps/transport/lib/db/irve_valid_file.ex
class DB.IRVEValidFile (line 1) | defmodule DB.IRVEValidFile
FILE: apps/transport/lib/db/irve_valid_pdc.ex
class DB.IRVEValidPDC (line 1) | defmodule DB.IRVEValidPDC
method raw_data_to_schema (line 63) | def raw_data_to_schema(raw_data) do
method insert_timestamps (line 69) | def insert_timestamps(data) do
method valid_fields (line 77) | defp valid_fields,
FILE: apps/transport/lib/db/logs_import.ex
class DB.LogsImport (line 1) | defmodule DB.LogsImport
FILE: apps/transport/lib/db/metrics.ex
class DB.Metrics (line 1) | defmodule DB.Metrics
method proxy_requests (line 57) | def proxy_requests(resources) do
FILE: apps/transport/lib/db/multi_validation.ex
class DB.MultiValidation (line 1) | defmodule DB.MultiValidation
method base_query (line 37) | def base_query(opts \\ []) do
method with_result (line 57) | def with_result do
method join_resource_history_with_latest_validation (line 65) | def join_resource_history_with_latest_validation(query, validator) do
method multi_validation_subquery (line 76) | defp multi_validation_subquery(v) do
method filter_on_validator (line 93) | defp filter_on_validator(query, validator_name) do
method already_validated? (line 99) | def already_validated?(%DB.ResourceHistory{id: id}, validator) do
method resource_latest_validation (line 108) | def resource_latest_validation(resource_id, validator, opts \\ [])
method resource_latest_validation (line 110) | def resource_latest_validation(_, nil, _), do: nil
method resource_latest_validations (line 129) | def resource_latest_validations(resource_id, validator, %DateTime{} = ...
method resource_history_latest_validation (line 139) | def resource_history_latest_validation(_, nil), do: nil
method dataset_latest_validation (line 156) | def dataset_latest_validation(dataset_id, validators, opts \\ []) do
method get_metadata_info (line 207) | def get_metadata_info(multi_validation, metadata_key, default \\ nil)
method get_metadata_info (line 209) | def get_metadata_info(%__MODULE__{metadata: %DB.ResourceMetadata{metad...
method get_metadata_info (line 213) | def get_metadata_info(_, _, default), do: default
method get_metadata_modes (line 223) | def get_metadata_modes(multi_validation, default \\ nil)
method get_metadata_modes (line 224) | def get_metadata_modes(%__MODULE__{metadata: %DB.ResourceMetadata{mode...
method get_metadata_modes (line 225) | def get_metadata_modes(_, default), do: default
method outdated? (line 247) | def outdated?(%DB.MultiValidation{} = multi_validation) do
method outdated? (line 260) | def outdated?(_), do: nil
FILE: apps/transport/lib/db/notification.ex
class DB.Notification (line 1) | defmodule DB.Notification
method base_query (line 30) | def base_query, do: from(n in __MODULE__, as: :notification)
method insert! (line 38) | def insert!(
method insert! (line 57) | def insert!(
method changeset (line 108) | def changeset(struct, attrs \\ %{}) do
method put_hashed_fields (line 127) | defp put_hashed_fields(%Ecto.Changeset{} = changeset) do
FILE: apps/transport/lib/db/notification_subscription.ex
class DB.NotificationSubscription (line 1) | defmodule DB.NotificationSubscription
method base_query (line 36) | def base_query, do: from(ns in __MODULE__, as: :notification_subscript...
method join_with_contact (line 38) | def join_with_contact(query) do
method insert (line 43) | def insert(%{} = fields), do: %__MODULE__{} |> changeset(fields) |> DB...
method insert! (line 44) | def insert!(%{} = fields), do: %__MODULE__{} |> changeset(fields) |> D...
method changeset (line 46) | def changeset(struct, attrs \\ %{}) do
method maybe_assoc_constraint_dataset (line 60) | defp maybe_assoc_constraint_dataset(%Ecto.Changeset{} = changeset) do
method subscriptions_for_reason_dataset_and_role (line 73) | def subscriptions_for_reason_dataset_and_role(reason, %DB.Dataset{id: ...
method subscriptions_for_reason_and_role (line 88) | def subscriptions_for_reason_and_role(reason, role) do
method producer_subscriptions_for_datasets (line 95) | def producer_subscriptions_for_datasets(dataset_ids, contact_id) do
method filter_out_admin_subscription (line 112) | def filter_out_admin_subscription(subscriptions, contact_id) do
method subscriptions_for_dataset_and_role (line 125) | def subscriptions_for_dataset_and_role(%DB.Dataset{id: dataset_id}, ro...
method create_producer_subscriptions (line 135) | def create_producer_subscriptions(%DB.Dataset{id: dataset_id}, %DB.Con...
method delete_other_producers_subscriptions (line 163) | def delete_other_producers_subscriptions(%DB.Dataset{id: dataset_id}, ...
method validate_reason_is_allowed_for_subscriptions (line 178) | defp validate_reason_is_allowed_for_subscriptions(changeset) do
method validate_reason_by_role (line 189) | def validate_reason_by_role(changeset) do
method validate_reason_by_scope (line 200) | def validate_reason_by_scope(changeset) do
FILE: apps/transport/lib/db/offer.ex
class DB.Offer (line 1) | defmodule DB.Offer
method changeset (line 26) | def changeset(model, attrs) do
method transform_modes (line 53) | defp transform_modes(%Ecto.Changeset{} = changeset, %{"modes" => modes...
method add_aom (line 57) | defp add_aom(%Ecto.Changeset{} = changeset, %{"aom_siren" => aom_siren...
FILE: apps/transport/lib/db/organization.ex
class DB.Organization (line 1) | defmodule DB.Organization
method base_query (line 27) | def base_query, do: from(o in __MODULE__, as: :organization)
method changeset (line 29) | def changeset(struct, attrs \\ %{}) do
FILE: apps/transport/lib/db/processing_report.ex
class DB.ProcessingReport (line 1) | defmodule DB.ProcessingReport
method changeset (line 19) | def changeset(processing_report, attrs) do
FILE: apps/transport/lib/db/proxy_request.ex
class DB.ProxyRequest (line 1) | defmodule DB.ProxyRequest
FILE: apps/transport/lib/db/region.ex
class DB.Region (line 1) | defmodule DB.Region
method national (line 22) | def national, do: DB.Repo.get_by!(DB.Region, nom: "National")
FILE: apps/transport/lib/db/repo.ex
class DB.Repo (line 1) | defmodule DB.Repo
FILE: apps/transport/lib/db/resource.ex
class DB.Resource (line 1) | defmodule DB.Resource
method base_query (line 79) | def base_query, do: from(r in DB.Resource, as: :resource)
method join_dataset_with_resource (line 81) | def join_dataset_with_resource(query) do
method filter_on_resource_id (line 86) | def filter_on_resource_id(query, resource_id) do
method filter_on_dataset_id (line 90) | def filter_on_dataset_id(query, dataset_id) do
method changeset (line 94) | def changeset(resource, params) do
method gtfs? (line 125) | def gtfs?(%__MODULE__{format: "GTFS"}), do: true
method gtfs? (line 126) | def gtfs?(_), do: false
method gbfs? (line 129) | def gbfs?(%__MODULE__{format: "gbfs"}), do: true
method gbfs? (line 130) | def gbfs?(_), do: false
method netex? (line 133) | def netex?(%__MODULE__{format: "NeTEx"}), do: true
method netex? (line 134) | def netex?(_), do: false
method gtfs_rt? (line 137) | def gtfs_rt?(%__MODULE__{format: "gtfs-rt"}), do: true
method gtfs_rt? (line 138) | def gtfs_rt?(%__MODULE__{format: "gtfsrt"}), do: true
method gtfs_rt? (line 139) | def gtfs_rt?(_), do: false
method siri? (line 142) | def siri?(%__MODULE__{format: "SIRI"}), do: true
method siri? (line 143) | def siri?(_), do: false
method siri_lite? (line 146) | def siri_lite?(%__MODULE__{format: "SIRI Lite"}), do: true
method siri_lite? (line 147) | def siri_lite?(_), do: false
method documentation? (line 150) | def documentation?(%__MODULE__{type: "documentation"}), do: true
method documentation? (line 151) | def documentation?(_), do: false
method community_resource? (line 154) | def community_resource?(%__MODULE__{is_community_resource: true}), do:...
method community_resource? (line 155) | def community_resource?(_), do: false
method real_time? (line 166) | def real_time?(%__MODULE__{} = resource) do
method requestor_ref (line 185) | def requestor_ref(%__MODULE__{format: "SIRI", dataset: %DB.Dataset{} =...
method requestor_ref (line 192) | def requestor_ref(%__MODULE__{}), do: nil
method has_schema? (line 195) | def has_schema?(%__MODULE__{schema_name: schema_name}), do: not is_nil...
method can_direct_download? (line 198) | def can_direct_download?(resource) do
method pan_resource? (line 216) | def pan_resource?(%__MODULE__{dataset: %DB.Dataset{organization_id: or...
method pan_resource? (line 220) | def pan_resource?(%__MODULE__{}), do: false
method other_resources_query (line 223) | def other_resources_query(%__MODULE__{} = resource),
method other_resources (line 231) | def other_resources(%__MODULE__{} = r),
method by_id (line 237) | def by_id(query, id) do
method get_related_files (line 244) | def get_related_files(%__MODULE__{id: resource_id}) do
method get_related_geojson_info (line 249) | def get_related_geojson_info(resource_id), do: get_related_conversion_...
method get_related_conversion_info (line 254) | def get_related_conversion_info(nil, _), do: nil
method get_related_conversion_info (line 256) | def get_related_conversion_info(resource_id, format) do
method content_updated_at (line 287) | def content_updated_at(%__MODULE__{id: id}), do: content_updated_at(id)
method content_updated_at (line 289) | def content_updated_at(resource_id) do
method download_validation_report_url (line 307) | def download_validation_report_url(%Plug.Conn{} = conn, %__MODULE__{fo...
method download_validation_report_url (line 311) | def download_validation_report_url(%Plug.Conn{} = _conn, %__MODULE__{}...
method download_url (line 315) | def download_url(%__MODULE__{} = resource) do
method download_url (line 320) | def download_url(
method download_url (line 341) | def download_url(%__MODULE__{dataset: %DB.Dataset{} = dataset} = resou...
method hosted_on_datagouv? (line 371) | def hosted_on_datagouv?(%__MODULE__{url: url}) do
method needs_stable_url? (line 376) | defp needs_stable_url?(%__MODULE__{latest_url: nil}), do: false
method needs_stable_url? (line 378) | defp needs_stable_url?(%__MODULE__{url: url} = resource) do
method link_to_folder? (line 394) | defp link_to_folder?(%URI{path: path}) do
method served_by_proxy? (line 413) | def served_by_proxy?(%__MODULE__{url: url}) do
method proxy_slug (line 425) | def proxy_slug(%__MODULE__{url: url} = resource) do
method proxy_namespace (line 440) | def proxy_namespace(%__MODULE__{}), do: "proxy"
method no_schema_name_for_public_transport (line 442) | def no_schema_name_for_public_transport(%Ecto.Changeset{} = changeset) do
method count_by_format (line 458) | def count_by_format(format) do
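The gtfs?/gbfs?/netex?/gtfs_rt? predicates in DB.Resource rely on pattern matching the resource's format string, with a catch-all clause returning false. Standalone sketch of the same idiom on a bare map; the accepted format strings mirror the listing ("GTFS", "gtfs-rt"/"gtfsrt") but anything beyond that is an assumption.

```elixir
# Sketch of format predicates built on multi-clause pattern matching.
defmodule ResourceFormatSketch do
  def gtfs?(%{format: "GTFS"}), do: true
  def gtfs?(_), do: false

  def gtfs_rt?(%{format: "gtfs-rt"}), do: true
  def gtfs_rt?(%{format: "gtfsrt"}), do: true
  def gtfs_rt?(_), do: false
end

# ResourceFormatSketch.gtfs?(%{format: "GTFS"}) => true
# ResourceFormatSketch.gtfs_rt?(%{format: "gtfsrt"}) => true
```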
FILE: apps/transport/lib/db/resource_download.ex
class DB.ResourceDownload (line 1) | defmodule DB.ResourceDownload
FILE: apps/transport/lib/db/resource_history.ex
class DB.ResourceHistory (line 1) | defmodule DB.ResourceHistory
method base_query (line 25) | def base_query, do: from(rh in DB.ResourceHistory, as: :resource_history)
method join_resource_with_latest_resource_history (line 27) | def join_resource_with_latest_resource_history(query) do
method join_dataset_with_latest_resource_history (line 40) | def join_dataset_with_latest_resource_history(query) do
method latest_resource_history_query (line 46) | defp latest_resource_history_query(resource_id) do
method latest_resource_history (line 54) | def latest_resource_history(%DB.Resource{id: id}), do: latest_resource...
method latest_resource_history (line 56) | def latest_resource_history(resource_id) do
method latest_dataset_resources_history_infos (line 63) | def latest_dataset_resources_history_infos(%DB.Dataset{id: dataset_id}...
method gtfs_flex? (line 81) | def gtfs_flex?(%__MODULE__{payload: %{"format" => "GTFS", "filenames" ...
method gtfs_flex? (line 85) | def gtfs_flex?(%__MODULE__{}), do: false
FILE: apps/transport/lib/db/resource_metadata.ex
class DB.ResourceMetadata (line 1) | defmodule DB.ResourceMetadata
method base_query (line 21) | def base_query, do: from(rm in DB.ResourceMetadata, as: :metadata)
method join_validation_with_metadata (line 23) | def join_validation_with_metadata(query) do
method join_resource_with_metadata (line 28) | def join_resource_with_metadata(query) do
method where_up_to_date (line 33) | def where_up_to_date(query) do
FILE: apps/transport/lib/db/resource_monthly_metric.ex
class DB.ResourceMonthlyMetric (line 1) | defmodule DB.ResourceMonthlyMetric
method changeset (line 21) | def changeset(struct, attrs \\ %{}) do
method downloads_for_year (line 30) | def downloads_for_year(resources, year) do
method download_statistics (line 45) | def download_statistics(datasets) do
FILE: apps/transport/lib/db/resource_related.ex
class DB.ResourceRelated (line 1) | defmodule DB.ResourceRelated
method reason_to_str (line 21) | def reason_to_str(%__MODULE__{reason: reason}) do
FILE: apps/transport/lib/db/resource_unavailability.ex
class DB.ResourceUnavailability (line 1) | defmodule DB.ResourceUnavailability
method ongoing_unavailability (line 21) | def ongoing_unavailability(%Resource{id: resource_id}) do
method floor_float (line 48) | def floor_float(float, precision \\ 1) do
method uptime_per_day (line 79) | def uptime_per_day(%Resource{id: resource_id}, nb_days) do
method total_hours (line 103) | defp total_hours(rows, period_start) do
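floor_float/2 in DB.ResourceUnavailability hints at flooring a float to a given number of decimals, for instance so an availability of 99.97% is never displayed as 100%. A standalone sketch of that arithmetic follows; the default precision of 1 matches the listed signature, the rest is illustrative.

```elixir
# Sketch: floor a float at a given decimal precision.
defmodule FloorFloatSketch do
  def floor_float(float, precision \\ 1) do
    factor = :math.pow(10, precision)
    Float.floor(float * factor) / factor
  end
end

# FloorFloatSketch.floor_float(99.97) => 99.9
# FloorFloatSketch.floor_float(99.978, 2) => 99.97
```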
FILE: apps/transport/lib/db/reuse.ex
class DB.Reuse (line 1) | defmodule DB.Reuse
method base_query (line 38) | def base_query, do: from(r in __MODULE__, as: :reuse)
method search (line 40) | def search(%{} = args) do
method search_by_query (line 47) | def search_by_query(query, %{"q" => q}) do
method search_by_query (line 56) | def search_by_query(query, _), do: query
method search_by_dataset_type (line 65) | def search_by_dataset_type(query, _), do: query
method safe_like_pattern (line 74) | def safe_like_pattern(value) do
method changeset (line 78) | def changeset(model, attrs) do
method type_to_str (line 123) | def type_to_str(type) do
method cast_datasets (line 137) | defp cast_datasets(%Ecto.Changeset{} = changeset, %{"datasets" => data...
method transform_archived (line 149) | defp transform_archived(%Ecto.Changeset{} = changeset, params) do
method transform_featured (line 153) | defp transform_featured(%Ecto.Changeset{} = changeset, params) do
method transform_bool (line 157) | def transform_bool(%Ecto.Changeset{} = changeset, key, params) do
method transform_tags (line 164) | defp transform_tags(%Ecto.Changeset{} = changeset, %{"tags" => tags}) do
method transform_datagouv_id (line 168) | defp transform_datagouv_id(%Ecto.Changeset{} = changeset, %{"id" => id...
method transform_metric_keys (line 172) | defp transform_metric_keys(%Ecto.Changeset{} = changeset, attributes) do
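safe_like_pattern/1 in DB.Reuse points at sanitising user input before it is embedded in a SQL LIKE/ILIKE pattern. The sketch below strips the LIKE wildcard characters; removing them outright (rather than escaping) is an assumption about the policy.

```elixir
# Sketch: neutralise LIKE wildcards in user-provided search text.
defmodule SafeLikeSketch do
  @like_wildcards ["%", "_", "\\"]

  def safe_like_pattern(value) when is_binary(value),
    do: String.replace(value, @like_wildcards, "")
end

# SafeLikeSketch.safe_like_pattern("100%_sure") => "100sure"
```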
FILE: apps/transport/lib/db/reuser_improved_data.ex
class DB.ReuserImprovedData (line 1) | defmodule DB.ReuserImprovedData
method base_query (line 19) | def base_query, do: from(rm in __MODULE__, as: :reuser_improved_data)
method changeset (line 21) | def changeset(%__MODULE__{} = struct, attrs \\ %{}) do
FILE: apps/transport/lib/db/stats_history.ex
class DB.StatsHistory (line 1) | defmodule DB.StatsHistory
FILE: apps/transport/lib/db/table_size_history.ex
class DB.TableSizeHistory (line 1) | defmodule DB.TableSizeHistory
FILE: apps/transport/lib/db/token.ex
class DB.Token (line 1) | defmodule DB.Token
method base_query (line 24) | def base_query, do: from(t in __MODULE__, as: :token)
method personal_token? (line 26) | def personal_token?(%__MODULE__{organization_id: nil}), do: true
method personal_token? (line 27) | def personal_token?(%__MODULE__{}), do: false
method changeset (line 29) | def changeset(%__MODULE__{} = struct, attrs \\ %{}) do
method organization_id (line 39) | def organization_id(%Ecto.Changeset{} = changeset) do
method generate_secret (line 51) | defp generate_secret(%Ecto.Changeset{} = changeset) do
method put_hashed_fields (line 56) | defp put_hashed_fields(%Ecto.Changeset{} = changeset) do
FILE: apps/transport/lib/db/user_feedback.ex
class DB.UserFeedback (line 1) | defmodule DB.UserFeedback
method changeset (line 29) | def changeset(%__MODULE__{} = feedback, %{} = attrs) do
method features (line 40) | def features, do: @features
method ratings (line 43) | def ratings, do: @ratings
method assoc_contact_from_user_id (line 45) | def assoc_contact_from_user_id(changeset, user_id) do
method sanitize_inputs (line 56) | defp sanitize_inputs(%Ecto.Changeset{} = changeset, keys) do
method sanitize_field (line 60) | defp sanitize_field(%Ecto.Changeset{} = changeset, key) do
method lowercase_email (line 67) | defp lowercase_email(%Ecto.Changeset{} = changeset) do
FILE: apps/transport/lib/enroute/chouette_valid_rulesets_client.ex
class Transport.EnRoute.ChouetteValidRulesetsClient (line 48) | defmodule Transport.EnRoute.ChouetteValidRulesetsClient
class Transport.EnRoute.ChouetteValidRulesetsClient.Wrapper (line 1) | defmodule Transport.EnRoute.ChouetteValidRulesetsClient.Wrapper
method list_versions (line 25) | def list_versions(slug) do
method find_ruleset_id (line 38) | def find_ruleset_id(expected_slug) do
method impl (line 45) | def impl, do: Application.get_env(:transport, :enroute_rulesets_client)
method list_rulesets (line 57) | def list_rulesets do
method get_ruleset (line 62) | def get_ruleset(slug) do
method create_ruleset (line 71) | def create_ruleset(definition, name, slug) do
method update_ruleset (line 76) | def update_ruleset(definition, name, slug) do
method delete_ruleset (line 81) | def delete_ruleset(ruleset_id) do
method upsert_ruleset (line 89) | defp upsert_ruleset(method, path, definition, name, slug) do
method base_request (line 117) | defp base_request do
method auth (line 121) | defp auth do
method http_client (line 125) | defp http_client, do: Transport.Req.impl()
class Transport.EnRoute.ChouetteValidRulesetsClient.Slugs (line 128) | defmodule Transport.EnRoute.ChouetteValidRulesetsClient.Slugs
method check_slug! (line 136) | def check_slug!(slug) do
method versioned_slug? (line 151) | def versioned_slug?(slug) do
method sibling_slugs? (line 167) | def sibling_slugs?(slug_a, slug_b) do
method parse_slug (line 174) | defp parse_slug(slug) do
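The Slugs helpers (check_slug!/1, versioned_slug?/1, sibling_slugs?/2, parse_slug/1) suggest splitting a ruleset slug into a base name and an optional version, then comparing parts. The sketch below uses a "<base>:<version>" separator purely as an illustrative assumption; it is not the convention enforced by check_slug!/1.

```elixir
# Sketch: answer versioned_slug?/sibling_slugs? by comparing parsed slug parts.
defmodule RulesetSlugSketch do
  defp parse_slug(slug) do
    case String.split(slug, ":", parts: 2) do
      [base, version] -> {base, version}
      [base] -> {base, nil}
    end
  end

  def versioned_slug?(slug), do: not is_nil(elem(parse_slug(slug), 1))

  def sibling_slugs?(slug_a, slug_b),
    do: elem(parse_slug(slug_a), 0) == elem(parse_slug(slug_b), 0)
end

# RulesetSlugSketch.versioned_slug?("french-profile:2024") => true
# RulesetSlugSketch.sibling_slugs?("french-profile:2024", "french-profile:2025") => true
```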
FILE: apps/transport/lib/gtfs/utils.ex
class Transport.GTFS.Utils (line 1) | defmodule Transport.GTFS.Utils
method get_position (line 14) | def get_position(record, field) do
method convert_text_to_float (line 36) | def convert_text_to_float(input) do
method csv_get_with_default! (line 46) | def csv_get_with_default!(map, field, default_value, mandatory_column ...
method csv_get_with_default (line 66) | def csv_get_with_default(map, field, default_value) do
method to_stream_of_maps (line 80) | def to_stream_of_maps(file_stream) do
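convert_text_to_float/1 and csv_get_with_default/3 in Transport.GTFS.Utils suggest defensive parsing of GTFS CSV cells. A standalone sketch of such helpers follows; the handling of blank cells and whitespace is an assumption.

```elixir
# Sketch: lenient parsing of CSV cell values.
defmodule GTFSUtilsSketch do
  # Return nil for blank or non-numeric cells, otherwise parse the trimmed value.
  def convert_text_to_float(input) do
    case input |> to_string() |> String.trim() do
      "" ->
        nil

      text ->
        case Float.parse(text) do
          {value, _rest} -> value
          :error -> nil
        end
    end
  end

  # Fetch a CSV column, falling back to a default when it is missing or blank.
  def csv_get_with_default(map, field, default_value) do
    case Map.get(map, field) do
      nil -> default_value
      "" -> default_value
      value -> value
    end
  end
end

# GTFSUtilsSketch.convert_text_to_float(" 48.85 ") => 48.85
# GTFSUtilsSketch.csv_get_with_default(%{"stop_lat" => ""}, "stop_lat", nil) => nil
```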
FILE: apps/transport/lib/http/utils.ex
class Transport.Http.Utils (line 1) | defmodule Transport.Http.Utils
method location_header (line 7) | def location_header(headers), do: header_value(headers, "location")
method header_value (line 9) | def header_value(headers, header) do
method reencode_body_to_utf8 (line 21) | def reencode_body_to_utf8(body, headers) do
method parse_charset (line 28) | defp parse_charset([content_type]), do: parse_charset(content_type)
method parse_charset (line 51) | defp parse_charset(_), do: nil
method reencode_body (line 57) | defp reencode_body(nil, body), do: body
method reencode_body (line 58) | defp reencode_body(:utf8, body), do: body
method reencode_body (line 61) | defp reencode_body(:latin1, body) do
method reencode_body (line 76) | defp reencode_body(other, body) do
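header_value/2 in Transport.Http.Utils looks up a response header, and HTTP header names are case-insensitive, so a lookup has to normalise both sides; the :latin1 clause of reencode_body suggests converting a Latin-1 body to UTF-8. Sketch below, assuming headers come as {name, value} tuples; the return shape is also an assumption.

```elixir
# Sketch: case-insensitive header lookup and Latin-1 to UTF-8 re-encoding.
defmodule HttpUtilsSketch do
  def header_value(headers, header) do
    headers
    |> Enum.filter(fn {name, _value} -> String.downcase(name) == String.downcase(header) end)
    |> Enum.map(fn {_name, value} -> value end)
  end

  def reencode_body(:utf8, body), do: body
  def reencode_body(:latin1, body), do: :unicode.characters_to_binary(body, :latin1, :utf8)
end

# HttpUtilsSketch.header_value([{"Content-Type", "text/csv"}], "content-type")
# => ["text/csv"]
```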
FILE: apps/transport/lib/irve/data_frame.ex
class Transport.IRVE.DataFrame (line 1) | defmodule Transport.IRVE.DataFrame
method remap_schema_type (line 45) | def remap_schema_type(input_type, strict \\ true)
method remap_schema_type (line 47) | def remap_schema_type(input_type, true = _strict) do
method remap_schema_type (line 55) | def remap_schema_type(input_type, false = _strict) do
method dataframe_from_csv_body! (line 119) | def dataframe_from_csv_body!(body, schema \\ Transport.IRVE.StaticIRVE...
method guess_delimiter! (line 187) | def guess_delimiter!(body) do
method first_line (line 203) | def first_line(body) do
method remove_bom (line 209) | def remove_bom(string) do
method separators_frequencies (line 224) | def separators_frequencies(string) do
method preprocess_xy_coordinates (line 272) | def preprocess_xy_coordinates(df) do
method preprocess_boolean (line 311) | def preprocess_boolean(df, field_name, keep_as_string \\ false) do
method add_empty_column_if_missing (line 348) | def add_empty_column_if_missing(dataframe, field_name, keep_as_string ...
class ColumnDelimiterGuessError (line 140) | defmodule ColumnDelimiterGuessError
method message (line 150) | def message(%{col_seps_frequencies: frequencies}) do
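guess_delimiter!/1, first_line/1, remove_bom/1 and separators_frequencies/1 in Transport.IRVE.DataFrame outline a simple heuristic: look at the header line and pick the candidate separator that appears most often, raising (ColumnDelimiterGuessError in the real module) when none does. Standalone sketch of that heuristic; the candidate list ("," and ";") and the BOM handling are assumptions.

```elixir
# Sketch: guess a CSV column delimiter from the header line.
defmodule DelimiterGuessSketch do
  @candidates [",", ";"]

  def remove_bom("\uFEFF" <> rest), do: rest
  def remove_bom(string), do: string

  def first_line(body) do
    body |> remove_bom() |> String.split(["\r\n", "\n"], parts: 2) |> hd()
  end

  def separators_frequencies(line) do
    Map.new(@candidates, fn sep ->
      {sep, (line |> String.split(sep) |> length()) - 1}
    end)
  end

  def guess_delimiter!(body) do
    {sep, count} =
      body
      |> first_line()
      |> separators_frequencies()
      |> Enum.max_by(fn {_sep, n} -> n end)

    if count == 0, do: raise("could not guess column delimiter"), else: sep
  end
end

# DelimiterGuessSketch.guess_delimiter!("id_pdc_itinerance;nom_operateur\nFRXXX;Op")
# => ";"
```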
FILE: apps/transport/lib/irve/database_exporter.ex
class Transport.IRVE.DatabaseExporter (line 1) | defmodule Transport.IRVE.DatabaseExporter
method export_to_csv (line 12) | def export_to_csv(path) do
method build_data_frame (line 16) | def build_data_frame do
method mutate_coordinates_columns (line 46) | def mutate_coordinates_columns(df) do
method database_field_list (line 58) | def database_field_list do
method additional_file_field_list (line 64) | def additional_file_field_list do
method export_field_list (line 79) | def export_field_list do
FILE: apps/transport/lib/irve/database_importer.ex
class Transport.IRVE.DatabaseImporter (line 1) | defmodule Transport.IRVE.DatabaseImporter
method try_write_to_db (line 23) | def try_write_to_db(file_path, %{
method write_to_db (line 49) | def write_to_db(
method write_new_file! (line 88) | defp write_new_file!(
method write_pdcs (line 126) | defp write_pdcs(rows_stream, file_id) do
method delete_previous_file_and_pdcs (line 138) | defp delete_previous_file_and_pdcs(datagouv_dataset_id, datagouv_resou...
FILE: apps/transport/lib/irve/deduplicator.ex
class Transport.IRVE.Deduplicator (line 1) | defmodule Transport.IRVE.Deduplicator
method add_duplicates_column (line 54) | def add_duplicates_column(%Explorer.DataFrame{} = df) do
method discard_duplicates (line 66) | def discard_duplicates(df) do
method remove_non_concerne_rule (line 79) | defp remove_non_concerne_rule(df) do
method unique_rule (line 89) | defp unique_rule(df) do
method in_prioritary_datasets_rule (line 99) | defp in_prioritary_datasets_rule(df) do
method date_maj_rule (line 125) | defp date_maj_rule(df) do
method datagouv_last_modified_rule (line 151) | defp datagouv_last_modified_rule(df) do
method exact_duplicate_in_same_file_rule (line 186) | defp exact_duplicate_in_same_file_rule(df) do
method remove_undecided_duplicates_rule (line 213) | defp remove_undecided_duplicates_rule(df) do
FILE: apps/transport/lib/irve/dynamic_irve_schema.ex
class Transport.IRVE.DynamicIRVESchema (line 1) | defmodule Transport.IRVE.DynamicIRVESchema
method schema_content (line 7) | def schema_content do
method build_schema_fields_list (line 12) | def build_schema_fields_list do
FILE: apps/transport/lib/irve/extractor.ex
class Transport.IRVE.Extractor (line 1) | defmodule Transport.IRVE.Extractor
method datagouv_resources (line 19) | def datagouv_resources(pagination_options \\ []) do
method process_data_gouv_page (line 35) | def process_data_gouv_page(%{url: url} = page) do
method fetch_in! (line 44) | def fetch_in!(list, [key]), do: Map.fetch!(list, key)
method fetch_in! (line 45) | def fetch_in!(list, [key | keys]), do: fetch_in!(Map.fetch!(list, key)...
method unpack_resources (line 50) | def unpack_resources(dataset) do
method extract_organization_or_owner_name (line 65) | defp extract_organization_or_owner_name(dataset) do
method extract_owner_name (line 69) | defp extract_owner_name(dataset) do
method remap_fields (line 76) | def remap_fields(resource) do
method download_and_parse_all (line 105) | def download_and_parse_all(resources, progress_callback \\ nil) do
method download_and_parse_one (line 147) | def download_and_parse_one(row, index) do
method process_resource_body (line 161) | def process_resource_body(%{http_status: 200} = row, body) do
method process_resource_body (line 176) | def process_resource_body(row, _body), do: row
method insert_report! (line 181) | def insert_report!(resources) do
FILE: apps/transport/lib/irve/fetcher.ex
class Transport.IRVE.Fetcher (line 1) | defmodule Transport.IRVE.Fetcher
method cache_dir (line 9) | def cache_dir, do: Path.join(__ENV__.file, "../../../../../cache-dir")...
method http_options (line 11) | def http_options do
method pages (line 28) | def pages(base_url, pagination_options \\ []) do
method get! (line 35) | def get!(url, options \\ []) do
FILE: apps/transport/lib/irve/http_pagination.ex
class Transport.IRVE.HTTPPagination (line 1) | defmodule Transport.IRVE.HTTPPagination
method naive_paginated_urls_stream (line 24) | def naive_paginated_urls_stream(base_url, http_client, http_client_opt...
method num_pages (line 76) | def num_pages(total_items: 0, items_per_page: _), do: 0
method num_pages (line 77) | def num_pages(total_items: total_items, items_per_page: items_per_page...
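num_pages/1 in Transport.IRVE.HTTPPagination maps a total item count and a page size to a page count, with a dedicated clause for an empty collection. Sketch of the usual ceiling division, keeping the keyword-list calling convention shown in the listing.

```elixir
# Sketch: page count as a ceiling division, zero items meaning zero pages.
defmodule PaginationSketch do
  def num_pages(total_items: 0, items_per_page: _), do: 0

  def num_pages(total_items: total_items, items_per_page: items_per_page),
    do: div(total_items + items_per_page - 1, items_per_page)
end

# PaginationSketch.num_pages(total_items: 250, items_per_page: 100) => 3
# PaginationSketch.num_pages(total_items: 0, items_per_page: 100) => 0
```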
FILE: apps/transport/lib/irve/processing.ex
class Transport.IRVE.Processing (line 1) | defmodule Transport.IRVE.Processing
method read_as_data_frame (line 10) | def read_as_data_frame(body) do
method read_as_uncasted_data_frame (line 23) | def read_as_uncasted_data_frame(body) do
method convert_to_dataframe! (line 35) | def convert_to_dataframe!(body) do
method convert_to_uncasted_dataframe! (line 47) | defp convert_to_uncasted_dataframe!(body) do
method preprocess_coordinates (line 57) | def preprocess_coordinates(dataframe) do
method preprocess_boolean_fields (line 61) | def preprocess_boolean_fields(dataframe, keep_as_string \\ false) do
method add_missing_optional_columns (line 79) | def add_missing_optional_columns(dataframe, keep_as_string \\ false) do
method select_fields (line 86) | def select_fields(dataframe) do
FILE: apps/transport/lib/irve/raw_report_item.ex
class Transport.IRVE.RawReportItem (line 1) | defmodule Transport.IRVE.RawReportItem
FILE: apps/transport/lib/irve/raw_static_consolidation.ex
class Transport.IRVE.RawStaticConsolidation (line 1) | defmodule Transport.IRVE.RawStaticConsolidation
method download_resource_content! (line 51) | def download_resource_content!(url) do
method process_resource (line 60) | def process_resource(row, body, status, extension) do
method maybe_rename_bogus_num_pdl (line 109) | def maybe_rename_bogus_num_pdl(_, body), do: body
method ensure_utf8 (line 133) | def ensure_utf8(body) do
method run_cheap_blocking_checks (line 150) | def run_cheap_blocking_checks(body, extension) do
method ensure_producer_is_org! (line 174) | def ensure_producer_is_org!(%{dataset_organisation_id: "???"}), do: ra...
method ensure_producer_is_org! (line 176) | def ensure_producer_is_org!(_row), do: :ok
method log_debugging_stuff (line 182) | def log_debugging_stuff(resource_id, df) do
method maybe_concat_rows (line 196) | def maybe_concat_rows({:error, error}, main_df), do: {main_df, error}
method maybe_concat_rows (line 197) | def maybe_concat_rows({:ok, df}, nil), do: {df, nil}
method maybe_concat_rows (line 198) | def maybe_concat_rows({:ok, df}, main_df), do: {Explorer.DataFrame.con...
method exclude_irrelevant_resources (line 200) | def exclude_irrelevant_resources(stream) do
method build_report_item (line 210) | def build_report_item(row, body, extension, optional_error) do
method maybe_filter (line 221) | def maybe_filter(stream, nil), do: stream
method build_aggregate_and_report! (line 236) | def build_aggregate_and_report!(options \\ []) do
method process_individual_resource_and_report (line 263) | def process_individual_resource_and_report(row, main_df, report) do
FILE: apps/transport/lib/irve/simple_consolidation.ex
class Transport.IRVE.SimpleConsolidation (line 1) | defmodule Transport.IRVE.SimpleConsolidation
method process (line 22) | def process(opts \\ []) do
method maybe_log_items (line 63) | def maybe_log_items(stream, debug) do
method maybe_limit (line 74) | def maybe_limit(stream, nil), do: stream
method resource_list (line 76) | def resource_list do
method process_or_rescue (line 83) | def process_or_rescue(resource) do
method process_resource (line 90) | def process_resource(resource) do
method generate_report (line 134) | def generate_report(report_rows, destination: destination) do
method write_consolidated_file (line 180) | def write_consolidated_file(df, base_name, :send_to_s3) do
method write_consolidated_file (line 194) | def write_consolidated_file(df, base_name, :local_disk) do
method storage_path (line 200) | def storage_path(resource_id) do
method with_maybe_cached_download_on_disk (line 210) | def with_maybe_cached_download_on_disk(resource, file_path, extension,...
method with_maybe_cached_download_on_disk (line 221) | def with_maybe_cached_download_on_disk(resource, file_path, extension,...
method download! (line 226) | def download!(resource_id, url, file) do
FILE: apps/transport/lib/irve/simple_report_item.ex
class Transport.IRVE.SimpleReportItem (line 1) | defmodule Transport.IRVE.SimpleReportItem
method from_result (line 22) | def from_result({:error_occurred, error, resource}) do
method from_result (line 26) | def from_result({status, resource}) do
method to_map (line 30) | def to_map(%__MODULE__{} = report_row) do
method new (line 36) | defp new(resource, status, error) do
method maybe_error_message (line 52) | defp maybe_error_message(nil), do: nil
method maybe_error_message (line 53) | defp maybe_error_message(error), do: Exception.message(error)
method maybe_error_type (line 55) | defp maybe_error_type(nil), do: nil
method maybe_error_type (line 56) | defp maybe_error_type(error), do: error.__struct__ |> inspect()
FILE: apps/transport/lib/irve/static_irve_schema.ex
class Transport.IRVE.StaticIRVESchema (line 1) | defmodule Transport.IRVE.StaticIRVESchema
method schema_content (line 11) | def schema_content do
method field_names_list (line 31) | def field_names_list do
method boolean_columns (line 57) | def boolean_columns do
method optional_fields (line 74) | def optional_fields do
FILE: apps/transport/lib/irve/static_probes.ex
class Transport.IRVE.Static.Probes (line 1) | defmodule Transport.IRVE.Static.Probes
method first_line (line 12) | def first_line(body) do
method has_id_pdc_itinerance (line 25) | def has_id_pdc_itinerance(body) do
method hint_header_separator (line 55) | def hint_header_separator(body) do
method maybe_find_header_separator (line 64) | def maybe_find_header_separator(regex, first_line) do
method probably_v1_schema (line 78) | def probably_v1_schema(body) do
FILE: apps/transport/lib/irve/validator/data_frame_validation.ex
class Transport.IRVE.Validator.DataFrameValidation (line 1) | defmodule Transport.IRVE.Validator.DataFrameValidation
method setup_computed_field_validation_columns (line 17) | def setup_computed_field_validation_columns(%Explorer.DataFrame{} = df...
method extract_field_definition_data! (line 30) | def extract_field_definition_data!(field_def) do
method setup_computed_field_validation_column (line 46) | def setup_computed_field_validation_column(
method setup_computed_row_validation_column (line 71) | def setup_computed_row_validation_column(%Explorer.DataFrame{} = df) do
FILE: apps/transport/lib/irve/validator/field_validation.ex
class Transport.IRVE.Validator.FieldValidation (line 1) | defmodule Transport.IRVE.Validator.FieldValidation
method column_valid? (line 105) | def column_valid?(_df, field_name, type, format, constraints) do
FILE: apps/transport/lib/irve/validator/summary.ex
class Transport.IRVE.Validator.Summary (line 1) | defmodule Transport.IRVE.Validator.Summary
FILE: apps/transport/lib/irve/validator/validator.ex
class Transport.IRVE.Validator (line 1) | defmodule Transport.IRVE.Validator
method compute_validation (line 6) | def compute_validation(%Explorer.DataFrame{} = df) do
method validate (line 21) | def validate(path, extension \\ ".csv") do
method full_file_valid? (line 38) | def full_file_valid?(%Explorer.DataFrame{} = df) do
method summarize (line 54) | def summarize(%Explorer.DataFrame{} = df) do
method validate_and_summarize (line 81) | def validate_and_summarize(path, extension \\ ".csv") do
method summarize_total_counts (line 98) | defp summarize_total_counts(df) do
method summarize_column_errors (line 104) | defp summarize_column_errors(df) do
method error_samples (line 114) | defp error_samples(df, column_errors) do
FILE: apps/transport/lib/jobs/RamboLauncher.ex
class Transport.RamboLauncher (line 1) | defmodule Transport.RamboLauncher
method impl (line 7) | def impl, do: Application.get_env(:transport, :rambo_impl)
method run (line 9) | def run(binary_path, args, opts \\ []), do: impl().run(binary_path, ar...
class Transport.Rambo (line 12) | defmodule Transport.Rambo
method run (line 19) | def run(binary_path, args, opts) do
FILE: apps/transport/lib/jobs/analyze_irve_job.ex
class Transport.Jobs.AnalyzeIRVEJob (line 1) | defmodule Transport.Jobs.AnalyzeIRVEJob
method perform (line 17) | def perform(%Oban.Job{id: job_id}) do
method start_async_work (line 24) | def start_async_work do
method notify (line 53) | def notify(job_id, status, progress \\ nil) do
method wait_for_work_completion (line 63) | def wait_for_work_completion(job_id) do
FILE: apps/transport/lib/jobs/archive_metrics_job.ex
class Transport.Jobs.ArchiveMetricsJob (line 1) | defmodule Transport.Jobs.ArchiveMetricsJob
method perform (line 26) | def perform(%Oban.Job{args: %{"date" => date}}) do
method to_midnight_datetime (line 77) | def to_midnight_datetime(%Date{} = date) do
method days_to_archive (line 81) | def days_to_archive do
FILE: apps/transport/lib/jobs/backfill/backfill_metadata_non_gtfs_resource_history.ex
class Transport.Jobs.Backfill.ResourceHistoryMetadataNonGTFS (line 1) | defmodule Transport.Jobs.Backfill.ResourceHistoryMetadataNonGTFS
method perform (line 17) | def perform(%{args: %{"resource_history_id" => resource_history_id, "b...
method perform (line 29) | def perform(%{args: %{"resource_history_id" => resource_history_id}}) do
method fetch_next (line 34) | def fetch_next(resource_history_id) do
method update_resource_history (line 44) | def update_resource_history(resource_history_id) do
method do_update (line 55) | def do_update(%ResourceHistory{} = rh), do: rh
FILE: apps/transport/lib/jobs/backfill/backfill_resource_history_filesize.ex
class Transport.Jobs.Backfill.ResourceHistoryFileSize (line 1) | defmodule Transport.Jobs.Backfill.ResourceHistoryFileSize
method perform (line 11) | def perform(%{args: %{"resource_history_id" => resource_history_id, "b...
method perform (line 23) | def perform(%{args: %{"resource_history_id" => resource_history_id}}) do
method fetch_next (line 28) | def fetch_next(resource_history_id) do
method update_resource_filesize (line 37) | def update_resource_filesize(resource_history_id) do
method do_update (line 44) | def do_update(%{payload: %{"total_compressed_size" => total_compressed...
method do_update (line 53) | def do_update(%{payload: %{"permanent_url" => url} = payload} = rh) do
FILE: apps/transport/lib/jobs/backfill/backfill_resource_history_resource_id.ex
class Transport.Jobs.Backfill.ResourceHistoryResourceId (line 1) | defmodule Transport.Jobs.Backfill.ResourceHistoryResourceId
method perform (line 8) | def perform(%{}) do
method execute (line 45) | defp execute(query), do: query |> DB.Repo.query!()
FILE: apps/transport/lib/jobs/backfill/backfill_resource_history_schema_details.ex
class Transport.Jobs.Backfill.ResourceHistorySchemaDetails (line 1) | defmodule Transport.Jobs.Backfill.ResourceHistorySchemaDetails
method perform (line 9) | def perform(%{}) do
method execute (line 32) | defp execute(query), do: query |> DB.Repo.query!()
FILE: apps/transport/lib/jobs/backfill/remove_gtfs_rt_snapshots.ex
class Transport.Jobs.Backfill.RemoveGTFSRTSnapshots (line 1) | defmodule Transport.Jobs.Backfill.RemoveGTFSRTSnapshots
method perform (line 11) | def perform(%{}) do
method count_relevant_files (line 16) | def count_relevant_files do
method relevant_file_keys (line 20) | def relevant_file_keys do
method multi_validation_gtfs_rt_filenames (line 34) | def multi_validation_gtfs_rt_filenames do
FILE: apps/transport/lib/jobs/clean_multi_validation_job.ex
class Transport.Jobs.CleanMultiValidationJob (line 1) | defmodule Transport.Jobs.CleanMultiValidationJob
method perform (line 30) | def perform(%Oban.Job{args: %{"static" => true} = job_args}) do
method perform (line 65) | def perform(%Oban.Job{args: %{"real_time" => true} = job_args}) do
method max_records (line 93) | def max_records, do: Application.get_env(:transport, :clean_multi_vali...
method archive_records (line 95) | defp archive_records(ids) do
FILE: apps/transport/lib/jobs/clean_on_demand_validation_job.ex
class Transport.Jobs.CleanOnDemandValidationJob (line 1) | defmodule Transport.Jobs.CleanOnDemandValidationJob
method retention_days (line 13) | def retention_days, do: @days_before_archiving
method perform (line 28) | def perform(%Oban.Job{args: %{"date" => date}}) do
method days_to_archive (line 43) | def days_to_archive do
method multi_validation_in_scope (line 54) | defp multi_validation_in_scope do
FILE: apps/transport/lib/jobs/consolidate_bnlc_job.ex
class Transport.Jobs.ConsolidateBNLCJob (line 1) | defmodule Transport.Jobs.ConsolidateBNLCJob
method perform (line 55) | def perform(%Oban.Job{args: %{"action" => "delete_s3_file", "filename"...
method perform (line 66) | def perform(%Oban.Job{id: job_id, args: %{"action" => "datagouv_update...
method perform (line 78) | def perform(%Oban.Job{id: job_id}) do
method consolidate (line 85) | def consolidate do
method replace_file_on_datagouv (line 116) | def replace_file_on_datagouv do
method validator_unavailable? (line 129) | defp validator_unavailable?(validation_errors) do
method upload_temporary_file (line 135) | defp upload_temporary_file do
method schedule_deletion (line 142) | defp schedule_deletion(filename) do
method send_email_recap (line 152) | def send_email_recap(filename, %{} = errors) do
method format_errors (line 172) | def format_errors(%{dataset_errors: _, validation_errors: _, download_...
method format_dataset_errors (line 185) | def format_dataset_errors(%{dataset_errors: []}), do: ""
method format_dataset_errors (line 187) | def format_dataset_errors(%{dataset_errors: dataset_errors}) do
method format_validation_errors (line 204) | def format_validation_errors(%{validation_errors: []}), do: ""
method format_validation_errors (line 206) | def format_validation_errors(%{validation_errors: validation_errors}) do
method format_download_errors (line 214) | def format_download_errors(%{download_errors: []}), do: ""
method format_download_errors (line 216) | def format_download_errors(%{download_errors: download_errors}) do
method format_decode_errors (line 229) | def format_decode_errors(%{decode_errors: []}), do: ""
method format_decode_errors (line 231) | def format_decode_errors(%{decode_errors: decode_errors}) do
method link_to_dataset (line 247) | def link_to_dataset(%{"page" => page_url, "title" => title}) do
method link_to_resource (line 261) | def link_to_resource({:error, dataset_details, resource}), do: link_to...
method bnlc_csv_headers (line 269) | def bnlc_csv_headers do
method final_csv_headers (line 289) | def final_csv_headers(bnlc_headers) do
method consolidate_resources (line 303) | def consolidate_resources(resources_details) do
method add_columns (line 351) | def add_columns(%Stream{} = stream, %{dataset: %{"id" => dataset_id}, ...
method datagouv_dataset_slugs (line 367) | def datagouv_dataset_slugs do
method guess_csv_separator (line 384) | def guess_csv_separator(body) do
method guess_encoding (line 405) | def guess_encoding(path) do
method can_stream_for_encoding? (line 410) | def can_stream_for_encoding?(path, encoding) do
method extract_dataset_details (line 431) | def extract_dataset_details(slugs) do
method get_dataset_details (line 439) | defp get_dataset_details(slug) do
method valid_datagouv_resources (line 465) | def valid_datagouv_resources(datasets_details) do
method download_resources (line 495) | def download_resources(resources_details) do
method guess_csv_details_and_decode (line 515) | def guess_csv_details_and_decode({dataset_details, %{"id" => resource_...
method check_can_decode_csv (line 532) | defp check_can_decode_csv(
method with_appropriate_schema? (line 576) | def with_appropriate_schema?(%{}), do: false
method xlsx? (line 587) | def xlsx?(%{"format" => "xlsx"}), do: true
method xlsx? (line 588) | def xlsx?(%{"title" => title}), do: String.contains?(title |> String.d...
method xlsx? (line 589) | def xlsx?(_), do: false
method dataset_slug_to_url (line 595) | def dataset_slug_to_url(slug) do
method http_client (line 599) | defp http_client, do: Transport.Shared.Wrapper.HTTPoison.impl()
method consolidation_configuration (line 601) | defp consolidation_configuration do
FILE: apps/transport/lib/jobs/consolidate_lez_job.ex
class Transport.Jobs.ConsolidateLEZsJob (line 1) | defmodule Transport.Jobs.ConsolidateLEZsJob
method perform (line 21) | def perform(%Oban.Job{id: job_id}) do
method consolidate (line 27) | def consolidate do
method update_files (line 37) | def update_files(consolidated_data) do
method write_file (line 59) | def write_file(filepath, content) do
method type (line 67) | def type(%Resource{dataset: %Dataset{type: @lez_dataset_type}} = resou...
method voie? (line 85) | def voie?(%Resource{url: url, dataset: %Dataset{type: @lez_dataset_typ...
method relevant_resources (line 89) | def relevant_resources do
method consolidate_features (line 103) | def consolidate_features(resources) do
method content_features (line 115) | defp content_features({%Resource{} = resource, %ResourceHistory{payloa...
method latest_valid_resource_history (line 124) | defp latest_valid_resource_history(%Resource{id: resource_id}) do
method add_publisher (line 136) | defp add_publisher(features, publisher_details) do
method publisher_details (line 144) | def publisher_details(%Resource{dataset: %Dataset{} = dataset}) do
method zfe_id (line 170) | def zfe_id(siren) do
method pan_publisher (line 182) | def pan_publisher do
method resource_id_for_type (line 186) | def resource_id_for_type(type) do
method consolidation_configuration (line 190) | def consolidation_configuration do
method http_client (line 218) | defp http_client, do: Transport.Shared.Wrapper.HTTPoison.impl()
FILE: apps/transport/lib/jobs/conversions/clean_orphan_conversions_job.ex
class Transport.Jobs.CleanOrphanConversionsJob (line 1) | defmodule Transport.Jobs.CleanOrphanConversionsJob
method perform (line 13) | def perform(%Oban.Job{}) do
method mark_for_deletion (line 34) | defp mark_for_deletion(ids) do
method remove_rows (line 41) | defp remove_rows(ids) do
method remove_s3_objects (line 45) | defp remove_s3_objects(paths) do
FILE: apps/transport/lib/jobs/conversions/generic_converter.ex
class Transport.Jobs.GenericConverter (line 1) | defmodule Transport.Jobs.GenericConverter
method enqueue_all_conversion_jobs (line 11) | def enqueue_all_conversion_jobs(source_format, target_format, conversi...
method resource_of_format? (line 53) | defp resource_of_format?(expected_format, %{payload: %{"format" => for...
method resource_of_format? (line 54) | defp resource_of_format?(_, _), do: false
method conversion_exists? (line 76) | def conversion_exists?(nil, _, _, _), do: false
method conversion_file_name (line 189) | defp conversion_file_name(resource_name, source_format, target_format),
method add_zip_extension (line 192) | defp add_zip_extension(path, true = _zip_conversion?), do: "#{path}.zip"
method add_zip_extension (line 193) | defp add_zip_extension(path, _), do: path
class Transport.FolderZipper (line 196) | defmodule Transport.FolderZipper
method zip (line 200) | def zip(folder_path, zip_name) do
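Transport.FolderZipper.zip/2 packages a conversion output folder into a single archive. A self-contained sketch of folder zipping with Erlang's built-in :zip module; module name and options here are assumptions, not the project's actual code:

defmodule FolderZipperSketch do
  @doc "Zip every regular file found under `folder_path` into `zip_name`."
  def zip(folder_path, zip_name) do
    files =
      folder_path
      |> Path.join("**")
      |> Path.wildcard()
      |> Enum.filter(&File.regular?/1)
      |> Enum.map(&(&1 |> Path.relative_to(folder_path) |> String.to_charlist()))

    # :zip.create/3 resolves relative entries against the :cwd option.
    :zip.create(String.to_charlist(zip_name), files, cwd: String.to_charlist(folder_path))
  end
end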
FILE: apps/transport/lib/jobs/conversions/gtfs_generic_converter.ex
class Transport.Jobs.GTFSGenericConverter (line 1) | defmodule Transport.Jobs.GTFSGenericConverter
method enqueue_all_conversion_jobs (line 16) | def enqueue_all_conversion_jobs(format, conversion_job_module) do
method perform_single_conversion_job (line 24) | def perform_single_conversion_job(resource_history_id, format, convert...
FILE: apps/transport/lib/jobs/conversions/gtfs_to_geojson_converter_job.ex
class Transport.Jobs.GTFSToGeoJSONConverterJob (line 1) | defmodule Transport.Jobs.GTFSToGeoJSONConverterJob
method perform (line 9) | def perform(%{}) do
class Transport.Jobs.SingleGTFSToGeoJSONConverterJob (line 14) | defmodule Transport.Jobs.SingleGTFSToGeoJSONConverterJob
method perform (line 24) | def perform(%{args: %{"resource_history_id" => resource_history_id}}) do
class Transport.GTFSToGeoJSONConverter (line 29) | defmodule Transport.GTFSToGeoJSONConverter
method convert (line 35) | def convert(gtfs_file_path, geojson_file_path) do
method converter (line 45) | def converter, do: "rust-transit/gtfs-to-geojson"
method converter_version (line 48) | def converter_version, do: "0.3.1"
FILE: apps/transport/lib/jobs/conversions/netex_generic_converter.ex
class Transport.Jobs.NeTExGenericConverter (line 1) | defmodule Transport.Jobs.NeTExGenericConverter
method enqueue_all_conversion_jobs (line 16) | def enqueue_all_conversion_jobs(format, conversion_job_module) do
method perform_single_conversion_job (line 24) | def perform_single_conversion_job(resource_history_id, format, convert...
FILE: apps/transport/lib/jobs/conversions/netex_to_geojson_converter_job.ex
class Transport.Jobs.NeTExToGeoJSONConverterJob (line 1) | defmodule Transport.Jobs.NeTExToGeoJSONConverterJob
method perform (line 9) | def perform(%{}) do
class Transport.Jobs.SingleNeTExToGeoJSONConverterJob (line 14) | defmodule Transport.Jobs.SingleNeTExToGeoJSONConverterJob
method perform (line 24) | def perform(%{args: %{"resource_history_id" => resource_history_id}}) do
class Transport.NeTExToGeoJSONConverter (line 33) | defmodule Transport.NeTExToGeoJSONConverter
method convert (line 39) | def convert(netex_file_path, geojson_file_path) do
method converter (line 50) | def converter, do: "etalab/transport-site"
method converter_version (line 53) | def converter_version, do: "0.1.0"
FILE: apps/transport/lib/jobs/create_tokens_job.ex
class Transport.Jobs.CreateTokensJob (line 1) | defmodule Transport.Jobs.CreateTokensJob
method get_all_contact_ids_having_a_default_token (line 14) | def get_all_contact_ids_having_a_default_token do
method get_all_contact_ids_in_org (line 19) | def get_all_contact_ids_in_org do
method perform (line 30) | def perform(%Oban.Job{args: %{"action" => "create_token_for_contact", ...
method perform (line 49) | def perform(%Oban.Job{args: %{"organization_id" => organization_id}}) do
method perform (line 66) | def perform(%Oban.Job{args: %{"action" => "set_default_token_for_conta...
method perform (line 82) | def perform(%Oban.Job{args: %{"action" => "create_tokens_for_contacts_...
method perform (line 97) | def perform(%Oban.Job{args: %{"action" => "create_tokens_for_organizat...
method create_default_token_for_contact (line 113) | defp create_default_token_for_contact(%DB.Contact{id: contact_id} = co...
method set_default_token_for_contact (line 126) | defp set_default_token_for_contact(%DB.Contact{organizations: organiza...
method create_token_for_organization (line 140) | defp create_token_for_organization(%DB.Contact{} = contact, %DB.Organi...
method set_default_token_for_contact (line 149) | defp set_default_token_for_contact(%DB.Token{id: token_id}, %DB.Contac...
FILE: apps/transport/lib/jobs/custom_logo_conversion_job.ex
class Transport.Jobs.CustomLogoConversionJob (line 1) | defmodule Transport.Jobs.CustomLogoConversionJob
method perform (line 10) | def perform(%Oban.Job{args: %{"datagouv_id" => datagouv_id, "path" => ...
method stream_to_s3 (line 53) | defp stream_to_s3(local_path, remote_path) do
FILE: apps/transport/lib/jobs/database_backup_replication_job.ex
class Transport.Jobs.DatabaseBackupReplicationJob (line 1) | defmodule Transport.Jobs.DatabaseBackupReplicationJob
method perform (line 18) | def perform(%Oban.Job{}) do
method ensure_destination_permissions_are_appropriate! (line 28) | def ensure_destination_permissions_are_appropriate! do
method upload! (line 41) | def upload!(dump) do
method check_appropriate_size! (line 59) | def check_appropriate_size! do
method latest_dump_for_date (line 71) | def latest_dump_for_date(latest_dumps, %Date{} = date) do
method latest_dump (line 79) | def latest_dump, do: List.first(latest_source_dumps(1))
method bucket_name (line 87) | def bucket_name(target), do: Map.fetch!(find_config(target), :bucket_n...
method hours_in_seconds (line 95) | def hours_in_seconds(hours), do: hours * 60 * 60
method check_dump_not_too_large! (line 107) | def check_dump_not_too_large!(dump) do
method dump_size (line 115) | def dump_size(%{size: size_str}) do
method check_dump_is_recent_enough! (line 120) | def check_dump_is_recent_enough!(%{last_modified: last_modified_str} =...
method max_size_threshold (line 130) | def max_size_threshold, do: gigabytes(10)
method recent_enough_threshold (line 131) | def recent_enough_threshold, do: hours_in_seconds(12)
method upload_filename (line 133) | defp upload_filename(%{key: key}) do
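The replication job refuses to copy a dump that is too old or too large (check_dump_is_recent_enough!/1, recent_enough_threshold/0 of 12 hours). A sketch of such a recency check; the last_modified field name and ISO 8601 format are assumptions about the S3 listing, not taken from the job:

defmodule DumpRecencyCheckSketch do
  @recent_enough_threshold 12 * 60 * 60

  @doc "True when the dump's last_modified timestamp is within the allowed window."
  def recent_enough?(%{last_modified: last_modified}, now \\ DateTime.utc_now()) do
    {:ok, dt, _offset} = DateTime.from_iso8601(last_modified)
    DateTime.diff(now, dt) <= @recent_enough_threshold
  end
end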
FILE: apps/transport/lib/jobs/database_vacuum_job.ex
class Transport.Jobs.DatabaseVacuumJob (line 1) | defmodule Transport.Jobs.DatabaseVacuumJob
method perform (line 10) | def perform(%Oban.Job{}) do
FILE: apps/transport/lib/jobs/dataset_history_job.ex
class Transport.Jobs.DatasetHistoryDispatcherJob (line 1) | defmodule Transport.Jobs.DatasetHistoryDispatcherJob
method perform (line 10) | def perform(_job) do
class Transport.Jobs.DatasetHistoryJob (line 22) | defmodule Transport.Jobs.DatasetHistoryJob
method perform (line 35) | def perform(%Oban.Job{args: %{"dataset_id" => dataset_id}}) do
method get_preloaded_dataset (line 90) | def get_preloaded_dataset(dataset_id) do
FILE: apps/transport/lib/jobs/dataset_now_on_nap_notification_job.ex
class Transport.Jobs.DatasetNowOnNAPNotificationJob (line 1) | defmodule Transport.Jobs.DatasetNowOnNAPNotificationJob
method perform (line 11) | def perform(%Oban.Job{args: %{"dataset_id" => dataset_id}, id: job_id}...
method save_notification (line 31) | defp save_notification(%DB.Contact{id: contact_id, email: email}, %DB....
method reject_already_sent (line 41) | defp reject_already_sent(notification_subscriptions, %DB.Dataset{} = d...
method email_addresses_already_sent (line 50) | defp email_addresses_already_sent(%DB.Dataset{id: dataset_id}) do
FILE: apps/transport/lib/jobs/dataset_quality_score.ex
class Transport.Jobs.DatasetQualityScoreDispatcher (line 1) | defmodule Transport.Jobs.DatasetQualityScoreDispatcher
method perform (line 9) | def perform(%Oban.Job{}) do
class Transport.Jobs.DatasetQualityScore (line 20) | defmodule Transport.Jobs.DatasetQualityScore
method perform (line 30) | def perform(%Oban.Job{args: %{"dataset_id" => dataset_id}}) do
method build_details (line 51) | def build_details(%{} = details, %{} = last_score) do
method build_details (line 55) | def build_details(details, last_score), do: build_details(details || %...
method exp_smoothing (line 68) | def exp_smoothing(previous_score, today_score, :compliance) do
method exp_smoothing (line 76) | def exp_smoothing(previous_score, today_score, alpha) do
method last_dataset_score (line 81) | def last_dataset_score(dataset_id, topic) do
method average (line 109) | def average([]), do: nil
method average (line 110) | def average(e), do: Enum.sum(e) / Enum.count(e)
method dataset_resources (line 113) | def dataset_resources(dataset_id) do
method save_dataset_score (line 122) | def save_dataset_score(dataset_id, topic) do
method dataset_score (line 148) | def dataset_score(dataset_id, topic) do
method compute_fn_for_topic (line 172) | def compute_fn_for_topic(topic) do
class Transport.Jobs.DatasetComplianceScore (line 184) | defmodule Transport.Jobs.DatasetComplianceScore
method current_dataset_compliance (line 218) | def current_dataset_compliance(dataset_id) do
method resource_compliance (line 250) | def resource_compliance({resource_id, [%DB.MultiValidation{validator: ...
method resource_compliance (line 255) | def resource_compliance(
method resource_compliance (line 263) | def resource_compliance({resource_id, [%DB.MultiValidation{validator: ...
method resource_compliance (line 280) | def resource_compliance({resource_id, [%DB.MultiValidation{digest: %{"...
class Transport.Jobs.DatasetAvailabilityScore (line 286) | defmodule Transport.Jobs.DatasetAvailabilityScore
method current_dataset_availability (line 304) | def current_dataset_availability(dataset_id) do
method resource_availability (line 322) | def resource_availability(%DB.Resource{id: resource_id} = resource) do
method resource_ids_with_unavailabilities (line 363) | def resource_ids_with_unavailabilities do
class Transport.Jobs.DatasetFreshnessScore (line 380) | defmodule Transport.Jobs.DatasetFreshnessScore
method current_dataset_freshness (line 401) | def current_dataset_freshness(dataset_id) do
method resource_freshness (line 423) | def resource_freshness(%DB.Resource{format: "GTFS" = format, id: resou...
method resource_freshness (line 458) | def resource_freshness(%DB.Resource{format: "gbfs" = format, id: resou...
method resource_freshness (line 481) | def resource_freshness(%DB.Resource{format: "gtfs-rt" = format, id: re...
method resource_freshness (line 504) | def resource_freshness(%DB.Resource{format: format, id: resource_id}),
method gtfs_freshness (line 529) | def gtfs_freshness(start_date, end_date) do
method gbfs_max_timestamp_delay (line 546) | def gbfs_max_timestamp_delay, do: 5 * 60
method gbfs_feed_freshness (line 556) | def gbfs_feed_freshness(feed_timestamp_delay) do
method gtfs_rt_max_timestamp_delay (line 563) | def gtfs_rt_max_timestamp_delay, do: 5 * 60
method gtfs_rt_feed_freshness (line 573) | def gtfs_rt_feed_freshness(feed_timestamp_delay) do
method resource_last_metadata_from_today (line 577) | def resource_last_metadata_from_today(resource_id) do
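Dataset scores are smoothed over time rather than recomputed from scratch: exp_smoothing/3 combines the previous score with today's measurement. A minimal sketch of standard exponential smoothing; the alpha value below (and the special-cased :compliance topic in the actual module) is an assumption:

defmodule ExpSmoothingSketch do
  @default_alpha 0.9

  def exp_smoothing(previous_score, today_score, alpha \\ @default_alpha)

  # No history yet: just take today's measurement.
  def exp_smoothing(nil, today_score, _alpha), do: today_score

  # Standard exponential smoothing: new = alpha * today + (1 - alpha) * previous.
  def exp_smoothing(previous_score, today_score, alpha) do
    alpha * today_score + (1 - alpha) * previous_score
  end
end

# ExpSmoothingSketch.exp_smoothing(0.5, 1.0) #=> 0.95 with alpha = 0.9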
FILE: apps/transport/lib/jobs/datasets_climate_resilience_bill_not_lo_licence_job.ex
class Transport.Jobs.DatasetsClimateResilienceBillNotLOLicenceJob (line 1) | defmodule Transport.Jobs.DatasetsClimateResilienceBillNotLOLicenceJob
method perform (line 11) | def perform(%Oban.Job{}) do
method relevant_datasets (line 23) | def relevant_datasets do
method remove_climate_resilience_bill_tag (line 30) | def remove_climate_resilience_bill_tag(datasets) do
FILE: apps/transport/lib/jobs/datasets_switching_climate_resilience_bill_job.ex
class Transport.Jobs.DatasetsSwitchingClimateResilienceBillJob (line 1) | defmodule Transport.Jobs.DatasetsSwitchingClimateResilienceBillJob
method perform (line 12) | def perform(%Oban.Job{inserted_at: %DateTime{} = inserted_at}) do
method send_email (line 21) | def send_email([], []), do: :ok
method send_email (line 23) | def send_email(datasets_previously_climate_resilience, datasets_now_cl...
method dataset_ids (line 49) | defp dataset_ids(payload) do
method datasets_previously_climate_resilience_bill (line 53) | def datasets_previously_climate_resilience_bill(result) do
method datasets_now_climate_resilience_bill (line 59) | def datasets_now_climate_resilience_bill(result) do
method has_climate_resilience_bill_tag? (line 73) | def has_climate_resilience_bill_tag?(%DB.DatasetHistory{payload: %{"cu...
method datasets_custom_tags_changes (line 77) | def datasets_custom_tags_changes(%Date{} = date) do
FILE: apps/transport/lib/jobs/datasets_without_gtfs_rt_related_resources_notification_job.ex
class Transport.Jobs.DatasetsWithoutGTFSRTRelatedResourcesNotificationJob (line 1) | defmodule Transport.Jobs.DatasetsWithoutGTFSRTRelatedResourcesNotificati...
method perform (line 10) | def perform(%Oban.Job{}) do
method send_email (line 14) | def send_email([]), do: :ok
method send_email (line 16) | def send_email(datasets) do
method relevant_datasets (line 31) | def relevant_datasets do
FILE: apps/transport/lib/jobs/dedupe_history_job.ex
class Transport.Jobs.DedupeHistoryDispatcherJob (line 1) | defmodule Transport.Jobs.DedupeHistoryDispatcherJob
method perform (line 14) | def perform(%Oban.Job{}) do
class Transport.Jobs.DedupeHistoryJob (line 29) | defmodule Transport.Jobs.DedupeHistoryJob
method perform (line 39) | def perform(%Oban.Job{args: %{"datagouv_id" => datagouv_id}}) do
method same? (line 58) | def same?(%ResourceHistory{payload: %{"content_hash" => a}}, %Resource...
method same? (line 62) | def same?(
method mark_for_deletion (line 69) | defp mark_for_deletion(ids) do
method remove_resource_history_rows (line 76) | defp remove_resource_history_rows(ids) do
method remove_s3_objects (line 80) | defp remove_s3_objects(paths) do
method shas (line 84) | defp shas(%ResourceHistory{payload: %{"zip_metadata" => zip_metadata}}...
FILE: apps/transport/lib/jobs/default_tokens_job.ex
class Transport.Jobs.DefaultTokensJob (line 1) | defmodule Transport.Jobs.DefaultTokensJob
method perform (line 13) | def perform(%Oban.Job{args: %{"contact_id" => contact_id, "organizatio...
method perform (line 30) | def perform(%Oban.Job{}) do
FILE: apps/transport/lib/jobs/expiration_notification_job.ex
class Transport.Jobs.ExpirationNotificationJob (line 1) | defmodule Transport.Jobs.ExpirationNotificationJob
method perform (line 48) | def perform(%Oban.Job{
method send_admin_and_producer_notifications (line 94) | defp send_admin_and_producer_notifications(job_id) do
method compute_producer_admin_expiration_data (line 103) | defp compute_producer_admin_expiration_data do
method send_admin_mail (line 112) | defp send_admin_mail([] = records), do: records
method send_admin_mail (line 114) | defp send_admin_mail(records) do
method send_producer_notifications (line 122) | defp send_producer_notifications({delay, records}, job_id) do
method dispatch_reuser_digest_jobs (line 140) | defp dispatch_reuser_digest_jobs(%Date{} = target_date) do
method insert_reuser_digest_jobs (line 156) | defp insert_reuser_digest_jobs(contact_ids, %Date{} = target_date) do
method send_reuser_email (line 171) | defp send_reuser_email(%DB.Contact{} = contact, html) do
method save_reuser_notifications (line 176) | defp save_reuser_notifications(%DB.Contact{} = contact, delays_and_dat...
method reuser_email_body (line 194) | defp reuser_email_body(records) do
method datasets_body (line 201) | defp datasets_body({delay, datasets}) do
method dataset_link (line 210) | defp dataset_link(%DB.Dataset{slug: slug, custom_title: custom_title}) do
method subscribed_dataset_ids_for_expiration (line 219) | defp subscribed_dataset_ids_for_expiration(%DB.Contact{id: contact_id}...
method contact_ids_subscribed_to_dataset_ids (line 229) | def contact_ids_subscribed_to_dataset_ids(dataset_ids) do
method gtfs_expiring_on_target_dates (line 248) | def gtfs_expiring_on_target_dates(%Date{} = reference_date) do
FILE: apps/transport/lib/jobs/gbfs_multi_validation_job.ex
class Transport.Jobs.GBFSMultiValidationDispatcherJob (line 1) | defmodule Transport.Jobs.GBFSMultiValidationDispatcherJob
method perform (line 10) | def perform(%Oban.Job{}) do
method relevant_resources (line 18) | def relevant_resources do
class Transport.Jobs.GBFSMultiValidationJob (line 26) | defmodule Transport.Jobs.GBFSMultiValidationJob
method perform (line 34) | def perform(%Oban.Job{args: %{"resource_id" => resource_id}}) do
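Many of the jobs in this directory follow the same dispatcher/worker split visible here: a scheduled dispatcher selects relevant rows and enqueues one Oban job per row. A generic sketch of that pattern; the module names, queues and relevant_resource_ids/0 stub are illustrative only:

defmodule DispatcherJobSketch do
  use Oban.Worker, queue: :dispatch

  @impl Oban.Worker
  def perform(%Oban.Job{}) do
    relevant_resource_ids()
    |> Enum.map(&WorkerJobSketch.new(%{"resource_id" => &1}))
    |> Oban.insert_all()

    :ok
  end

  # Placeholder: the real dispatchers run an Ecto query here.
  defp relevant_resource_ids, do: [1, 2, 3]
end

defmodule WorkerJobSketch do
  use Oban.Worker, queue: :default, max_attempts: 3

  @impl Oban.Worker
  def perform(%Oban.Job{args: %{"resource_id" => resource_id}}) do
    # Process a single resource (validate, fetch metadata, ...).
    IO.puts("processing resource #{resource_id}")
    :ok
  end
end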
FILE: apps/transport/lib/jobs/gbfs_operators_notification_job.ex
class Transport.Jobs.GBFSOperatorsNotificationJob (line 1) | defmodule Transport.Jobs.GBFSOperatorsNotificationJob
method perform (line 10) | def perform(%Oban.Job{}) do
method relevant_feeds (line 14) | def relevant_feeds do
method send_email (line 21) | def send_email([]), do: :ok
method send_email (line 23) | def send_email(resources) do
FILE: apps/transport/lib/jobs/geo_data/base.ex
class Transport.Jobs.BaseGeoData (line 1) | defmodule Transport.Jobs.BaseGeoData
method insert_data (line 14) | def insert_data(geo_data_import_id, prepare_data_for_insert_fn) do
method insert_data (line 20) | def insert_data(body, geo_data_import_id, prepare_data_for_insert_fn) do
method needs_import? (line 28) | defp needs_import?(
method needs_import? (line 34) | defp needs_import?(_, nil), do: true
method prepare_csv_data_for_import (line 103) | def prepare_csv_data_for_import(body, prepare_data_fn, opts \\ []) do
method parse_coordinate (line 121) | def parse_coordinate(s) do
method string_to_float (line 126) | defp string_to_float(s), do: s |> String.trim() |> String.replace([" "...
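parse_coordinate/1 and string_to_float/1 have to accept coordinates written with stray spaces or a decimal comma. A hedged sketch of such a lenient parser; the exact normalisation done by BaseGeoData may differ:

defmodule CoordinateParseSketch do
  @doc ~S|Parse "48,85" or " 2.35 " into a float; returns nil when the value is not numeric.|
  def parse_coordinate(s) when is_binary(s) do
    normalized =
      s
      |> String.trim()
      |> String.replace([" ", "\u00A0"], "")
      |> String.replace(",", ".")

    case Float.parse(normalized) do
      {value, ""} -> value
      _ -> nil
    end
  end
end

# CoordinateParseSketch.parse_coordinate("48,8566") #=> 48.8566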
FILE: apps/transport/lib/jobs/geo_data/bnlc_to_geo_data.ex
class Transport.Jobs.BNLCToGeoData (line 1) | defmodule Transport.Jobs.BNLCToGeoData
method perform (line 10) | def perform(%{}) do
method prepare_data_for_insert (line 14) | def prepare_data_for_insert(body, geo_data_import_id) do
FILE: apps/transport/lib/jobs/geo_data/gbfs_stations_to_geo_data.ex
class Transport.Jobs.GBFSStationsToGeoData (line 1) | defmodule Transport.Jobs.GBFSStationsToGeoData
method perform (line 15) | def perform(%Oban.Job{}) do
method prepare_data_for_insert (line 19) | def prepare_data_for_insert(geo_data_import_id) do
method prepare_stations_data (line 31) | def prepare_stations_data(gbfs_url, geo_data_import_id) do
method virtual_station? (line 55) | defp virtual_station?(%{"is_virtual_station" => true}), do: true
method virtual_station? (line 56) | defp virtual_station?(%{}), do: false
method missing_coordinates? (line 58) | defp missing_coordinates?(%{"lat" => lat, "lon" => lon}) do
method missing_coordinates? (line 62) | defp missing_coordinates?(%{}), do: true
method station_name (line 68) | defp station_name(%{"name" => names}) do
method relevant_gbfs_urls (line 77) | def relevant_gbfs_urls do
method http_client (line 92) | defp http_client, do: Transport.Shared.Wrapper.HTTPoison.impl()
FILE: apps/transport/lib/jobs/geo_data/irve_to_geo_data.ex
class Transport.Jobs.IRVEToGeoData (line 1) | defmodule Transport.Jobs.IRVEToGeoData
method perform (line 9) | def perform(%Oban.Job{}) do
method prepare_data_for_insert (line 13) | def prepare_data_for_insert(body, geo_data_import_id) do
FILE: apps/transport/lib/jobs/geo_data/lez_to_geo_data.ex
class Transport.Jobs.LowEmissionZonesToGeoData (line 1) | defmodule Transport.Jobs.LowEmissionZonesToGeoData
method perform (line 9) | def perform(%{}) do
method prepare_data_for_insert (line 13) | def prepare_data_for_insert(body, geo_data_import_id) do
method filter_dates (line 45) | def filter_dates(%{"properties" => properties}) do
FILE: apps/transport/lib/jobs/gtfs_diff_job.ex
class Transport.Jobs.GTFSDiff (line 1) | defmodule Transport.Jobs.GTFSDiff
method perform (line 8) | def perform(%Oban.Job{args: args} = job) do
method timeout (line 38) | def timeout(_job), do: :timer.seconds(job_timeout_sec())
method job_timeout_sec (line 41) | def job_timeout_sec, do: 30 * 60
method process_s3_objects (line 43) | defp process_s3_objects(job_id, gtfs_object_1, gtfs_object_2, args) do
method process_urls (line 55) | defp process_urls(job_id, gtfs_url_1, gtfs_url_2, args) do
method download_resource (line 70) | defp download_resource(url) do
method mk_tmp_file (line 89) | defp mk_tmp_file do
method unzip_local (line 93) | defp unzip_local(file) do
method process_diff (line 97) | defp process_diff(job_id, unzip_1, unzip_2, %{
FILE: apps/transport/lib/jobs/gtfs_import_stops_job.ex
class Transport.Jobs.GTFSImportStopsJob (line 1) | defmodule Transport.Jobs.GTFSImportStopsJob
method perform (line 10) | def perform(%Oban.Job{} = job) do
method refresh_all (line 24) | def refresh_all do
method clean_up_stale_imports (line 61) | def clean_up_stale_imports do
method active_up_to_date_datasets_resource_history_items (line 103) | def active_up_to_date_datasets_resource_history_items do
method refresh (line 114) | def refresh(resource_history_items) do
method safe_call (line 145) | def safe_call(cb, default) do
FILE: apps/transport/lib/jobs/gtfs_multi_validation_job.ex
class Transport.Jobs.GTFSValidationJob (line 1) | defmodule Transport.Jobs.GTFSValidationJob
method perform (line 8) | def perform(%Oban.Job{args: args}) do
FILE: apps/transport/lib/jobs/gtfs_rt_metadata.ex
class Transport.Jobs.GTFSRTMetadataDispatcherJob (line 1) | defmodule Transport.Jobs.GTFSRTMetadataDispatcherJob
method perform (line 14) | def perform(%Oban.Job{}) do
method remove_old_metadata (line 24) | def remove_old_metadata do
method relevant_resources (line 34) | def relevant_resources do
class Transport.Jobs.GTFSRTMetadataJob (line 43) | defmodule Transport.Jobs.GTFSRTMetadataJob
method perform (line 54) | def perform(%Oban.Job{args: %{"resource_id" => id}}) do
method present_entities (line 74) | def present_entities(count_entities) do
method process_feed (line 82) | def process_feed({:ok, %TransitRealtime.FeedMessage{} = feed}, %Resour...
method process_feed (line 95) | def process_feed({:error, _}, %Resource{id: id}) do
method days_to_keep (line 99) | def days_to_keep, do: 7
method datetime_limit (line 101) | def datetime_limit, do: DateTime.utc_now() |> DateTime.add(-days_to_ke...
FILE: apps/transport/lib/jobs/gtfs_rt_multi_validation_job.ex
class Transport.Jobs.GTFSRTMultiValidationDispatcherJob (line 1) | defmodule Transport.Jobs.GTFSRTMultiValidationDispatcherJob
method perform (line 10) | def perform(%Oban.Job{}) do
method relevant_datasets (line 23) | def relevant_datasets do
class Transport.Jobs.GTFSRTMultiValidationJob (line 32) | defmodule Transport.Jobs.GTFSRTMultiValidationJob
method perform (line 40) | def perform(%Oban.Job{args: %{"dataset_id" => dataset_id}}) do
method perform (line 44) | def perform(%Oban.Job{args: %{"resource_id" => resource_id}}) do
FILE: apps/transport/lib/jobs/gtfs_to_db.ex
class Transport.Jobs.GtfsToDB (line 1) | defmodule Transport.Jobs.GtfsToDB
method import_gtfs_from_resource_history (line 8) | def import_gtfs_from_resource_history(resource_history_id) do
method import_gtfs_from_resource_history (line 19) | def import_gtfs_from_resource_history(resource_history_id, :stops_and_...
method file_stream (line 26) | def file_stream(resource_history_id, gtfs_file_name) do
method fill_stops_from_resource_history (line 32) | def fill_stops_from_resource_history(resource_history_id, data_import_...
method stops_stream_insert (line 37) | def stops_stream_insert(file_stream, data_import_id) do
method fill_calendar_from_resource_history (line 58) | def fill_calendar_from_resource_history(resource_history_id, data_impo...
method calendar_stream_insert (line 63) | def calendar_stream_insert(file_stream, data_import_id) do
method get_dow_array (line 97) | def get_dow_array([_monday, _tuesday, _wednesday, _thursday, _friday, ...
method fill_stop_times_from_resource_history (line 104) | def fill_stop_times_from_resource_history(resource_history_id, data_im...
method stop_times_stream_insert (line 109) | def stop_times_stream_insert(file_stream, data_import_id) do
method cast_binary_to_interval (line 142) | def cast_binary_to_interval(s) do
method fill_calendar_dates_from_resource_history (line 158) | def fill_calendar_dates_from_resource_history(resource_history_id, dat...
method calendar_dates_stream_insert (line 163) | def calendar_dates_stream_insert(file_stream, data_import_id) do
method fill_trips_from_resource_history (line 184) | def fill_trips_from_resource_history(resource_history_id, data_import_...
method trips_stream_insert (line 189) | def trips_stream_insert(file_stream, data_import_id) do
method fill_agency_from_resource_history (line 210) | def fill_agency_from_resource_history(resource_history_id, data_import...
method agency_stream_insert (line 215) | def agency_stream_insert(file_stream, data_import_id) do
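cast_binary_to_interval/1 deals with GTFS stop_times, where arrival and departure times such as "25:30:00" legitimately run past midnight. A sketch converting such a string into a total number of seconds; the actual module casts to a database interval instead:

defmodule GTFSTimeSketch do
  @doc ~S|"25:30:00" -> 91_800 seconds; GTFS times may exceed 24 hours.|
  def to_seconds(binary) when is_binary(binary) do
    [h, m, s] =
      binary
      |> String.trim()
      |> String.split(":")
      |> Enum.map(&String.to_integer/1)

    h * 3600 + m * 60 + s
  end
end

# GTFSTimeSketch.to_seconds("25:30:00") #=> 91800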
FILE: apps/transport/lib/jobs/import_companies_job.ex
class Transport.Jobs.ImportCompaniesJob (line 1) | defmodule Transport.Jobs.ImportCompaniesJob
method perform (line 29) | def perform(%Oban.Job{args: %{"siren" => siren}}) do
method sirens (line 44) | def sirens do
method parse_date (line 53) | defp parse_date(nil), do: nil
method parse_date (line 54) | defp parse_date(value), do: datetime_to_date(value)
method parse_float (line 61) | defp parse_float(nil), do: nil
method parse_float (line 62) | defp parse_float(value), do: String.to_float(value)
method upsert! (line 64) | defp upsert!(siren, result) do
FILE: apps/transport/lib/jobs/import_dataset_contact_points_job.ex
class Transport.Jobs.ImportDatasetContactPointsJob (line 1) | defmodule Transport.Jobs.ImportDatasetContactPointsJob
method perform (line 26) | def perform(%Oban.Job{}) do
method dataset_datagouv_ids (line 37) | def dataset_datagouv_ids do
method import_contact_point (line 41) | def import_contact_point(datagouv_id) do
method guess_identity (line 70) | def guess_identity(name) do
method update_contact_points (line 83) | defp update_contact_points(%DB.Dataset{id: dataset_id}, []) do
method update_contact_points (line 92) | defp update_contact_points(%DB.Dataset{} = dataset, contact_points) do
method update_contact_point (line 103) | defp update_contact_point(%DB.Dataset{} = _dataset, %{"email" => nil} ...
method update_contact_point (line 107) | defp update_contact_point(%DB.Dataset{} = dataset, %{"email" => _, "na...
method find_or_create_contact (line 113) | defp find_or_create_contact(%{"email" => email, "name" => name}) do
method private_individual? (line 124) | defp private_individual?(nil), do: false
method private_individual? (line 126) | defp private_individual?(name) do
method guess_first_and_last_name (line 130) | defp guess_first_and_last_name(name) do
method upcase? (line 145) | defp upcase?(value), do: value == String.upcase(value)
FILE: apps/transport/lib/jobs/import_dataset_follower_reuser_improved_data_job.ex
class Transport.Jobs.ImportDatasetFollowerReuserImprovedDataJob (line 1) | defmodule Transport.Jobs.ImportDatasetFollowerReuserImprovedDataJob
method perform (line 10) | def perform(%Oban.Job{}) do
method relevant_contacts (line 24) | def relevant_contacts do
method relevant_dataset_ids (line 36) | def relevant_dataset_ids do
FILE: apps/transport/lib/jobs/import_dataset_followers_job.ex
class Transport.Jobs.ImportDatasetFollowersJob (line 1) | defmodule Transport.Jobs.ImportDatasetFollowersJob
method perform (line 21) | def perform(%Oban.Job{}) do
method delete_producers_following_their_datasets (line 35) | def delete_producers_following_their_datasets do
method import_dataset_followers (line 46) | def import_dataset_followers(%DB.Dataset{id: dataset_id, datagouv_id: ...
method contact_is_producer? (line 74) | def contact_is_producer?({_, %{organization_ids: contact_org_ids}}, %D...
method contact_details (line 87) | def contact_details do
FILE: apps/transport/lib/jobs/import_dataset_monthly_metrics_job.ex
class Transport.Jobs.ImportDatasetMonthlyMetricsJob (line 1) | defmodule Transport.Jobs.ImportDatasetMonthlyMetricsJob
method perform (line 18) | def perform(%Oban.Job{}) do
method dataset_datagouv_ids (line 29) | def dataset_datagouv_ids do
class Transport.Jobs.ImportMonthlyMetrics (line 34) | defmodule Transport.Jobs.ImportMonthlyMetrics
method already_imported? (line 80) | def already_imported?(:dataset, datagouv_id) do
method already_imported? (line 84) | def already_imported?(:resource, datagouv_id) do
method on_conflict (line 112) | defp on_conflict(:dataset, _), do: []
method on_conflict (line 113) | defp on_conflict(:resource, %{"dataset_id" => dataset_datagouv_id}), d...
method metrics_for_model (line 115) | defp metrics_for_model(:dataset, %{
method metrics_for_model (line 122) | defp metrics_for_model(:resource, %{"monthly_download_resource" => mon...
method changeset (line 126) | defp changeset(:dataset, %{datagouv_id: dataset_datagouv_id} = params,...
method changeset (line 131) | defp changeset(:resource, %{datagouv_id: resource_datagouv_id} = param...
method api_args (line 140) | defp api_args(:dataset, datagouv_id: datagouv_id, page_size: page_size...
method api_args (line 144) | defp api_args(:resource, datagouv_id: datagouv_id, page_size: page_siz...
method http_client (line 152) | defp http_client, do: Transport.Req.impl()
FILE: apps/transport/lib/jobs/import_gbfs_feed_contact_email_job.ex
class Transport.Jobs.ImportGBFSFeedContactEmailJob (line 1) | defmodule Transport.Jobs.ImportGBFSFeedContactEmailJob
method perform (line 21) | def perform(%Oban.Job{}) do
method update_feed_contact_email (line 25) | def update_feed_contact_email(
method find_or_create_contact (line 43) | defp find_or_create_contact(%{resource_url: resource_url, feed_contact...
method contact_title (line 67) | def contact_title(resource_url) do
method gbfs_feed_contact_emails (line 76) | def gbfs_feed_contact_emails do
FILE: apps/transport/lib/jobs/import_resource_monthly_metrics_job.ex
class Transport.Jobs.ImportResourceMonthlyMetricsJob (line 1) | defmodule Transport.Jobs.ImportResourceMonthlyMetricsJob
method perform (line 18) | def perform(%Oban.Job{}) do
method resource_datagouv_ids (line 29) | def resource_datagouv_ids do
FILE: apps/transport/lib/jobs/import_reuses_job.ex
class Transport.Jobs.ImportReusesJob (line 1) | defmodule Transport.Jobs.ImportReusesJob
method perform (line 15) | def perform(%Oban.Job{}) do
method truncate_reuses (line 24) | defp truncate_reuses, do: DB.Repo.delete_all(DB.Reuse)
method import_all_reuses (line 26) | defp import_all_reuses do
method orphan_reuse? (line 40) | defp orphan_reuse?(datasets, datagouv_ids) do
method dataset_datagouv_ids (line 44) | defp dataset_datagouv_ids do
method empty_optional_fields? (line 51) | defp empty_optional_fields?(attributes) do
method http_client (line 58) | defp http_client, do: Transport.Req.impl()
FILE: apps/transport/lib/jobs/irve_raw_consolidation_job.ex
class Transport.Jobs.IRVERawConsolidationJob (line 1) | defmodule Transport.Jobs.IRVERawConsolidationJob
method perform (line 9) | def perform(%Oban.Job{args: args}) do
method build_filter (line 37) | def build_filter(nil = _limit), do: nil
FILE: apps/transport/lib/jobs/irve_simple_consolidation_job.ex
class Transport.Jobs.IRVESimpleConsolidationJob (line 1) | defmodule Transport.Jobs.IRVESimpleConsolidationJob
method perform (line 10) | def perform(%Oban.Job{}) do
FILE: apps/transport/lib/jobs/multi_validation_with_error_notification_job.ex
class Transport.Jobs.MultiValidationWithErrorNotificationJob (line 1) | defmodule Transport.Jobs.MultiValidationWithErrorNotificationJob
method perform (line 53) | def perform(%Oban.Job{
method perform (line 73) | def perform(%Oban.Job{id: job_id, inserted_at: %DateTime{} = inserted_...
method enqueue_next_job (line 81) | defp enqueue_next_job(validations, attempt) do
method send_notifications_for_dataset (line 105) | def send_notifications_for_dataset({%DB.Dataset{} = dataset, multi_val...
method send_to_reusers (line 130) | defp send_to_reusers(subscriptions, %DB.Dataset{} = dataset,
method send_to_producers (line 148) | defp send_to_producers(subscriptions, %DB.Dataset{} = dataset, multi_v...
method multi_validation_to_resource (line 165) | defp multi_validation_to_resource(%DB.MultiValidation{
method multi_validation_to_resource (line 170) | defp multi_validation_to_resource(%DB.MultiValidation{resource: %DB.Re...
method send_mail (line 172) | defp send_mail(
method save_notification (line 182) | defp save_notification(%DB.Dataset{} = dataset, %DB.NotificationSubscr...
method save_notification (line 196) | defp save_notification(%DB.Dataset{} = dataset, %DB.NotificationSubscr...
method relevant_validations (line 211) | def relevant_validations(%DateTime{} = inserted_at) do
method relevant_static_validations (line 222) | defp relevant_static_validations(%DateTime{} = datetime_limit) do
method relevant_realtime_validations (line 239) | defp relevant_realtime_validations(%DateTime{} = datetime_limit) do
method relevant_realtime_validation? (line 254) | def relevant_realtime_validation?(%DB.MultiValidation{validator: @gtfs...
method relevant_realtime_validation? (line 258) | def relevant_realtime_validation?(%DB.MultiValidation{}), do: true
method subscriptions (line 260) | defp subscriptions(%DB.Dataset{} = dataset, role, validator_name) do
method reject_already_sent (line 266) | defp reject_already_sent(notification_subscriptions, %DB.Dataset{} = d...
method all_validators (line 274) | def all_validators, do: static_data_validators() ++ realtime_data_vali...
method sending_delay_by_validator (line 285) | def sending_delay_by_validator(validator) do
method email_addresses_already_sent (line 298) | def email_addresses_already_sent(%DB.Dataset{id: dataset_id}, validato...
method static_data_validators (line 319) | def static_data_validators,
method realtime_data_validators (line 322) | def realtime_data_validators,
FILE: apps/transport/lib/jobs/netex_poller_job.ex
class Transport.Jobs.NeTExPollerJob (line 1) | defmodule Transport.Jobs.NeTExPollerJob
method backoff (line 25) | def backoff(%Oban.Job{} = job) do
method perform (line 32) | def perform(%Oban.Job{
method perform (line 43) | def perform(%Oban.Job{
method proceed (line 53) | defp proceed(validation_id, resource_history_id, attempt, metadata) do
method snooze_poller (line 60) | def snooze_poller(attempt) do
FILE: apps/transport/lib/jobs/new_comments_notification_job.ex
class Transport.Jobs.NewCommentsNotificationJob (line 1) | defmodule Transport.Jobs.NewCommentsNotificationJob
method perform (line 17) | def perform(%Oban.Job{args: %{"contact_id" => contact_id, "dataset_ids...
method perform (line 41) | def perform(%Oban.Job{scheduled_at: %DateTime{} = scheduled_at}) do
method relevant_contacts (line 57) | def relevant_contacts(%DateTime{} = datetime) do
method relevant_datasets_query (line 73) | def relevant_datasets_query(%DateTime{} = datetime) do
method nb_days_delay (line 88) | def nb_days_delay(%Date{} = date) do
FILE: apps/transport/lib/jobs/new_datagouv_datasets_job.ex
class Transport.Jobs.NewDatagouvDatasetsJob (line 1) | defmodule Transport.Jobs.NewDatagouvDatasetsJob
method perform (line 78) | def perform(%Oban.Job{args: %{"check_rules" => true}}) do
method perform (line 93) | def perform(%Oban.Job{inserted_at: %DateTime{} = inserted_at}) do
method rules (line 99) | def rules, do: @rules
method filtered_datasets (line 101) | def filtered_datasets(datasets, %DateTime{} = inserted_at) do
method starting_date (line 114) | def starting_date(%DateTime{} = inserted_at) do
method window (line 126) | def window(%Date{} = inserted_at) do
method ignore_dataset? (line 145) | def ignore_dataset?(%{"organization" => %{"id" => "5a83f81fc751df6f857...
method ignore_dataset? (line 149) | def ignore_dataset?(%{}), do: false
method dataset_is_relevant? (line 151) | def dataset_is_relevant?(%{} = dataset, rule) do
method datagouv_datasets (line 166) | defp datagouv_datasets do
method match_for_rules (line 176) | defp match_for_rules(datasets) do
method send_emails (line 182) | defp send_emails([], %DateTime{}), do: :ok
method send_emails (line 184) | defp send_emails(matches, inserted_at) do
method rule_explanation (line 197) | def rule_explanation(%{schemas: schemas, tags: tags, formats: formats}...
method title_is_relevant? (line 216) | defp title_is_relevant?(%{"title" => title}, rule), do: string_matches...
method description_is_relevant? (line 217) | defp description_is_relevant?(%{"description" => description}, rule), ...
method string_matches? (line 219) | defp string_matches?(nil, _rule), do: false
method tags_is_relevant? (line 236) | defp tags_is_relevant?(%{"tags" => tags} = _dataset, rule) do
method resource_is_relevant? (line 240) | defp resource_is_relevant?(%{} = resource, rule) do
method resource_format_is_relevant? (line 248) | defp resource_format_is_relevant?(%{"format" => nil}, _rule), do: false
method resource_format_is_relevant? (line 250) | defp resource_format_is_relevant?(%{"format" => format}, %{formats: fo...
method resource_schema_is_relevant? (line 254) | defp resource_schema_is_relevant?(%{"schema" => %{"name" => schema_nam...
method resource_schema_is_relevant? (line 258) | defp resource_schema_is_relevant?(%{}, _rule), do: false
method normalize (line 272) | def normalize(value) do
method normalize_grapheme (line 279) | defp normalize_grapheme(grapheme) do
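normalize/1 and normalize_grapheme/1 make the rule matching accent-insensitive. A common way to do this in Elixir is NFD decomposition followed by stripping combining marks; a sketch under that assumption, which may differ from the job's own grapheme handling:

defmodule NormalizeSketch do
  @doc ~S|"Vélo en Libre-Service" -> "velo en libre-service"|
  def normalize(value) when is_binary(value) do
    value
    |> String.downcase()
    |> String.normalize(:nfd)
    # Drop Unicode combining diacritical marks left over from NFD decomposition.
    |> String.replace(~r/[\x{0300}-\x{036F}]/u, "")
  end
end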
FILE: apps/transport/lib/jobs/new_dataset_notifications_job.ex
class Transport.Jobs.NewDatasetNotificationsJob (line 1) | defmodule Transport.Jobs.NewDatasetNotificationsJob
method perform (line 11) | def perform(%Oban.Job{id: job_id, inserted_at: %DateTime{} = inserted_...
method relevant_datasets (line 16) | def relevant_datasets(%DateTime{} = inserted_at) do
method send_new_dataset_notifications (line 25) | def send_new_dataset_notifications([], _job_id), do: :ok
method send_new_dataset_notifications (line 27) | def send_new_dataset_notifications(datasets, job_id) do
FILE: apps/transport/lib/jobs/notification_subscription_producer_job.ex
class Transport.Jobs.NotificationSubscriptionProducerJob (line 1) | defmodule Transport.Jobs.NotificationSubscriptionProducerJob
method perform (line 14) | def perform(%Oban.Job{}) do
method create_producer_subscriptions (line 35) | defp create_producer_subscriptions(subscriptions) do
method create_subscriptions (line 44) | defp create_subscriptions(%{contact_id: _, dataset_id: _} = attrs) do
FILE: apps/transport/lib/jobs/oban_logger.ex
class Transport.Jobs.ObanLogger (line 1) | defmodule Transport.Jobs.ObanLogger
method email_on_failure_tag (line 21) | def email_on_failure_tag, do: @tag_email_on_failure
method handle_event (line 23) | def handle_event(
method setup (line 44) | def setup do
FILE: apps/transport/lib/jobs/on_demand_netex_poller_job.ex
class Transport.Jobs.OnDemandNeTExPollerJob (line 1) | defmodule Transport.Jobs.OnDemandNeTExPollerJob
method backoff (line 28) | def backoff(%Oban.Job{} = job) do
method perform (line 35) | def perform(%Oban.Job{args: %{"id" => multivalidation_id} = args, atte...
method later (line 40) | def later(validation_id, multivalidation_id, metadata, url) do
method check_result (line 48) | def check_result(%{"permanent_url" => url, "validation_id" => validati...
method check_result (line 56) | def check_result(%{"permanent_url" => _, "validation_id" => _} = args,...
method handle_error (line 60) | def handle_error(error_result) do
method handle_success (line 66) | def handle_success(ok_result, url) do
method handle_pending (line 72) | def handle_pending(attempt) do
method build_successful_validation_result (line 78) | defp build_successful_validation_result(%{"validations" => validation,...
method build_error_validation_result (line 93) | defp build_error_validation_result(%{message: msg}) do
FILE: apps/transport/lib/jobs/on_demand_validation_helpers.ex
class Transport.Jobs.OnDemandValidationHelpers (line 1) | defmodule Transport.Jobs.OnDemandValidationHelpers
method terminal_state (line 8) | def terminal_state(result), do: {:terminal, result}
method delegated_state (line 9) | def delegated_state, do: :delegated
method snoozed_state (line 10) | def snoozed_state(duration_in_seconds), do: {:snooze, duration_in_seco...
method completed (line 12) | def completed, do: %{"state" => "completed"}
method error (line 14) | def error(error_message), do: %{"state" => "error", "error_reason" => ...
method handle_validation_result (line 16) | def handle_validation_result(result, multivalidation_id) do
method update_multivalidation (line 24) | defp update_multivalidation(multivalidation_id, changes) do
FILE: apps/transport/lib/jobs/on_demand_validation_job.ex
class Transport.Jobs.OnDemandValidationJob (line 1) | defmodule Transport.Jobs.OnDemandValidationJob
method perform (line 23) | def perform(%Oban.Job{args: %{"id" => multivalidation_id, "state" => "...
method perform_validation (line 38) | defp perform_validation(%{"type" => "gtfs", "permanent_url" => url}) do
method perform_validation (line 62) | defp perform_validation(%{"type" => "gtfs-flex", "permanent_url" => ur...
method perform_validation (line 81) | defp perform_validation(%{"type" => "netex", "id" => multivalidation_i...
method perform_validation (line 94) | defp perform_validation(%{
method perform_validation (line 117) | defp perform_validation(%{
method perform_validation (line 146) | defp perform_validation(%{
method normalize_download (line 169) | defp normalize_download(result) do
method remove_files (line 176) | defp remove_files(paths) do
method process_download (line 181) | defp process_download([{:ok, gtfs_path}, {:ok, gtfs_rt_path}]) do
method process_download (line 185) | defp process_download(results) do
method run_save_gtfs_rt_validation (line 191) | defp run_save_gtfs_rt_validation(gtfs_path, gtfs_rt_path, opts \\ []) do
method gtfs_rt_result_path (line 231) | def gtfs_rt_result_path(gtfs_rt_path) do
method download_from_url (line 236) | defp download_from_url(url, path) do
method get_request (line 254) | defp get_request(url) do
FILE: apps/transport/lib/jobs/periodic_reminder_producers_notification_job.ex
class Transport.Jobs.PeriodicReminderProducersNotificationJob (line 1) | defmodule Transport.Jobs.PeriodicReminderProducersNotificationJob
method perform (line 37) | def perform(%Oban.Job{args: %{"contact_id" => contact_id}}) do
method relevant_contacts (line 56) | defp relevant_contacts do
method schedule_jobs (line 81) | defp schedule_jobs(contacts, %DateTime{} = scheduled_at) do
method sent_mail_recently? (line 97) | def sent_mail_recently?(%DB.Contact{email: email}) do
method send_mail_producer_without_subscriptions (line 105) | defp send_mail_producer_without_subscriptions(%DB.Contact{organization...
method send_mail_producer_with_subscriptions (line 121) | defp send_mail_producer_with_subscriptions(%DB.Contact{} = contact) do
method save_notification (line 135) | defp save_notification(%DB.Contact{id: contact_id, email: email}, temp...
method datasets_subscribed_as_producer (line 146) | def datasets_subscribed_as_producer(%DB.Contact{notification_subscript...
method subscribed_as_producer? (line 155) | def subscribed_as_producer?(%DB.Contact{notification_subscriptions: su...
method other_producers_subscribers (line 160) | def other_producers_subscribers(%DB.Contact{id: contact_id} = contact) do
method chunk_size (line 175) | def chunk_size do
method first_monday_of_month (line 191) | def first_monday_of_month(%Date{} = date) do
method next_weekday (line 211) | def next_weekday(%DateTime{} = datetime) do
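first_monday_of_month/1 and next_weekday/1 are used to schedule reminder e-mails on working days. A small sketch of the first computation, using only the standard Date module (no project code involved):

defmodule FirstMondaySketch do
  @doc "First Monday of the month containing `date`."
  def first_monday_of_month(%Date{} = date) do
    beginning = Date.beginning_of_month(date)
    # Date.day_of_week/1: Monday is 1 ... Sunday is 7.
    offset = rem(8 - Date.day_of_week(beginning), 7)
    Date.add(beginning, offset)
  end
end

# FirstMondaySketch.first_monday_of_month(~D[2024-03-15]) #=> ~D[2024-03-04]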
FILE: apps/transport/lib/jobs/promote_producer_space_job.ex
class Transport.Jobs.PromoteProducerSpaceJob (line 1) | defmodule Transport.Jobs.PromoteProducerSpaceJob
method perform (line 15) | def perform(%Oban.Job{args: %{"contact_id" => contact_id}}) do
method save_notification (line 34) | defp save_notification(%DB.Contact{id: contact_id, email: email}) do
method create_producer_subscriptions (line 43) | defp create_producer_subscriptions(%DB.Contact{id: contact_id}, datase...
FILE: apps/transport/lib/jobs/promote_reuser_space_job.ex
class Transport.Jobs.PromoteReuserSpaceJob (line 1) | defmodule Transport.Jobs.PromoteReuserSpaceJob
method perform (line 9) | def perform(%Oban.Job{args: %{"contact_id" => contact_id}}) do
method save_notification (line 19) | defp save_notification(%DB.Contact{id: contact_id, email: email}) do
FILE: apps/transport/lib/jobs/refresh_autocomplete_job.ex
class Transport.Jobs.RefreshAutocompleteJob (line 1) | defmodule Transport.Jobs.RefreshAutocompleteJob
method perform (line 10) | def perform(%Oban.Job{}) do
FILE: apps/transport/lib/jobs/remove_history_job.ex
class Transport.Jobs.RemoveHistoryJob (line 1) | defmodule Transport.Jobs.RemoveHistoryJob
method perform (line 18) | def perform(%Oban.Job{args: %{"dataset_type" => dataset_type}}) do
method perform (line 47) | def perform(%Oban.Job{args: %{"action" => "remove"} = args}) do
method remove_objects_and_enqueue_job (line 95) | defp remove_objects_and_enqueue_job(objects, %{"action" => "remove"} =...
method mark_for_deletion (line 103) | defp mark_for_deletion(ids) do
method remove_resource_history_rows (line 110) | defp remove_resource_history_rows(ids) do
method remove_s3_objects (line 114) | defp remove_s3_objects(paths) do
FILE: apps/transport/lib/jobs/resource_history_job.ex
class Transport.Jobs.ResourceHistoryAndValidationDispatcherJob (line 1) | defmodule Transport.Jobs.ResourceHistoryAndValidationDispatcherJob
method perform (line 10) | def perform(%Oban.Job{args: %{"mode" => "reuser_improved_data"}}) do
method perform (line 24) | def perform(_job) do
method resources_to_historise (line 38) | def resources_to_historise(resource_id \\ nil) do
class Transport.Jobs.ResourceHistoryJob (line 59) | defmodule Transport.Jobs.ResourceHistoryJob
method perform (line 81) | def perform(%Oban.Job{args: %{"resource_id" => resource_id}} = job) do
method perform (line 89) | def perform(%Oban.Job{args: %{"reuser_improved_data_id" => reuser_impr...
method handle_history (line 94) | defp handle_history([], %Oban.Job{} = job) do
method handle_history (line 100) | defp handle_history([%DB.Resource{} = resource], %Oban.Job{} = job) do
method handle_history (line 104) | defp handle_history(%DB.ReuserImprovedData{} = reuser_improved_data, %...
method do_handle_history (line 108) | defp do_handle_history(data, %Oban.Job{} = job) do
method timeout (line 129) | def timeout(_job), do: :timer.minutes(2)
method process_download (line 131) | defp process_download({:error, message}, %DB.Resource{id: resource_id}...
method process_download (line 138) | defp process_download({:ok, resource_path, headers}, resource_or_impro...
method should_store_resource? (line 195) | def should_store_resource?(_, []), do: false
method should_store_resource? (line 196) | def should_store_resource?(_, nil), do: false
method should_store_resource? (line 198) | def should_store_resource?(%DB.Resource{id: resource_id}, resource_has...
method should_store_resource? (line 207) | def should_store_resource?(%DB.ReuserImprovedData{id: reuser_improved_...
method compare_history (line 216) | defp compare_history(history, resource_hash) do
method same_resource? (line 229) | def same_resource?(%DB.ResourceHistory{payload: %{"zip_metadata" => rh...
method same_resource? (line 233) | def same_resource?(%DB.ResourceHistory{payload: %{"content_hash" => rh...
method same_resource? (line 237) | def same_resource?(nil, _), do: false
method set_of_sha256 (line 239) | def set_of_sha256(items) do
method resource_hash (line 243) | defp resource_hash(data, resource_path) do
method store_resource_history! (line 268) | defp store_resource_history!(%DB.Resource{} = resource, payload) do
method store_resource_history! (line 288) | defp store_resource_history!(%DB.ReuserImprovedData{id: reuser_improve...
method touch_resource_history! (line 297) | defp touch_resource_history!(%DB.ResourceHistory{id: id, resource_id: ...
method download_path (line 303) | defp download_path(%DB.Resource{id: resource_id}) do
method download_path (line 307) | defp download_path(%DB.ReuserImprovedData{id: reuser_improved_data_id}...
method download_resource (line 311) | def download_resource(%DB.Resource{url: url}, file_path) do
method download_resource (line 315) | def download_resource(%DB.ReuserImprovedData{download_url: download_ur...
method http_client (line 336) | def http_client, do: Transport.Shared.Wrapper.HTTPoison.impl()
method remove_file (line 338) | def remove_file(path), do: File.rm(path)
method upload_filename (line 340) | def upload_filename(%DB.Resource{id: resource_id} = resource, resource...
method upload_filename (line 346) | def upload_filename(
method file_extension (line 359) | def file_extension(%DB.Resource{format: format}, resource_path) do
method file_extension (line 367) | def file_extension(%DB.ReuserImprovedData{}, resource_path) do
method relevant_http_headers (line 375) | def relevant_http_headers(%HTTPoison.Response{headers: headers}) do
method relevant_http_headers (line 402) | def relevant_http_headers(%Req.Response{headers: headers}) do
method cleanup_header (line 408) | defp cleanup_header("content-disposition", binary) do
method cleanup_header (line 418) | defp cleanup_header(_header, binary), do: binary
method latest_schema_version_to_date (line 420) | defp latest_schema_version_to_date(%DB.Resource{schema_name: nil}), do...
method latest_schema_version_to_date (line 422) | defp latest_schema_version_to_date(%DB.Resource{schema_name: schema_na...
method historize_and_validate_job (line 428) | def historize_and_validate_job(first_jobs_args, options \\ []) do
FILE: apps/transport/lib/jobs/resource_history_jsonschema_validation_job.ex
class Transport.Jobs.ResourceHistoryJSONSchemaValidationJob (line 1) | defmodule Transport.Jobs.ResourceHistoryJSONSchemaValidationJob
method perform (line 22) | def perform(%Oban.Job{}) do
method validator (line 26) | def validator, do: Transport.Validators.JSONSchema
FILE: apps/transport/lib/jobs/resource_history_schema_validation.ex
class Transport.Jobs.ResourceHistorySchemaValidation (line 1) | defmodule Transport.Jobs.ResourceHistorySchemaValidation
method validate_resource_history_for_schema (line 11) | def validate_resource_history_for_schema(resource_history_id, validato...
FILE: apps/transport/lib/jobs/resource_history_tableschema_validation_job.ex
class Transport.Jobs.ResourceHistoryTableSchemaValidationJob (line 1) | defmodule Transport.Jobs.ResourceHistoryTableSchemaValidationJob
method perform (line 22) | def perform(%Oban.Job{}) do
method validator (line 26) | def validator, do: Transport.Validators.TableSchema
FILE: apps/transport/lib/jobs/resource_history_validata_json_job.ex
class Transport.Jobs.ResourceHistoryValidataJSONJob (line 1) | defmodule Transport.Jobs.ResourceHistoryValidataJSONJob
method perform (line 25) | def perform(%Oban.Job{}) do
method validator (line 29) | def validator, do: Transport.Validators.ValidataJson
FILE: apps/transport/lib/jobs/resource_history_validation_job.ex
class Transport.Jobs.ResourceHistoryValidationJob (line 1) | defmodule Transport.Jobs.ResourceHistoryValidationJob
method perform (line 21) | def perform(%Oban.Job{args: %{"format" => format, "validator" => valid...
method validate (line 90) | defp validate(%DB.ResourceHistory{} = resource_history, validator, for...
method run_validation (line 94) | defp run_validation(%DB.ResourceHistory{} = resource_history, validato...
FILE: apps/transport/lib/jobs/resource_unavailable_job.ex
class Transport.Jobs.ResourcesUnavailableDispatcherJob (line 1) | defmodule Transport.Jobs.ResourcesUnavailableDispatcherJob
method perform (line 11) | def perform(%Oban.Job{args: args}) do
method resources_to_check (line 25) | def resources_to_check(false = _only_unavailable) do
method resources_to_check (line 33) | def resources_to_check(true = _only_unavailable) do
class Transport.Jobs.ResourceUnavailableJob (line 41) | defmodule Transport.Jobs.ResourceUnavailableJob
method perform (line 53) | def perform(%Oban.Job{args: %{"resource_id" => resource_id}}) do
method maybe_update_url (line 67) | defp maybe_update_url(%Resource{filetype: "file", url: url, latest_url...
method maybe_update_url (line 78) | defp maybe_update_url(%Resource{} = resource), do: {:no_op, resource}
method historize_resource (line 80) | defp historize_resource({:no_op, %Resource{}} = payload), do: payload
method historize_resource (line 82) | defp historize_resource({:updated, %Resource{id: resource_id}} = paylo...
method check_availability (line 91) | defp check_availability({:updated, %Resource{} = resource}) do
method check_availability (line 95) | defp check_availability({:no_op, %Resource{id: resource_id, format: fo...
method skip_resource_ids (line 108) | def skip_resource_ids do
method update_resource (line 112) | defp update_resource({is_available, %Resource{} = resource}) do
method create_or_update_resource_unavailability (line 117) | def create_or_update_resource_unavailability({false = _is_available, %...
method create_or_update_resource_unavailability (line 130) | def create_or_update_resource_unavailability({true = _is_available, %R...
method now (line 177) | defp now, do: DateTime.utc_now() |> DateTime.truncate(:second)
method http_client (line 178) | defp http_client, do: Transport.Shared.Wrapper.HTTPoison.impl()
method timeout (line 181) | def timeout(_job), do: :timer.seconds(30)
FILE: apps/transport/lib/jobs/resource_unavailable_notification_job.ex
class Transport.Jobs.ResourceUnavailableNotificationJob (line 1) | defmodule Transport.Jobs.ResourceUnavailableNotificationJob
method perform (line 53) | def perform(%Oban.Job{
method perform (line 109) | def perform(%Oban.Job{id: job_id, inserted_at: %DateTime{} = inserted_...
method enqueue_next_job (line 141) | defp enqueue_next_job(%DB.Dataset{id: dataset_id}, resources, hours_co...
method send_to_reusers (line 154) | defp send_to_reusers(subscriptions, %DB.Dataset{} = dataset, unavailab...
method send_to_producers (line 173) | defp send_to_producers(subscriptions, %DB.Dataset{} = dataset, unavail...
method send_mail (line 191) | defp send_mail(
method save_notification (line 199) | defp save_notification(%DB.NotificationSubscription{role: :reuser} = s...
method save_notification (line 212) | defp save_notification(%DB.NotificationSubscription{role: :producer} =...
method deleted_and_recreated_resource_hosted_on_datagouv (line 233) | def deleted_and_recreated_resource_hosted_on_datagouv(%DB.Dataset{}, [...
method deleted_and_recreated_resource_hosted_on_datagouv (line 235) | def deleted_and_recreated_resource_hosted_on_datagouv(%DB.Dataset{} = ...
method created_resource_hosted_on_datagouv_recently? (line 240) | def created_resource_hosted_on_datagouv_recently?(%DB.Dataset{datagouv...
method parse_datetime (line 255) | defp parse_datetime(value) do
method relevant_unavailabilities (line 260) | def relevant_unavailabilities(%DateTime{} = inserted_at) do
method subscriptions (line 272) | defp subscriptions(%DB.Dataset{} = dataset, role) do
method reject_already_sent (line 278) | defp reject_already_sent(notification_subscriptions, %DB.Dataset{} = d...
method email_addresses_already_sent (line 286) | def email_addresses_already_sent(%DB.Dataset{id: dataset_id}) do
method resource_id (line 296) | defp resource_id(%DB.Resource{id: id}), do: id
method resource_id (line 297) | defp resource_id(%DB.ResourceUnavailability{resource: %DB.Resource{id:...
method resource_title (line 299) | defp resource_title(%DB.Resource{title: title}), do: title
method resource_title (line 300) | defp resource_title(%DB.ResourceUnavailability{resource: %DB.Resource{...
FILE: apps/transport/lib/jobs/resource_validation_job.ex
class Transport.Jobs.ResourceValidationJob (line 1) | defmodule Transport.Jobs.ResourceValidationJob
FILE: apps/transport/lib/jobs/resources_changed_notification_job.ex
class Transport.Jobs.ResourcesChangedNotificationJob (line 1) | defmodule Transport.Jobs.ResourcesChangedNotificationJob
method perform (line 23) | def perform(%Oban.Job{id: job_id, args: %{"dataset_id" => dataset_id}}...
method relevant_datasets (line 35) | def relevant_datasets do
FILE: apps/transport/lib/jobs/stops_registry_snapshot_job.ex
class Transport.Jobs.StopsRegistrySnapshotJob (line 1) | defmodule Transport.Jobs.StopsRegistrySnapshotJob
method perform (line 10) | def perform(%Oban.Job{}) do
FILE: apps/transport/lib/jobs/table_size_history_job.ex
class Transport.Jobs.TableSizeHistoryJob (line 1) | defmodule Transport.Jobs.TableSizeHistoryJob
method perform (line 8) | def perform(%Oban.Job{}) do
FILE: apps/transport/lib/jobs/update_contacts_job.ex
class Transport.Jobs.UpdateContactsJob (line 1) | defmodule Transport.Jobs.UpdateContactsJob
method perform (line 24) | def perform(%Oban.Job{args: %{"contact_ids" => ids}}) do
method update_contact (line 31) | defp update_contact(%DB.Contact{datagouv_user_id: datagouv_user_id} = ...
FILE: apps/transport/lib/jobs/update_counter_cache_job.ex
class Transport.Jobs.UpdateCounterCacheJob (line 1) | defmodule Transport.Jobs.UpdateCounterCacheJob
FILE: apps/transport/lib/jobs/visit_download_statistics_job.ex
class Transport.Jobs.VisitDownloadStatisticsJob (line 1) | defmodule Transport.Jobs.VisitDownloadStatisticsJob
method perform (line 12) | def perform(%Oban.Job{scheduled_at: %DateTime{} = scheduled_at}) do
method relevant_contacts (line 24) | def relevant_contacts do
FILE: apps/transport/lib/jobs/visit_proxy_statistics_job.ex
class Transport.Jobs.VisitProxyStatisticsJob (line 1) | defmodule Transport.Jobs.VisitProxyStatisticsJob
method perform (line 12) | def perform(%Oban.Job{scheduled_at: %DateTime{} = scheduled_at}) do
method relevant_contacts (line 24) | def relevant_contacts do
FILE: apps/transport/lib/jobs/visit_statistics_base.ex
class Transport.Jobs.VisitStatisticsBase (line 1) | defmodule Transport.Jobs.VisitStatisticsBase
method perform_job (line 24) | def perform_job(
method relevant_contacts (line 45) | def relevant_contacts(resource_filter_fn) do
method email_addresses_already_sent (line 58) | def email_addresses_already_sent(%DateTime{} = scheduled_at, notificat...
method save_notification (line 71) | def save_notification(%DB.Contact{id: contact_id, email: email} = cont...
FILE: apps/transport/lib/jobs/warn_user_inactivity_job.ex
class Transport.Jobs.WarnUserInactivityJob (line 1) | defmodule Transport.Jobs.WarnUserInactivityJob
method perform (line 11) | def perform(%Oban.Job{args: %{}}) do
method inactivity_threshold (line 22) | defp inactivity_threshold(now), do: DateTime.add(now, -30 * 24, :day)
method pruning_threshold (line 23) | defp pruning_threshold(now), do: DateTime.add(now, -30 * 25, :day)
method warn_inactive_contact (line 25) | defp warn_inactive_contact(%DateTime{} = pruning_dt, %DB.Contact{} = c...
method actually_warn_inactive_contact (line 37) | defp actually_warn_inactive_contact(%DB.Contact{} = contact, horizon) do
method horizon_days (line 42) | def horizon_days, do: [30, 15, 1]
method horizon_txt (line 48) | def horizon_txt(horizon) do
method save_notification (line 52) | defp save_notification(%DB.Contact{} = contact, horizon) do
method role (line 62) | defp role(%DB.Contact{} = contact) do
FILE: apps/transport/lib/jobs/workflow.ex
class Transport.Jobs.Workflow (line 1) | defmodule Transport.Jobs.Workflow
method perform (line 58) | def perform(
method execute_jobs (line 67) | defp execute_jobs([job], args, workflow_job) do
method execute_jobs (line 72) | defp execute_jobs([job | tail], args, workflow_job) do
method insert_job (line 111) | def insert_job(job_name, args, workflow_job) do
method handle_event (line 164) | def handle_event(
method handle_event (line 190) | def handle_event(
method map_to_kw (line 211) | def map_to_kw(%{} = m) do
method map_to_kw (line 216) | def map_to_kw(v), do: v
method kw_to_map (line 231) | def kw_to_map([]), do: %{}
method kw_to_map (line 237) | def kw_to_map([{_, _}] = kw) do
method kw_to_map (line 241) | def kw_to_map([head | tail]) do
class Notifier (line 117) | defmodule Notifier
method impl (line 122) | def impl, do: Application.fetch_env!(:transport, :workflow_notifier)
method notify_workflow (line 123) | def notify_workflow(job, args), do: impl().notify_workflow(job, args)
class ObanNotifier (line 126) | defmodule ObanNotifier
method notify_workflow (line 133) | def notify_workflow(%{meta: %{"workflow" => true}}, args) do
method notify_workflow (line 137) | def notify_workflow(_job, _args), do: nil
class ProcessNotifier (line 140) | defmodule ProcessNotifier
method notify_workflow (line 149) | def notify_workflow(%{meta: %{"workflow" => true}}, args) do
method notify_workflow (line 158) | def notify_workflow(_job, _args), do: nil
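The nested Notifier, ObanNotifier and ProcessNotifier modules follow a pattern visible throughout this codebase: a small behaviour whose concrete implementation is resolved at runtime via Application.fetch_env!/2, so production and tests can plug in different notifiers. A condensed sketch of that indirection; the callback name and config key come from the listing above, the config wiring is an assumption:

```elixir
# Illustrative sketch of the runtime dispatch, not the repository's exact code.
defmodule WorkflowNotifierSketch do
  @callback notify_workflow(map(), map()) :: any()

  # The concrete module is read from application config, so production can use
  # an Oban-based notifier while tests substitute a process-based one.
  def impl, do: Application.fetch_env!(:transport, :workflow_notifier)
  def notify_workflow(job, args), do: impl().notify_workflow(job, args)
end

# Assumed wiring in config, e.g.:
#   config :transport, :workflow_notifier, Transport.Jobs.Workflow.ObanNotifier
```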
FILE: apps/transport/lib/jobs/workflow_dummy_jobs.ex
class Transport.Jobs.Dummy (line 1) | defmodule Transport.Jobs.Dummy
class JobA (line 5) | defmodule JobA
method perform (line 12) | def perform(%Oban.Job{args: %{"some_id" => some_id}} = job) do
class JobB (line 26) | defmodule JobB
method perform (line 33) | def perform(%Oban.Job{args: %{"some_id" => some_id}} = job) do
class FailingJob (line 46) | defmodule FailingJob
method perform (line 53) | def perform(%Oban.Job{args: %{"some_id" => some_id}} = job) do
FILE: apps/transport/lib/mailer/admin_notifier.ex
class Transport.AdminNotifier (line 1) | defmodule Transport.AdminNotifier
method contact (line 8) | def contact(email, user_type, question_type, subject, question) do
method feedback (line 14) | def feedback(rating, explanation, email, feature) do
method bnlc_consolidation_report (line 29) | def bnlc_consolidation_report(subject, body, file_url) do
method datasets_without_gtfs_rt_related_resources (line 35) | def datasets_without_gtfs_rt_related_resources(datasets) do
method unknown_gbfs_operator_feeds (line 43) | def unknown_gbfs_operator_feeds(resources) do
method datasets_climate_resilience_bill_inappropriate_licence (line 51) | def datasets_climate_resilience_bill_inappropriate_licence(datasets) do
method new_datagouv_datasets (line 57) | def new_datagouv_datasets(category, datagouv_datasets, rule_explanatio...
method expiration (line 67) | def expiration(records) do
method inactive_datasets (line 73) | def inactive_datasets(reactivated_datasets, inactive_datasets, archive...
method oban_failure (line 83) | def oban_failure(worker) do
method notify_bizdev (line 91) | defp notify_bizdev do
method notify_tech (line 100) | defp notify_tech do
method notify_contact (line 107) | defp notify_contact(form_name, email) do
method expiration_str (line 114) | defp expiration_str({delay, records}) do
method expiration_notification_enabled_str (line 130) | def expiration_notification_enabled_str(%DB.Dataset{} = dataset) do
method climate_resilience_str (line 138) | defp climate_resilience_str(%DB.Dataset{} = dataset) do
method has_expiration_notifications? (line 146) | def has_expiration_notifications?(%DB.Dataset{} = dataset) do
method fmt_inactive_datasets (line 152) | defp fmt_inactive_datasets([]), do: ""
method fmt_inactive_datasets (line 154) | defp fmt_inactive_datasets(inactive_datasets) do
method fmt_reactivated_datasets (line 163) | defp fmt_reactivated_datasets([]), do: ""
method fmt_reactivated_datasets (line 165) | defp fmt_reactivated_datasets(reactivated_datasets) do
method fmt_archived_datasets (line 174) | defp fmt_archived_datasets([]), do: ""
method fmt_archived_datasets (line 176) | defp fmt_archived_datasets(archived_datasets) do
method count_archived_datasets (line 187) | def count_archived_datasets do
method backoffice_archived_datasets_url (line 191) | defp backoffice_archived_datasets_url do
method link_and_name_from_datagouv_payload (line 196) | defp link_and_name_from_datagouv_payload(%{"title" => title, "page" =>...
method link_and_name (line 202) | defp link_and_name(%DB.Dataset{slug: slug, custom_title: custom_title}...
FILE: apps/transport/lib/mailer/mailer.ex
class Transport.Mailer (line 1) | defmodule Transport.Mailer
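Transport.AdminNotifier above and Transport.UserNotifier below compose emails that are delivered through Transport.Mailer. Assuming a standard Swoosh setup (which the notifier/mailer split suggests, though the addresses, wording and delivery call below are placeholders, not taken from the repository), a notifier function typically looks like:

```elixir
# Illustrative Swoosh-style sketch; addresses and copy are placeholders, and
# the assumption that Transport.Mailer is a Swoosh mailer is not confirmed here.
defmodule AdminNotifierSketch do
  import Swoosh.Email

  def contact(email, user_type, question_type, subject, question) do
    new()
    |> to("contact@transport.example")
    |> from({"transport.data.gouv.fr", "noreply@transport.example"})
    |> reply_to(email)
    |> subject(subject)
    |> text_body("#{user_type} / #{question_type}\n\n#{question}")
  end
end

# Delivery is a separate step, e.g.:
#   AdminNotifierSketch.contact("user@example.com", "producer", "question", "Hello", "…")
#   |> Transport.Mailer.deliver()
```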
FILE: apps/transport/lib/mailer/user_notifier.ex
class Transport.UserNotifier (line 1) | defmodule Transport.UserNotifier
method resources_changed (line 8) | def resources_changed(%DB.Contact{} = contact, %DB.Dataset{} = dataset...
method new_comments_reuser (line 15) | def new_comments_reuser(%DB.Contact{} = contact, datasets) do
method new_comments_producer (line 22) | def new_comments_producer(%DB.Contact{} = contact, comments_number, co...
method promote_reuser_space (line 29) | def promote_reuser_space(%DB.Contact{} = contact) do
method dataset_now_on_nap (line 36) | def dataset_now_on_nap(%DB.Contact{} = contact, dataset) do
method datasets_switching_climate_resilience_bill (line 47) | def datasets_switching_climate_resilience_bill(
method multi_validation_with_error_notification (line 61) | def multi_validation_with_error_notification(%DB.Contact{} = contact, ...
method multi_validation_with_error_notification (line 74) | def multi_validation_with_error_notification(%DB.Contact{} = contact, ...
method resource_unavailable (line 87) | def resource_unavailable(%DB.Contact{} = contact, :producer,
method resource_unavailable (line 107) | def resource_unavailable(%DB.Contact{} = contact, :reuser,
method periodic_reminder_producers_no_subscriptions (line 127) | def periodic_reminder_producers_no_subscriptions(%DB.Contact{} = conta...
method periodic_reminder_producers_with_subscriptions (line 134) | def periodic_reminder_producers_with_subscriptions(
method new_datasets (line 149) | def new_datasets(%DB.Contact{} = contact, datasets) do
method expiration_producer (line 156) | def expiration_producer(%DB.Contact{} = contact, dataset, resources, d...
method expiration_reuser (line 167) | def expiration_reuser(%DB.Contact{} = contact, html) do
method promote_producer_space (line 174) | def promote_producer_space(%DB.Contact{} = contact) do
method warn_inactivity (line 183) | def warn_inactivity(%DB.Contact{email: email} = contact, horizon) do
method visit_proxy_statistics (line 190) | def visit_proxy_statistics(%DB.Contact{} = contact) do
method visit_download_statistics (line 197) | def visit_download_statistics(%DB.Contact{} = contact) do
method common_email_options (line 206) | defp common_email_options(%DB.Contact{} = contact) do
method resource_titles (line 220) | def resource_titles(resources) do
FILE: apps/transport/lib/mix/tasks/dump_gtfs_rt.ex
class Mix.Tasks.Decode.GtfsRt (line 1) | defmodule Mix.Tasks.Decode.GtfsRt
method run (line 11) | def run([url]) do
FILE: apps/transport/lib/mix/tasks/logs.ex
class Mix.Tasks.Clever.Logs (line 1) | defmodule Mix.Tasks.Clever.Logs
method fetch_log_page (line 32) | def fetch_log_page(app, end_time) do
method default_start_time (line 62) | def default_start_time do
method default_end_time (line 71) | def default_end_time, do: DateTime.utc_now() |> DateTime.to_iso8601()
method prep_args (line 73) | def prep_args(args) do
method build_next_end_time (line 102) | def build_next_end_time(logs) do
method extract_log_page_and_update_state (line 114) | def extract_log_page_and_update_state(app, state) do
method run (line 126) | def run(args) do
FILE: apps/transport/lib/mix/tasks/npm.ex
class Mix.Tasks.Npm (line 1) | defmodule Mix.Tasks.Npm
method run (line 8) | def run([cmd | _tail]) do
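Mix.Tasks.Npm (and Mix.Tasks.Yarn further down) take a single command argument and forward it to the corresponding CLI, presumably from the client directory holding package.json. A minimal sketch of such a wrapper task; the directory and options are assumptions:

```elixir
# Sketch of a thin Mix wrapper around an external CLI; path and options assumed.
defmodule Mix.Tasks.NpmSketch do
  use Mix.Task

  @shortdoc "Runs npm with the given command inside the client directory"
  def run([cmd | _tail]) do
    client_dir = Path.join(File.cwd!(), "apps/transport/client")

    {output, status} = System.cmd("npm", [cmd], cd: client_dir, stderr_to_stdout: true)
    Mix.shell().info(output)

    if status != 0, do: Mix.raise("npm #{cmd} exited with status #{status}")
  end
end
```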
FILE: apps/transport/lib/mix/tasks/transport/add_dataset_subtypes.ex
class Mix.Tasks.Transport.AddDatasetSubtypes (line 1) | defmodule Mix.Tasks.Transport.AddDatasetSubtypes
method run (line 9) | def run(_params) do
method insert! (line 25) | def insert!(parent_type, slug) do
FILE: apps/transport/lib/mix/tasks/transport/add_monaco.ex
class Mix.Tasks.Transport.AddMonaco (line 1) | defmodule Mix.Tasks.Transport.AddMonaco
method run (line 12) | def run(_params) do
method download_geojson (line 26) | defp download_geojson do
method insert_monaco (line 31) | defp insert_monaco(geojson_data) do
FILE: apps/transport/lib/mix/tasks/transport/add_switzerland.ex
class Mix.Tasks.Transport.AddSwitzerland (line 1) | defmodule Mix.Tasks.Transport.AddSwitzerland
method run (line 11) | def run(_params) do
method download_geojson (line 25) | defp download_geojson do
method insert_switzerland (line 30) | defp insert_switzerland(geojson_data) do
FILE: apps/transport/lib/mix/tasks/transport/import_aoms.ex
class Mix.Tasks.Transport.ImportAOMs (line 1) | defmodule Mix.Tasks.Transport.ImportAOMs
method to_int (line 38) | def to_int(""), do: nil
method to_int (line 39) | def to_int("#N/D"), do: nil
method to_int (line 40) | def to_int("#ERROR!"), do: nil
method to_int (line 42) | def to_int(str) do
method changeset (line 51) | def changeset(line) do
method normalize_region (line 79) | defp normalize_region("Grand-Est"), do: "Grand Est"
method normalize_region (line 80) | defp normalize_region("Provence-Alpes-Côte-d'Azur"), do: "Région Sud —...
method normalize_region (line 81) | defp normalize_region("Provence-Alpes-Côte d'Azur"), do: "Région Sud —...
method normalize_region (line 82) | defp normalize_region("Nouvelle Aquitaine"), do: "Nouvelle-Aquitaine"
method normalize_region (line 83) | defp normalize_region("Auvergne-Rhône Alpes"), do: "Auvergne-Rhône-Alpes"
method normalize_region (line 84) | defp normalize_region("Nouvelle Calédonie"), do: "Nouvelle-Calédonie"
method normalize_region (line 85) | defp normalize_region(region), do: region
method normalize_forme (line 88) | defp normalize_forme("CA"), do: "Communauté d'agglomération"
method normalize_forme (line 89) | defp normalize_forme("CU"), do: "Communauté urbaine"
method normalize_forme (line 90) | defp normalize_forme("CC"), do: "Communauté de communes"
method normalize_forme (line 91) | defp normalize_forme("METRO"), do: "Métropole"
method normalize_forme (line 92) | defp normalize_forme("PETR"), do: "Pôle d'équilibre territorial et rural"
method normalize_forme (line 93) | defp normalize_forme("POLEM"), do: "Pôle Métropolitain"
method normalize_forme (line 94) | defp normalize_forme(f), do: f
method normalize_nom (line 97) | defp normalize_nom("SIVOTU (nouvelle dénomination le 24/02/2010:AGGLOB...
method normalize_nom (line 98) | defp normalize_nom("ILE D'YEU"), do: "L'Île-d'Yeu"
method normalize_nom (line 99) | defp normalize_nom(n), do: n
method extract_departement_insee (line 102) | defp extract_departement_insee("977 - Collectivité d’outre-mer de Nouv...
method extract_departement_insee (line 103) | defp extract_departement_insee(insee_and_name), do: insee_and_name |> ...
method run (line 105) | def run(_params) do
method get_aom_to_import (line 144) | defp get_aom_to_import do
method existing_or_new_aom (line 159) | defp existing_or_new_aom(line) do
method import_aoms (line 171) | defp import_aoms(aoms_to_add) do
method delete_old_aoms (line 176) | defp delete_old_aoms(aom_added, old_aoms) do
method import_insee_aom (line 196) | defp import_insee_aom do
method compute_geom (line 229) | defp compute_geom do
method set_main_commune (line 256) | def set_main_commune do
method migrate_datasets_to_new_aoms (line 293) | defp migrate_datasets_to_new_aoms do
method disable_trigger (line 345) | defp disable_trigger do
method enable_trigger (line 349) | defp enable_trigger do
method display_changes (line 353) | defp display_changes(old_aoms, aoms_to_add) do
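Mix.Tasks.Transport.ImportAOMs cleans the source spreadsheet with small multi-clause functions: known bad cells and misspelled labels are matched literally, and everything else falls through a catch-all clause. The same shape, sketched on a few of the mappings listed above (the originals are private functions of the task; the thousand-separator handling in to_int/1 is an assumption):

```elixir
# Value cleanup style used by the import: literal clauses first, pass-through last.
defmodule ImportCleanupSketch do
  def to_int(""), do: nil
  def to_int("#N/D"), do: nil
  def to_int("#ERROR!"), do: nil
  def to_int(str), do: str |> String.replace(" ", "") |> String.to_integer()

  def normalize_region("Grand-Est"), do: "Grand Est"
  def normalize_region("Nouvelle Aquitaine"), do: "Nouvelle-Aquitaine"
  def normalize_region(region), do: region
end
```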
FILE: apps/transport/lib/mix/tasks/transport/import_communes.ex
class Mix.Tasks.Transport.ImportCommunes (line 1) | defmodule Mix.Tasks.Transport.ImportCommunes
method regions_by_insee (line 20) | def regions_by_insee do
method geojson_by_insee (line 25) | def geojson_by_insee do
method load_etalab_communes (line 42) | def load_etalab_communes(region_insees) do
method insert_or_update_commune (line 54) | def insert_or_update_commune(
method insert_or_update_commune (line 91) | def insert_or_update_commune(%{"code" => "60694", "nom" => "Les Hauts-...
method insert_or_update_commune (line 94) | def insert_or_update_commune(%{"code" => "85165", "nom" => "L'Oie"} = ...
method insert_or_update_commune (line 97) | def insert_or_update_commune(%{"code" => "85212", "nom" => "Sainte-Flo...
method insert_or_update_commune (line 100) | def insert_or_update_commune(%{"code" => "12218", "nom" => "Conques-en...
method insert_or_update_commune (line 103) | def insert_or_update_commune(%{"code" => "14581", "nom" => "Aurseulles...
method insert_or_update_commune (line 106) | def insert_or_update_commune(%{"code" => "15031", "nom" => "Celles"} =...
method insert_or_update_commune (line 109) | def insert_or_update_commune(%{"code" => "15035", "nom" => "Chalinargu...
method insert_or_update_commune (line 112) | def insert_or_update_commune(%{"nom" => "Chavagnac"} = params, regions...
method insert_or_update_commune (line 115) | def insert_or_update_commune(%{"nom" => "Sainte-Anastasie"} = params, ...
method insert_or_update_commune (line 118) | def insert_or_update_commune(%{"nom" => "Orée d'Anjou"} = params, regi...
method insert_or_update_commune (line 121) | def insert_or_update_commune(%{"nom" => "Porte des Pierres Dorées"} = ...
method get_or_create_commune (line 124) | defp get_or_create_commune(insee) do
method build_geometry (line 136) | defp build_geometry(geojsons, insee) do
method readable_changeset (line 143) | defp readable_changeset(%Ecto.Changeset{changes: changes, data: data}) do
method run (line 149) | def run(_params) do
method ensure_valid_geometries (line 188) | defp ensure_valid_geometries,
method update_administrative_division (line 191) | def update_administrative_division do
FILE: apps/transport/lib/mix/tasks/transport/import_departements.ex
class Mix.Tasks.Transport.ImportDepartements (line 1) | defmodule Mix.Tasks.Transport.ImportDepartements
method insert_or_update_departement (line 15) | def insert_or_update_departement(
method geojson_by_insee (line 47) | defp geojson_by_insee do
method get_or_create_departement (line 58) | defp get_or_create_departement(insee) do
method build_geometry (line 70) | defp build_geometry(geojsons, insee) do
method load_etalab_departements (line 77) | defp load_etalab_departements do
method readable_changeset (line 85) | defp readable_changeset(%Ecto.Changeset{changes: changes, data: data}) do
method run (line 91) | def run(_params) do
method ensure_valid_geometries (line 122) | defp ensure_valid_geometries,
FILE: apps/transport/lib/mix/tasks/transport/import_epci.ex
class Mix.Tasks.Transport.ImportEPCI (line 1) | defmodule Mix.Tasks.Transport.ImportEPCI
method run (line 15) | def run(_params) do
method get_or_create_epci (line 43) | defp get_or_create_epci(code) do
method insert_epci (line 56) | defp insert_epci(%{"code" => code, "nom" => nom, "type" => type, "mode...
method check_communes_list (line 69) | defp check_communes_list(body) do
method update_communes_epci (line 84) | defp update_communes_epci(%{"code" => code, "membres" => m}) do
method get_insees (line 99) | defp get_insees(members) do
method geojson_by_insee (line 104) | defp geojson_by_insee do
method build_geometry (line 115) | defp build_geometry(geojsons, insee) do
method ensure_valid_geometries (line 120) | defp ensure_valid_geometries,
method normalize_type (line 124) | defp normalize_type("CA"), do: "Communauté d'agglomération"
method normalize_type (line 125) | defp normalize_type("CU"), do: "Communauté urbaine"
method normalize_type (line 126) | defp normalize_type("CC"), do: "Communauté de communes"
method normalize_type (line 127) | defp normalize_type("METRO"), do: "Métropole"
method normalize_type (line 128) | defp normalize_type("MET69"), do: "Métropole de Lyon"
method normalize_mode_financement (line 131) | defp normalize_mode_financement("FPU"), do: "Fiscalité professionnelle...
method normalize_mode_financement (line 132) | defp normalize_mode_financement("FA"), do: "Fiscalité additionnelle"
method update_administrative_division (line 134) | def update_administrative_division do
FILE: apps/transport/lib/mix/tasks/transport/import_offers.ex
class Mix.Tasks.Transport.ImportOffers (line 1) | defmodule Mix.Tasks.Transport.ImportOffers
method run (line 13) | def run(_params) do
method import_offers (line 24) | defp import_offers do
method truncate_offers (line 33) | defp truncate_offers, do: DB.Repo.delete_all(DB.Offer)
FILE: apps/transport/lib/mix/tasks/transport/open_api_spec.ex
class Mix.Tasks.Transport.OpenApiSpec (line 1) | defmodule Mix.Tasks.Transport.OpenApiSpec
method run (line 7) | def run([output_file]) do
FILE: apps/transport/lib/mix/tasks/transport/update_france_geojson.ex
class Mix.Tasks.Transport.UpdateFranceGeoJSON (line 1) | defmodule Mix.Tasks.Transport.UpdateFranceGeoJSON
method run (line 11) | def run(_params) do
method download_geojson (line 22) | defp download_geojson do
method update_france_geom (line 27) | defp update_france_geom(geojson_data) do
FILE: apps/transport/lib/mix/tasks/url.ex
class Mix.Tasks.Url (line 1) | defmodule Mix.Tasks.Url
method run (line 14) | def run([url]) do
method get_file_path (line 26) | def get_file_path(module_name) do
method get_line_number (line 32) | def get_line_number(module, function_name) do
FILE: apps/transport/lib/mix/tasks/yarn.ex
class Mix.Tasks.Yarn (line 1) | defmodule Mix.Tasks.Yarn
method run (line 8) | def run([cmd | _tail]) do
FILE: apps/transport/lib/netex/archive_parser.ex
class Transport.NeTEx.ArchiveParser (line 1) | defmodule Transport.NeTEx.ArchiveParser
method read_stop_places (line 38) | def read_stop_places(%Unzip{} = unzip, file_name) do
method read_stop_places! (line 45) | def read_stop_places!(%Unzip{} = unzip, file_name) do
method read_all_stop_places (line 55) | def read_all_stop_places(zip_file_name) do
method read_all_stop_places! (line 62) | def read_all_stop_places!(zip_file_name) do
method read_service_calendars (line 72) | def read_service_calendars(%Unzip{} = unzip, file_name) do
method read_service_calendars! (line 79) | def read_service_calendars!(%Unzip{} = unzip, file_name) do
method read_all_service_calendars (line 83) | def read_all_service_calendars(zip_file_name) do
method read_all_service_calendars! (line 87) | def read_all_service_calendars!(zip_file_name) do
method read_calendars (line 97) | def read_calendars(%Unzip{} = unzip, file_name) do
method read_calendars! (line 104) | def read_calendars!(%Unzip{} = unzip, file_name) do
method read_all_calendars (line 108) | def read_all_calendars(zip_file_name) do
method read_all_calendars! (line 112) | def read_all_calendars!(zip_file_name) do
method read_types_of_frames (line 122) | def read_types_of_frames(%Unzip{} = unzip, file_name) do
method read_types_of_frames! (line 129) | def read_types_of_frames!(%Unzip{} = unzip, file_name) do
method read_all_types_of_frames (line 133) | def read_all_types_of_frames(zip_file_name) do
method read_all_types_of_frames! (line 137) | def read_all_types_of_frames!(zip_file_name) do
method read_description (line 147) | def read_description(%Unzip{} = unzip, file_name) do
method read_description! (line 154) | def read_description!(%Unzip{} = unzip, file_name) do
method read_all_description (line 158) | def read_all_description(zip_file_name) do
method read_all_description! (line 162) | def read_all_description!(zip_file_name) do
method parse_stream (line 166) | defp parse_stream(unzip, file_name, parser) do
method parse_stream! (line 196) | defp parse_stream!(unzip, file_name, parser) do
method read_all (line 203) | defp read_all(zip_file_name, reader) do
method with_zip_file_handle (line 222) | def with_zip_file_handle(zip_file_name, cb, on_error) do
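Transport.NeTEx.ArchiveParser exposes each reader in two flavours, read_* returning a tagged tuple and read_*! raising, and funnels zip access through with_zip_file_handle/3 built on the Unzip library (the %Unzip{} structs appear in the signatures above). A hedged sketch of that pairing; the Unzip calls reflect that library's documented API, the rest is illustrative:

```elixir
# Sketch of the shared zip handling and the ok/error vs bang pairing.
defmodule ArchiveReadSketch do
  def with_zip_file_handle(zip_file_name, cb, on_error) do
    zip_file = Unzip.LocalFile.open(zip_file_name)

    try do
      case Unzip.new(zip_file) do
        {:ok, %Unzip{} = unzip} -> cb.(unzip)
        {:error, reason} -> on_error.(reason)
      end
    after
      Unzip.LocalFile.close(zip_file)
    end
  end

  # Non-bang variant returns a tagged tuple…
  def read_all_stop_places(zip_file_name) do
    with_zip_file_handle(zip_file_name, fn unzip -> {:ok, stop_place_entries(unzip)} end, &{:error, &1})
  end

  # …and the bang variant unwraps it or raises.
  def read_all_stop_places!(zip_file_name) do
    case read_all_stop_places(zip_file_name) do
      {:ok, entries} -> entries
      {:error, reason} -> raise "could not read #{zip_file_name}: #{inspect(reason)}"
    end
  end

  # Placeholder for the per-file parsing done with the streaming parsers below.
  defp stop_place_entries(%Unzip{} = unzip), do: Unzip.list_entries(unzip)
end
```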
FILE: apps/transport/lib/netex/calendars_streaming_parser.ex
class Transport.NeTEx.CalendarsStreamingParser (line 1) | defmodule Transport.NeTEx.CalendarsStreamingParser
method initial_state (line 10) | def initial_state do
method unwrap_result (line 17) | def unwrap_result(final_state), do: final_state.calendars ++ final_sta...
method handle_event (line 19) | def handle_event(:start_element, {element, attributes}, state) do
method handle_event (line 37) | def handle_event(:end_element, "GeneralFrame", state) do
method handle_event (line 45) | def handle_event(:end_element, _, state) do
method handle_event (line 69) | def handle_event(_, _, state), do: {:ok, state}
method init_calendar (line 71) | defp init_calendar(state, attrs \\ %{}), do: Map.put(state, :current_c...
method update_calendar (line 73) | defp update_calendar(state, field, value) do
method init_operating_period (line 77) | defp init_operating_period(state, attrs \\ %{}), do: Map.put(state, :c...
method update_operating_period (line 79) | defp update_operating_period(state, field, value) do
method register_calendar (line 83) | defp register_calendar(state) do
method valid_calendar? (line 95) | defp valid_calendar?(%{id: id, start_date: %Date{}, end_date: %Date{}}...
method valid_calendar? (line 99) | defp valid_calendar?(_), do: false
method update_calendar_date (line 101) | defp update_calendar_date(state, field, chars) do
method register_operating_period (line 105) | defp register_operating_period(state) do
method valid_operating_period? (line 117) | defp valid_operating_period?(%{id: id, start_date: %Date{}, end_date: ...
method valid_operating_period? (line 121) | defp valid_operating_period?(_), do: false
method update_operating_period_date (line 123) | defp update_operating_period_date(state, field, chars) do
FILE: apps/transport/lib/netex/chouette_valid_ruleset_generator.ex
class Transport.NeTEx.ChouetteValidRulesetGenerator (line 1) | defmodule Transport.NeTEx.ChouetteValidRulesetGenerator
method mandatory_attributes (line 14) | def mandatory_attributes(parent, names, documentation_title, documenta...
method encode_ruleset (line 23) | def encode_ruleset(definition, device \\ :stdio) do
method process_sub_profile (line 32) | def process_sub_profile(%{sub_profile: sub_profile, ruleset: ruleset}) do
method document_ruleset (line 36) | def document_ruleset(ruleset, device \\ :stdio, markdown_options \\ []...
method document_sub_profile (line 40) | def document_sub_profile(%{title: title, ruleset: ruleset}, device, op...
method header (line 70) | defp header(_level, text), do: text
method process_rule_context (line 72) | def process_rule_context(sub_profile, %{type: :mandatory_attributes, p...
method process_mandatory_attribute_rule (line 80) | def process_mandatory_attribute_rule(sub_profile, parent, name) do
method snake_case (line 106) | def snake_case(string), do: string
method snake_case_ascii (line 116) | defp snake_case_ascii(<<char, rest::bits>>, _), do: [char | snake_case...
method snake_case_ascii (line 117) | defp snake_case_ascii(<<>>, _), do: []
FILE: apps/transport/lib/netex/description_parser.ex
class Transport.NeTEx.DescriptionParser (line 1) | defmodule Transport.NeTEx.DescriptionParser
method initial_state (line 26) | def initial_state do
method unwrap_result (line 30) | def unwrap_result(final_state),
method handle_event (line 43) | def handle_event(:end_element, "Network" = element, state) do
method handle_event (line 47) | def handle_event(:end_element, "Line" = element, state) do
method handle_event (line 51) | def handle_event(:end_element, "Quay" = element, state) do
method handle_event (line 55) | def handle_event(:end_element, "StopPlace" = element, state) do
method handle_event (line 59) | def handle_event(:end_element, element, state) do
method handle_event (line 73) | def handle_event(_, _, state), do: {:ok, state}
method end_element (line 75) | defp end_element(state, element) do
method register_network (line 79) | defp register_network(state, network), do: update_in(state, [:networks...
method register_transport_mode (line 81) | defp register_transport_mode(state, transport_mode),
method increment (line 84) | defp increment(state, key), do: update_in(state, [key], &(&1 + 1))
method feature_detection (line 210) | defp feature_detection(state, element_name) do
method has (line 221) | defp has(state, feature), do: update_in(state, [:features, feature], f...
FILE: apps/transport/lib/netex/french_profile.ex
class Transport.NeTEx.FrenchProfile (line 1) | defmodule Transport.NeTEx.FrenchProfile
FILE: apps/transport/lib/netex/french_profile/v1.ex
class Transport.NeTEx.FrenchProfile.V1 (line 1) | defmodule Transport.NeTEx.FrenchProfile.V1
method slug (line 14) | def slug, do: "pan:french_profile:1"
method ruleset (line 16) | def ruleset(device \\ :stdio) do
method markdown (line 20) | def markdown(device \\ :stdio, markdown_options \\ []) do
method definition (line 24) | defp definition do
method elements_communs (line 28) | def elements_communs do
method arrets (line 79) | def arrets do
method parkings (line 94) | def parkings do
method description_reseaux (line 118) | def description_reseaux do
method horaires (line 149) | def horaires do
method accessibilite (line 165) | def accessibilite do
method tarifs (line 181) | def tarifs do
FILE: apps/transport/lib/netex/french_profile/v2.ex
class Transport.NeTEx.FrenchProfile.V2 (line 1) | defmodule Transport.NeTEx.FrenchProfile.V2
method slug (line 16) | def slug, do: "pan:french_profile:2"
method ruleset (line 18) | def ruleset(device \\ :stdio) do
method markdown (line 22) | def markdown(device \\ :stdio, markdown_options \\ []) do
method definition (line 26) | defp definition do
method elements_communs (line 38) | def elements_communs do
FILE: apps/transport/lib/netex/netex_helpers.ex
class Transport.NeTEx.NeTExHelpers (line 1) | defmodule Transport.NeTEx.NeTExHelpers
method parse_type_of_frame (line 38) | def parse_type_of_frame(type_of_frame) do
method calendar_frame? (line 48) | def calendar_frame?(type_of_frame) do
FILE: apps/transport/lib/netex/saxy_helpers.ex
class Transport.NeTEx.SaxyHelpers (line 1) | defmodule Transport.NeTEx.SaxyHelpers
method get_attribute! (line 6) | def get_attribute!(attributes, attr_name) do
method parse_float! (line 13) | def parse_float!(binary) do
method parse_date (line 18) | def parse_date(state, chars, cb) do
method parse_date_utc (line 28) | defp parse_date_utc(state, chars, cb) do
method capturing_initial_state (line 41) | def capturing_initial_state(initial_state) do
method push (line 48) | def push(state, element), do: state |> update_in([:current_tree], &(&1...
method pop (line 50) | def pop(state), do: update_in(state, [:current_tree], &(&1 |> List.del...
method reset_tree (line 52) | def reset_tree(state), do: %{state | current_tree: []}
method start_capture (line 54) | def start_capture(state), do: %{state | capture: true}
method stop_capture (line 56) | def stop_capture(state), do: %{state | capture: false}
FILE: apps/transport/lib/netex/service_calendars_streaming_parser.ex
class Transport.NeTEx.ServiceCalendarsStreamingParser (line 1) | defmodule Transport.NeTEx.ServiceCalendarsStreamingParser
method initial_state (line 10) | def initial_state do
method unwrap_result (line 18) | def unwrap_result(final_state), do: final_state.service_calendars
method handle_event (line 20) | def handle_event(:start_element, {element, attributes}, state) do
method handle_event (line 39) | def handle_event(:end_element, node, state) do
method handle_event (line 82) | def handle_event(:characters, _chars, state) do
method handle_event (line 86) | def handle_event(_, _, state), do: {:ok, state}
method init_service_calendar (line 88) | defp init_service_calendar(state, attrs), do: Map.put(state, :current_...
method update_service_calendar (line 90) | defp update_service_calendar(state, field, value) do
method register_service_calendar (line 94) | defp register_service_calendar(state) do
method valid_service_calendar? (line 107) | defp valid_service_calendar?(%{id: id, name: name, start_date: %Date{}...
method valid_service_calendar? (line 111) | defp valid_service_calendar?(_), do: false
method update_date (line 113) | defp update_date(state, field, chars) do
FILE: apps/transport/lib/netex/stop_places_streaming_parser.ex
class Transport.NeTEx.StopPlacesStreamingParser (line 1) | defmodule Transport.NeTEx.StopPlacesStreamingParser
method initial_state (line 28) | def initial_state do
method unwrap_result (line 39) | def unwrap_result(final_state), do: final_state.stop_places
method handle_event (line 42) | def handle_event(:start_element, {"StopPlace" = element, attributes}, ...
method handle_event (line 58) | def handle_event(:end_element, "StopPlace" = _node, state) do
method handle_event (line 63) | def handle_event(:end_element, _node, state) do
method handle_event (line 81) | def handle_event(_, _, state), do: {:ok, state}
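The streaming parsers (CalendarsStreamingParser, ServiceCalendarsStreamingParser, StopPlacesStreamingParser, the ToGeoJSON parsers below) are Saxy SAX handlers: handle_event/3 reacts to :start_element, :characters and :end_element, accumulates the entity currently being built in the state, and registers it when the closing tag arrives, so large NeTEx files can be parsed without loading a whole document in memory. A toy handler of that shape; the element and field names are simplified, not the French profile's real ones:

```elixir
# Toy Saxy handler collecting <StopPlace id="…"><Name>…</Name></StopPlace>.
defmodule StopPlacesSketch do
  @behaviour Saxy.Handler

  def initial_state, do: %{stop_places: [], current: nil, capture_name: false}

  @impl Saxy.Handler
  def handle_event(:start_element, {"StopPlace", attributes}, state) do
    id = attributes |> Map.new() |> Map.get("id")
    {:ok, %{state | current: %{id: id, name: nil}}}
  end

  def handle_event(:start_element, {"Name", _attributes}, %{current: %{}} = state),
    do: {:ok, %{state | capture_name: true}}

  def handle_event(:start_element, _element, state), do: {:ok, state}

  def handle_event(:characters, chars, %{capture_name: true, current: current} = state),
    do: {:ok, %{state | current: %{current | name: chars}}}

  def handle_event(:characters, _chars, state), do: {:ok, state}

  def handle_event(:end_element, "Name", state), do: {:ok, %{state | capture_name: false}}

  def handle_event(:end_element, "StopPlace", %{current: current} = state),
    do: {:ok, %{state | current: nil, stop_places: [current | state.stop_places]}}

  def handle_event(:end_element, _element, state), do: {:ok, state}

  def handle_event(_event, _data, state), do: {:ok, state}
end

# Usage over a file stream, e.g.:
#   {:ok, state} = Saxy.parse_stream(File.stream!("stops.xml"), StopPlacesSketch, StopPlacesSketch.initial_state())
#   state.stop_places
```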
FILE: apps/transport/lib/netex/to_geojson/coordinates.ex
class Transport.NeTEx.ToGeoJSON.Coordinates (line 1) | defmodule Transport.NeTEx.ToGeoJSON.Coordinates
method parse_coordinate_pairs_lon_lat (line 100) | defp parse_coordinate_pairs_lon_lat(pairs) do
method parse_lon_lat_pair (line 109) | defp parse_lon_lat_pair(pair) do
method parse_coordinate_pairs (line 122) | defp parse_coordinate_pairs(values) do
method parse_float (line 140) | defp parse_float(string) do
FILE: apps/transport/lib/netex/to_geojson/geojson_builder.ex
class Transport.NeTEx.ToGeoJSON.GeoJSONBuilder (line 1) | defmodule Transport.NeTEx.ToGeoJSON.GeoJSONBuilder
method point_feature (line 43) | def point_feature(coordinates, properties, id \\ nil)
method line_string_feature (line 77) | def line_string_feature(coordinates, properties, id \\ nil)
method stop_to_feature (line 125) | def stop_to_feature(_stop), do: nil
method service_link_to_feature (line 159) | def service_link_to_feature(_link), do: nil
method maybe_add_id (line 161) | defp maybe_add_id(feature, nil), do: feature
method maybe_add_id (line 162) | defp maybe_add_id(feature, id), do: Map.put(feature, "id", id)
method maybe_put (line 164) | defp maybe_put(map, _key, nil), do: map
method maybe_put (line 165) | defp maybe_put(map, key, value), do: Map.put(map, key, value)
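GeoJSONBuilder turns parsed stops and service links into GeoJSON features, with maybe_put/3 and maybe_add_id/2 keeping optional keys out of the output. A small sketch of the point-feature construction; the stop's field names are placeholders, only the two maybe_* helpers mirror the clauses listed above:

```elixir
# Sketch of optional-key handling when emitting GeoJSON features.
defmodule GeoJSONFeatureSketch do
  def stop_to_feature(%{latitude: lat, longitude: lon} = stop) when is_number(lat) and is_number(lon) do
    %{}
    |> maybe_put("name", Map.get(stop, :name))
    |> then(&point_feature([lon, lat], &1, Map.get(stop, :id)))
  end

  def stop_to_feature(_stop), do: nil

  def point_feature(coordinates, properties, id \\ nil) do
    %{
      "type" => "Feature",
      # GeoJSON expects [longitude, latitude] ordering.
      "geometry" => %{"type" => "Point", "coordinates" => coordinates},
      "properties" => properties
    }
    |> maybe_add_id(id)
  end

  defp maybe_add_id(feature, nil), do: feature
  defp maybe_add_id(feature, id), do: Map.put(feature, "id", id)

  defp maybe_put(map, _key, nil), do: map
  defp maybe_put(map, key, value), do: Map.put(map, key, value)
end
```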
FILE: apps/transport/lib/netex/to_geojson/parsers/quay_parser.ex
class Transport.NeTEx.ToGeoJSON.QuayParser (line 1) | defmodule Transport.NeTEx.ToGeoJSON.QuayParser
method parse_stream (line 44) | def parse_stream(stream) do
method handle_event (line 64) | def handle_event(:start_element, {"Quay" = element, attributes}, state...
method handle_event (line 77) | def handle_event(:start_element, {element, _attributes}, %{capture: tr...
method handle_event (line 82) | def handle_event(:end_element, "Quay", state) do
method handle_event (line 94) | def handle_event(:end_element, _element, %{capture: true} = state) do
method handle_event (line 99) | def handle_event(:characters, chars, %{current_tree: ["Quay", "Name"]}...
method handle_event (line 104) | def handle_event(:characters, chars, %{current_tree: ["Quay", "PublicC...
method handle_event (line 109) | def handle_event(
method handle_event (line 118) | def handle_event(
method handle_event (line 127) | def handle_event(_, _, state), do: {:ok, state}
method get_attribute (line 129) | defp get_attribute(attributes, name) do
method parse_float! (line 136) | defp parse_float!(binary) do
FILE: apps/transport/lib/netex/to_geojson/parsers/service_link_parser.ex
class Transport.NeTEx.ToGeoJSON.S
Condensed preview — 1573 files, each showing path, character count, and a content snippet. Download the .json file for the full structured content (21,670K chars).
[
{
"path": ".credo.exs",
"chars": 5863,
"preview": "# This file contains the configuration for Credo and you are probably reading\n# this after creating it with `mix credo.g"
},
{
"path": ".dialyzer_ignore.exs",
"chars": 756,
"preview": "[\n # temporary fix for https://github.com/elixir-ecto/postgrex/issues/549\n ~r/deps\\/postgrex\\/lib\\/postgrex\\/type_modu"
},
{
"path": ".editorconfig",
"chars": 431,
"preview": "root = true\n\n[*]\nindent_size = 2\nindent_style = space\ncharset = utf-8\ntrim_tra"
},
{
"path": ".eslintignore",
"chars": 47,
"preview": "/node_modules\n**/*/node_modules\npriv/static/js\n"
},
{
"path": ".formatter.exs",
"chars": 267,
"preview": "[\n plugins: [Phoenix.LiveView.HTMLFormatter],\n inputs: [\n \"mix.exs\",\n \"config/*.exs\",\n \"apps/*/{lib,test}/**/"
},
{
"path": ".github/CODEOWNERS",
"chars": 25,
"preview": "* @etalab/transport-tech\n"
},
{
"path": ".github/actions/checkout-compile/action.yml",
"chars": 2365,
"preview": "name: \"Checkout & compile\"\ndescription: \"Checkout and compile the code\"\nruns:\n using: \"composite\"\n steps:\n - uses: "
},
{
"path": ".github/workflows/ops_tests.yml",
"chars": 301,
"preview": "name: CI ops tests\non: push\n\njobs:\n test:\n runs-on: ubuntu-latest\n name: Run ops tests\n steps:\n - uses: a"
},
{
"path": ".github/workflows/sentry_release.yml",
"chars": 667,
"preview": "name: Sentry release integration\non:\n push:\n branches:\n - master\njobs:\n # See https://github.com/marketplace/a"
},
{
"path": ".github/workflows/test.yml",
"chars": 1786,
"preview": "name: Test suite\non:\n - push\n - workflow_dispatch\n\nenv:\n # https://github.com/erlef/setup-beam#self-hosted-runners\n "
},
{
"path": ".github/workflows/trivy_scan.yml",
"chars": 616,
"preview": "name: trivy_scan\non:\n schedule:\n - cron: 0 9 * * MON\njobs:\n build:\n name: Scan\n runs-on: ubuntu-latest\n st"
},
{
"path": ".gitignore",
"chars": 1160,
"preview": "# App artifacts\n/_build\n/db\n/deps\n/*.ez\n.elixir_ls\n.DS_Store\n\n# Generated on crash by the VM\nerl_crash.dump\n\n# Npm stuff"
},
{
"path": ".miniorc.template",
"chars": 1619,
"preview": "# Steps to run minio locally as a S3 container for development:\n# 1. cp .miniorc.template .miniorc\n# 2. Install MinIO lo"
},
{
"path": ".stylelintrc.json",
"chars": 1469,
"preview": "{\n \"extends\": [\"stylelint-config-standard\", \"stylelint-config-standard-scss\"],\n \"rules\": {\n \"scss/dollar-variable"
},
{
"path": ".tool-versions",
"chars": 817,
"preview": "# NOTE: this file does _not_ define what is used in production\n# (see https://github.com/etalab/transport-site/issues/13"
},
{
"path": ".vscode/launch.json",
"chars": 460,
"preview": "{\n // Use IntelliSense to learn about possible attributes.\n // Hover to view descriptions of existing attributes.\n"
},
{
"path": "Dockerfile",
"chars": 2290,
"preview": "FROM ghcr.io/transportdatagouvfr/ops:elixir-1.19.4-erlang-27.3.4.1-ubuntu-noble-20251013-transport-tools-2.0.0\n\nRUN mkdi"
},
{
"path": "Dockerfile.dev",
"chars": 709,
"preview": "FROM ghcr.io/transportdatagouvfr/ops:elixir-1.19.4-erlang-27.3.4.1-ubuntu-noble-20251013-transport-tools-2.0.0\n\nRUN apt-"
},
{
"path": "LICENSE.AGPL.txt",
"chars": 34520,
"preview": " GNU AFFERO GENERAL PUBLIC LICENSE\n Version 3, 19 November 2007\n\n Copyright (C)"
},
{
"path": "README.md",
"chars": 14990,
"preview": "# Transport\n\nThis is the repository of the [french National Access Point](https://transport.data.gouv.fr/) (NAP) for mob"
},
{
"path": "apps/shared/lib/application.ex",
"chars": 563,
"preview": "defmodule Shared.Application do\n @moduledoc false\n\n use Application\n\n def start(_type, _args) do\n children = [\n "
},
{
"path": "apps/shared/lib/appsignal_filter.ex",
"chars": 1009,
"preview": "defmodule TransportWeb.Plugs.AppSignalFilter do\n @moduledoc \"\"\"\n An attempt to reduce the volume of events sent to App"
},
{
"path": "apps/shared/lib/cldr.ex",
"chars": 326,
"preview": "defmodule Transport.Cldr do\n @moduledoc \"\"\"\n Declares a backend for Cldr as required.\n https://hexdocs.pm/ex_cldr_num"
},
{
"path": "apps/shared/lib/conditional_json_encoder.ex",
"chars": 1607,
"preview": "defmodule Transport.Shared.ConditionalJSONEncoder do\n require Logger\n\n @moduledoc \"\"\"\n Some of the JSON payloads the "
},
{
"path": "apps/shared/lib/data_visualization.ex",
"chars": 1495,
"preview": "defmodule Transport.DataVisualization do\n @moduledoc \"\"\"\n Wrapper for DataVisualization\n \"\"\"\n @callback has_features"
},
{
"path": "apps/shared/lib/date_time_display.ex",
"chars": 9486,
"preview": "defmodule Shared.DateTimeDisplay do\n @moduledoc \"\"\"\n A module to have a coherent display of dates and times accross th"
},
{
"path": "apps/shared/lib/hasher.ex",
"chars": 4508,
"preview": "defmodule Hasher.Wrapper do\n @moduledoc \"\"\"\n A Hasher wrapper, useful for testing purposes\n \"\"\"\n\n @callback get_cont"
},
{
"path": "apps/shared/lib/helpers.ex",
"chars": 1763,
"preview": "defmodule Helpers do\n @moduledoc \"\"\"\n Helper functions that are used accross the whole project\n \"\"\"\n require Logger\n"
},
{
"path": "apps/shared/lib/http_stream_v2.ex",
"chars": 4274,
"preview": "defmodule HTTPStreamV2 do\n @moduledoc \"\"\"\n A new module able to compute checksum of a given URL via streaming, all\n w"
},
{
"path": "apps/shared/lib/proxy.ex",
"chars": 530,
"preview": "defmodule Shared.Proxy do\n @moduledoc \"\"\"\n Shared methods useful when proxying requests in our apps.\n \"\"\"\n\n @doc \"\"\""
},
{
"path": "apps/shared/lib/req_custom_cache.ex",
"chars": 2639,
"preview": "defmodule Transport.Shared.ReqCustomCache do\n @moduledoc \"\"\"\n A simple HTTP cache for `req` that do not use headers. I"
},
{
"path": "apps/shared/lib/resource_schema.ex",
"chars": 1545,
"preview": "defmodule Transport.Shared.ResourceSchema do\n @moduledoc \"\"\"\n Guess schema names and versions for resources\n \"\"\"\n im"
},
{
"path": "apps/shared/lib/s3.ex",
"chars": 3619,
"preview": "defmodule Transport.S3 do\n @moduledoc \"\"\"\n This module contains common code related to S3 object storage.\n \"\"\"\n requ"
},
{
"path": "apps/shared/lib/sentry_exception_filter.ex",
"chars": 995,
"preview": "defmodule Transport.Shared.SentryExceptionFilter do\n @moduledoc \"\"\"\n This module is used to avoid spamming our Sentry "
},
{
"path": "apps/shared/lib/syntax_colors.ex",
"chars": 1206,
"preview": "defmodule Transport.Inspect do\n @moduledoc \"\"\"\n While creating scripts (`mix run script.exs`), it is useful to\n color"
},
{
"path": "apps/shared/lib/time_wrapper.ex",
"chars": 1367,
"preview": "defmodule TimeWrapper do\n @moduledoc \"\"\"\n This module concentrates all the calls to `Timex` in a single place.\n\n The "
},
{
"path": "apps/shared/lib/validation/gbfs_validator.ex",
"chars": 2539,
"preview": "defmodule Shared.Validation.GBFSValidator do\n @moduledoc \"\"\"\n A module to validate GBFS feeds\n \"\"\"\n\n defmodule Summa"
},
{
"path": "apps/shared/lib/validation/gtfs_validator.ex",
"chars": 2517,
"preview": "defmodule Shared.Validation.GtfsValidator.Wrapper do\n @moduledoc \"\"\"\n A wrapper for GtfsValidator\n \"\"\"\n\n def impl, d"
},
{
"path": "apps/shared/lib/validation/validator.ex",
"chars": 384,
"preview": "defmodule Shared.Validation.Validator do\n @moduledoc \"\"\"\n Describe the behaviour of a resource validator.\n \"\"\"\n\n # @"
},
{
"path": "apps/shared/lib/wrapper/wrapper_httpoison.ex",
"chars": 398,
"preview": "defmodule Transport.Shared.Wrapper.HTTPoison do\n @moduledoc \"\"\"\n Temporary: a HTTPoison wrapper currently used by some"
},
{
"path": "apps/shared/lib/wrapper/wrapper_req.ex",
"chars": 2989,
"preview": "defmodule Transport.Req.Behaviour do\n @moduledoc \"\"\"\n At time of writing, Req does not introduce a behaviour allowing "
},
{
"path": "apps/shared/lib/wrapper_ex_aws.ex",
"chars": 305,
"preview": "defmodule Transport.Wrapper.ExAWS do\n @moduledoc \"\"\"\n Central access point for the ExAWS behaviour defined at\n https:"
},
{
"path": "apps/shared/meta/schema-irve-dynamique.json",
"chars": 7334,
"preview": "{\n \"$schema\": \"https://frictionlessdata.io/schemas/table-schema.json\",\n \"name\": \"schema-irve-dynamique\",\n \"titl"
},
{
"path": "apps/shared/meta/schema-irve-statique.json",
"chars": 22628,
"preview": "{\n \"$schema\": \"https://frictionlessdata.io/schemas/table-schema.json\",\n \"name\": \"schema-irve-statique\",\n \"title"
},
{
"path": "apps/shared/mix.exs",
"chars": 2524,
"preview": "defmodule Shared.MixProject do\n use Mix.Project\n\n def project do\n [\n app: :shared,\n version: \"0.1.0\",\n "
},
{
"path": "apps/shared/test/data_visualization_test.exs",
"chars": 2810,
"preview": "defmodule Transport.DataVisualizationTest do\n use ExUnit.Case\n alias Transport.DataVisualization\n doctest Transport.D"
},
{
"path": "apps/shared/test/date_time_display_test.exs",
"chars": 109,
"preview": "defmodule Shared.DateTimeDisplayTest do\n use ExUnit.Case\n doctest Shared.DateTimeDisplay, import: true\nend\n"
},
{
"path": "apps/shared/test/hasher_test.exs",
"chars": 1598,
"preview": "defmodule HasherTest do\n use ExUnit.Case, async: true\n import Transport.Test.TestUtils, only: [zip_metadata: 0]\n doct"
},
{
"path": "apps/shared/test/helpers/helpers_test.exs",
"chars": 387,
"preview": "defmodule Helpers.HelpersTest do\n use ExUnit.Case\n doctest Helpers, import: true\n\n test \"last_updated\" do\n assert "
},
{
"path": "apps/shared/test/http_stream_v2_test.exs",
"chars": 5142,
"preview": "defmodule HTTPStreamV2.Test do\n use ExUnit.Case, async: true\n\n setup do\n bypass = Bypass.open()\n {:ok, bypass: b"
},
{
"path": "apps/shared/test/resource_schema_test.exs",
"chars": 140,
"preview": "defmodule Transport.Shared.ResourceSchemaTest do\n use ExUnit.Case, async: true\n doctest Transport.Shared.ResourceSchem"
},
{
"path": "apps/shared/test/s3_test.exs",
"chars": 759,
"preview": "defmodule Transport.S3Test do\n use ExUnit.Case, async: true\n\n import Mox\n\n setup :verify_on_exit!\n\n test \"bucket_nam"
},
{
"path": "apps/shared/test/support/cache_case.ex",
"chars": 699,
"preview": "defmodule Shared.CacheCase do\n @moduledoc \"\"\"\n This module defines the test case to be used by\n tests that require ac"
},
{
"path": "apps/shared/test/support/mocks.ex",
"chars": 297,
"preview": "Mox.defmock(Transport.HTTPoison.Mock, for: HTTPoison.Base)\nMox.defmock(Transport.Req.Mock, for: Transport.Req.Behaviour)"
},
{
"path": "apps/shared/test/support/s3_test_utils.ex",
"chars": 3321,
"preview": "defmodule Transport.Test.S3TestUtils do\n @moduledoc \"\"\"\n Some utility functions for S3 mocks\n \"\"\"\n import Mox\n impo"
},
{
"path": "apps/shared/test/support/test_utils.ex",
"chars": 3690,
"preview": "defmodule Transport.Test.TestUtils do\n @moduledoc \"\"\"\n Some useful functions for testing\n \"\"\"\n\n @doc \"\"\"\n Polls `fu"
},
{
"path": "apps/shared/test/test_helper.exs",
"chars": 15,
"preview": "ExUnit.start()\n"
},
{
"path": "apps/shared/test/time_wrapper_test.exs",
"chars": 86,
"preview": "defmodule TimeWrapperTest do\n use ExUnit.Case, async: true\n doctest TimeWrapper\nend\n"
},
{
"path": "apps/shared/test/validation/gbfs_validator_test.exs",
"chars": 3390,
"preview": "defmodule GBFSValidatorTest do\n use ExUnit.Case, async: true\n alias Shared.Validation.GBFSValidator.{HTTPValidatorClie"
},
{
"path": "apps/shared/test/validation/gtfs_validator_test.exs",
"chars": 2529,
"preview": "defmodule GtfsValidatorTest do\n use ExUnit.Case, async: true\n doctest Shared.Validation.GtfsValidator\n\n import Mox\n\n "
},
{
"path": "apps/transport/client/.prettierignore",
"chars": 197,
"preview": "node_modules/\npriv/\nbuild/\ndist/\nyarn.lock\npackage.json\n\n# styles handled by stylelint, not prettier\n*.css\n*.scss\n\n# bui"
},
{
"path": "apps/transport/client/.prettierrc.json",
"chars": 146,
"preview": "{\n \"tabWidth\": 4,\n \"semi\": false,\n \"singleQuote\": true,\n \"trailingComma\": \"none\",\n \"arrowParens\": \"avoid\""
},
{
"path": "apps/transport/client/eslint.config.mjs",
"chars": 821,
"preview": "import js from '@eslint/js'\nimport globals from 'globals'\nimport prettier from 'eslint-config-prettier/flat'\n\nexport def"
},
{
"path": "apps/transport/client/javascripts/app.js",
"chars": 3053,
"preview": "import { Socket } from 'phoenix'\nimport { LiveSocket } from 'phoenix_live_view'\nimport Prism from 'prismjs'\nimport forma"
},
{
"path": "apps/transport/client/javascripts/autocomplete.js",
"chars": 4897,
"preview": "/* eslint no-unused-vars: [2, {\"args\": \"after-used\", \"varsIgnorePattern\": \"autoCompletejs\"}] */\n/* global contactId, lab"
},
{
"path": "apps/transport/client/javascripts/autocomplete_address.js",
"chars": 1969,
"preview": "const AutoComplete = require('@tarekraafat/autocomplete.js/dist/autoComplete')\n\nnew AutoComplete({\n data: {\n s"
},
{
"path": "apps/transport/client/javascripts/clipboard.js",
"chars": 169,
"preview": "import Clipboard from 'clipboard'\n\nconst clipboard = new Clipboard('.button')\nclipboard.on('success', e => {\n e.trigg"
},
{
"path": "apps/transport/client/javascripts/dataset-map.js",
"chars": 901,
"preview": "import L from 'leaflet'\nimport { IGN } from './map-config'\n\nfunction initilizeMap(id) {\n const map = L.map(id, { rend"
},
{
"path": "apps/transport/client/javascripts/explore.js",
"chars": 9956,
"preview": "import { Socket } from 'phoenix'\nimport Leaflet from 'leaflet'\nimport { LeafletLayer } from 'deck.gl-leaflet'\nimport { S"
},
{
"path": "apps/transport/client/javascripts/fullscreen_wrapper.js",
"chars": 342,
"preview": "const buttons = [document.getElementById('enter-fullscreen'), document.getElementById('exit-fullscreen')]\nbuttons.forEac"
},
{
"path": "apps/transport/client/javascripts/gtfs.js",
"chars": 7082,
"preview": "import Leaflet from 'leaflet'\nimport { LeafletLayer } from 'deck.gl-leaflet'\nimport { ScatterplotLayer, GeoJsonLayer } f"
},
{
"path": "apps/transport/client/javascripts/map-config.js",
"chars": 962,
"preview": "export const Mapbox = {\n url: 'https://api.mapbox.com/styles/v1/transport-pan/clj8j9fla009701pie4nrfo62/tiles/{tileSi"
},
{
"path": "apps/transport/client/javascripts/map-geojson.js",
"chars": 5540,
"preview": "import L from 'leaflet'\nimport { IGN } from './map-config'\n\nfunction initializeMap(id) {\n const map = L.map(id, { ren"
},
{
"path": "apps/transport/client/javascripts/map.js",
"chars": 16543,
"preview": "import Leaflet from 'leaflet'\nimport 'leaflet.pattern'\nimport { Mapbox } from './map-config'\n\nconst aomsUrl = '/api/stat"
},
{
"path": "apps/transport/client/javascripts/resource-viz.js",
"chars": 12842,
"preview": "import L from 'leaflet'\nimport Papa from 'papaparse'\nimport { IGN } from './map-config'\n\n// possible field names in csv "
},
{
"path": "apps/transport/client/javascripts/utils.js",
"chars": 1249,
"preview": "const addSeeMore = function (maxHeight, querySelector, seeMoreText, seeLessText, featureName) {\n document.querySelect"
},
{
"path": "apps/transport/client/javascripts/validation-map.js",
"chars": 1907,
"preview": "import L from 'leaflet'\nimport { IGN } from './map-config'\n\nfunction initilizeMap(id) {\n const map = L.map(id, { rend"
},
{
"path": "apps/transport/client/javascripts/vega.js",
"chars": 57,
"preview": "import embed from 'vega-embed'\n\nwindow.vegaEmbed = embed\n"
},
{
"path": "apps/transport/client/package.json",
"chars": 2294,
"preview": "{\n \"version\": \"0.0.1\",\n \"description\": \"Rendre disponible, valoriser et améliorer les données transports\",\n \"license\""
},
{
"path": "apps/transport/client/stylesheets/_states.scss",
"chars": 105,
"preview": ".is-active {\n background: $background-color;\n color: $white;\n}\n\n.is-centered {\n text-align: center;\n}\n"
},
{
"path": "apps/transport/client/stylesheets/app.scss",
"chars": 1897,
"preview": "// Global styles: variables, mixins, external libraries, and so on.\n@import 'globals/variables';\n@import 'globals/extern"
},
{
"path": "apps/transport/client/stylesheets/components/_aom_table.scss",
"chars": 707,
"preview": ".aom_table {\n td {\n padding: 10px;\n }\n\n tr:nth-child(odd) {\n background-color: $lighter-grey;\n }\n\n th {\n p"
},
{
"path": "apps/transport/client/stylesheets/components/_autocomplete.scss",
"chars": 2030,
"preview": ".autoCompleteResultsField {\n padding-top: 5px;\n position: relative;\n}\n\n#autoCompleteResults {\n background-color: whit"
},
{
"path": "apps/transport/client/stylesheets/components/_backoffice.scss",
"chars": 7852,
"preview": ".dataset_import_validations_logs {\n display: flex;\n flex-direction: column;\n align-items: center;\n padding-top: 48px"
},
{
"path": "apps/transport/client/stylesheets/components/_blog.scss",
"chars": 164,
"preview": "article.panel {\n p {\n img {\n display: block;\n margin-left: auto;\n margin-right: auto;\n max-width"
},
{
"path": "apps/transport/client/stylesheets/components/_choose_file.scss",
"chars": 533,
"preview": ".add-update-resource {\n .small-bottom-margin {\n margin-bottom: 6px;\n }\n\n .choose-file {\n background-color: var("
},
{
"path": "apps/transport/client/stylesheets/components/_colorful-button.scss",
"chars": 2674,
"preview": "/* Colorful link or button\n *\n * Defaults to blue.\n *\n * Variant classes:\n * - `.valid`: green\n * - `.invalid`: red\n * -"
},
{
"path": "apps/transport/client/stylesheets/components/_community-resources.scss",
"chars": 376,
"preview": "#communityresources {\n h4 {\n margin-top: 0.5em;\n margin-bottom: 0.3em;\n }\n\n .main-pan {\n margin-top: 1em;\n "
},
{
"path": "apps/transport/client/stylesheets/components/_dataset-details.scss",
"chars": 10085,
"preview": ".dataset-page {\n display: flex;\n flex-wrap: wrap-reverse;\n align-items: stretch;\n justify-content: center;\n backgro"
},
{
"path": "apps/transport/client/stylesheets/components/_discussions.scss",
"chars": 1975,
"preview": ".discussion {\n margin-bottom: 2vw;\n\n .discussion-date {\n color: var(--darker-grey);\n margin-left: 0.5em;\n }\n\n "
},
{
"path": "apps/transport/client/stylesheets/components/_download_availability.scss",
"chars": 1811,
"preview": ".download_availability {\n display: flex;\n align-items: flex-end;\n height: 36px;\n width: 100%;\n\n @media (max-width: "
},
{
"path": "apps/transport/client/stylesheets/components/_error.scss",
"chars": 669,
"preview": ".error {\n margin-top: 48px;\n text-align: center;\n h1 {\n font-size: 1.2em;\n font-weight: bold;\n margin: 0 aut"
},
{
"path": "apps/transport/client/stylesheets/components/_explore.scss",
"chars": 1309,
"preview": ".explore .grid {\n display: grid;\n grid-template-columns: 1fr 1fr;\n grid-template-areas:\n \"gtfs-rt irve\"\n \"bnlc "
},
{
"path": "apps/transport/client/stylesheets/components/_feedback.scss",
"chars": 711,
"preview": ".feedback-selector {\n input {\n display: none;\n }\n\n label {\n cursor: pointer;\n display: inl"
},
{
"path": "apps/transport/client/stylesheets/components/_fullscreen-wrapper.scss",
"chars": 880,
"preview": "#fullscreen-wrapper {\n button {\n background-color: var(--lighter-grey);\n border: 1px solid var(--grey);\n borde"
},
{
"path": "apps/transport/client/stylesheets/components/_gtfs_diff.scss",
"chars": 3941,
"preview": "#gtfs-diff-steps {\n margin: -3em auto 2em;\n li.active {\n font-weight: bold;\n }\n}\n\n.actions {\n margin-top: var(--s"
},
{
"path": "apps/transport/client/stylesheets/components/_guide.scss",
"chars": 166,
"preview": "#guide {\n max-width: 1300px;\n margin: calc(1vw + 1em) calc(8vw + 1em);\n padding: 8em;\n\n ul {\n margin-left: 2em;\n "
},
{
"path": "apps/transport/client/stylesheets/components/_icons.scss",
"chars": 1824,
"preview": ".icon {\n @extend .fas !optional;\n\n margin-right: 0.5em;\n}\n\n.icon--badge {\n @extend .fa-id-badge !optional;\n}\n\n.icon--"
},
{
"path": "apps/transport/client/stylesheets/components/_landing_page_vls.scss",
"chars": 4617,
"preview": "article.landing_page_vls {\n $vls-blue: #4a90e2 !default;\n\n .wrapper {\n display: grid;\n grid-template-columns:\n "
},
{
"path": "apps/transport/client/stylesheets/components/_legal.scss",
"chars": 203,
"preview": "#legal {\n max-width: 1300px;\n margin: calc(1vw + 1em) calc(8vw + 1em);\n padding: 8em;\n\n ul {\n margin-left: 2em;\n "
},
{
"path": "apps/transport/client/stylesheets/components/_login.scss",
"chars": 284,
"preview": "#login {\n h1 {\n text-align: center;\n }\n\n ul {\n padding: 0;\n }\n\n li {\n list-style-type: none;\n line-heig"
},
{
"path": "apps/transport/client/stylesheets/components/_logo.scss",
"chars": 148,
"preview": ".logo {\n @include media-tablet-up {\n height: modular-scale(3);\n }\n\n float: left;\n height: modular-scale(1);\n\n im"
},
{
"path": "apps/transport/client/stylesheets/components/_mail.scss",
"chars": 1329,
"preview": "a.mail__button {\n background-color: $blue;\n border-radius: 50px;\n bottom: 1em;\n color: $white;\n font-size: 1.5em;\n "
},
{
"path": "apps/transport/client/stylesheets/components/_mailing-list.scss",
"chars": 131,
"preview": ".mailing-list__social-container {\n margin: 1em 0;\n\n img {\n height: 2em;\n min-width: 2em;\n vertical-align: mid"
},
{
"path": "apps/transport/client/stylesheets/components/_map-js.scss",
"chars": 883,
"preview": ".geojson-map {\n height: 600px;\n max-height: 80vh;\n}\n\n$light-green-map: #bce954 !default;\n\n.map-bg-green {\n background"
},
{
"path": "apps/transport/client/stylesheets/components/_message.scss",
"chars": 428,
"preview": ".message--success {\n background: $background-message-success;\n border: 1px solid $border-message-success;\n}\n\n.message-"
},
{
"path": "apps/transport/client/stylesheets/components/_notification.scss",
"chars": 49,
"preview": ".notification > p {\n padding: 0;\n margin: 0;\n}\n"
},
{
"path": "apps/transport/client/stylesheets/components/_pagination.scss",
"chars": 628,
"preview": "ul.pagination {\n list-style-type: none;\n margin: 0;\n padding: 0;\n overflow: hidden;\n display: flex;\n justify-conte"
},
{
"path": "apps/transport/client/stylesheets/components/_resource-details.scss",
"chars": 995,
"preview": "#resource-geojson {\n .leaflet-popup-content {\n overflow: auto;\n }\n}\n\n.full-width {\n width: 100%;\n}\n\n.networks-list"
},
{
"path": "apps/transport/client/stylesheets/components/_search.scss",
"chars": 141,
"preview": ".search-title {\n display: flex;\n justify-content: space-between;\n align-items: center;\n margin-bottom: 24px;\n h2 {\n"
},
{
"path": "apps/transport/client/stylesheets/components/_shortlist.scss",
"chars": 4445,
"preview": ".shortlist {\n display: flex;\n}\n\n.shortlist .side-pane {\n padding: 0;\n margin-right: 2em;\n flex: 0 0 15em;\n}\n\n.shortl"
},
{
"path": "apps/transport/client/stylesheets/components/_stats.scss",
"chars": 3782,
"preview": ".deployment .hero__container {\n min-height: 15em;\n width: 80%;\n margin: auto;\n}\n\n.deployment h2 {\n margin-top: 3em;\n"
},
{
"path": "apps/transport/client/stylesheets/components/_tooltip.scss",
"chars": 1367,
"preview": "/* from https://www.w3schools.com/howto/howto_css_tooltip.asp */\n/* Tooltip container */\n\n$tooltip-width: 150px !default"
},
{
"path": "apps/transport/client/stylesheets/components/_validation.scss",
"chars": 5463,
"preview": "nav.validation {\n padding: 10px;\n h4 {\n margin-bottom: 0;\n }\n ul {\n margin: 0;\n }\n a {\n padding: 0;\n w"
},
{
"path": "apps/transport/client/stylesheets/datasets.scss",
"chars": 379,
"preview": ".dataset-container {\n .form__group {\n margin-bottom: 2em;\n }\n}\n\n.transparent {\n background: transparent;\n}\n\n.side-"
},
{
"path": "apps/transport/client/stylesheets/espace_producteur.scss",
"chars": 2296,
"preview": ".tramway {\n text-align: right;\n width: 50vw;\n position: relative;\n height: 0;\n margin: 0 0 0 auto;\n padding-right:"
},
{
"path": "apps/transport/client/stylesheets/globals/_externals.scss",
"chars": 227,
"preview": "// External styles and libraries.\n\n@import '~leaflet/dist/leaflet';\n@import '~@fortawesome/fontawesome-free/scss/fontawe"
},
{
"path": "apps/transport/client/stylesheets/globals/_mixins.scss",
"chars": 366,
"preview": "// Useful mixins to DRY.\n\n@mixin mobile {\n @media (max-width: $breakpoint-tablet) {\n @content;\n }\n}\n\n@mixin tablet "
},
{
"path": "apps/transport/client/stylesheets/globals/_variables.scss",
"chars": 3266,
"preview": "// Variables to setup globally some site standard values.\n\n// # Colours\n\n$lighter-blue: hsl(201 88% 84%) "
},
{
"path": "apps/transport/client/stylesheets/home.scss",
"chars": 9276,
"preview": "html {\n scroll-behavior: smooth;\n scroll-padding-top: 25px;\n}\n\n.hero.home-hero {\n background: #fff url('/images/trans"
},
{
"path": "apps/transport/client/stylesheets/main.scss",
"chars": 3212,
"preview": "body {\n scroll-behavior: smooth;\n}\n\nmain {\n min-height: calc(100% - 96px - 73px);\n}\n\n.hero {\n background: $white;\n}\n\n"
},
{
"path": "apps/transport/client/stylesheets/prism.css",
"chars": 1474,
"preview": "/* PrismJS 1.29.0\nhttps://prismjs.com/download.html#themes=prism-okaidia&languages=markup */\ncode[class*=language-],pre["
},
{
"path": "apps/transport/client/stylesheets/producteurs.scss",
"chars": 3849,
"preview": ".with-gradient {\n background: linear-gradient(\n 0deg,\n rgb(215 232 255) 0%,\n rgb(255 255 255) 50%\n );\n}\n\n.soc"
},
{
"path": "apps/transport/client/stylesheets/reuser_space.scss",
"chars": 2522,
"preview": ".reuser-space {\n padding-top: 24px;\n\n .row {\n display: grid;\n grid-template-columns: repeat(3, 1fr);\n column-"
},
{
"path": "apps/transport/client/stylesheets/reuses.scss",
"chars": 394,
"preview": "#reuses_search {\n padding-left: 30px;\n width: 400px;\n display: inline-block;\n}\n\n#reuses_search_container {\n margin: "
},
{
"path": "apps/transport/client/webpack.common.js",
"chars": 3480,
"preview": "const { resolve } = require('path')\nconst webpack = require('webpack')\nconst CopyWebpackPlugin = require('copy-webpack-p"
},
{
"path": "apps/transport/client/webpack.dev.js",
"chars": 244,
"preview": "const { merge } = require('webpack-merge')\nconst common = require('./webpack.common.js')\n\nconsole.log('webpack dev confi"
},
{
"path": "apps/transport/client/webpack.prod.js",
"chars": 844,
"preview": "const { merge } = require('webpack-merge')\nconst common = require('./webpack.common.js')\nconst CssMinimizerPlugin = requ"
},
{
"path": "apps/transport/lib/S3/aggregates_uploader.ex",
"chars": 2579,
"preview": "defmodule Transport.S3.AggregatesUploader do\n @moduledoc \"\"\"\n Helpers to upload a file, computes its sha256, and updat"
},
{
"path": "apps/transport/lib/S3/unzip.ex",
"chars": 1912,
"preview": "defmodule Transport.Unzip.S3.Behaviour do\n @moduledoc \"\"\"\n Behaviour to allow partial Unzip testing.\n \"\"\"\n\n @callbac"
},
{
"path": "apps/transport/lib/converters/converter.ex",
"chars": 226,
"preview": "defmodule Transport.Converters.Converter do\n @moduledoc \"\"\"\n A behaviour for data converters, used only for GTFS files"
},
{
"path": "apps/transport/lib/data_frame/requiredness_processing.ex",
"chars": 742,
"preview": "defmodule Transport.DataFrame.RequirednessProcessing do\n @moduledoc \"\"\"\n Wrap value validation results with requiredne"
},
{
"path": "apps/transport/lib/data_frame/validation_primitives.ex",
"chars": 7110,
"preview": "defmodule Transport.DataFrame.Validation.Primitives do\n @moduledoc \"\"\"\n Series-based validation primitives.\n\n Each fu"
},
{
"path": "apps/transport/lib/data_screens/data_screens.ex",
"chars": 785,
"preview": "defmodule Transport.Screens do\n @moduledoc \"\"\"\n Exploratory code that I have used from LiveBook.\n \"\"\"\n import Ecto.Q"
},
{
"path": "apps/transport/lib/datagouvfr/authentication.ex",
"chars": 1947,
"preview": "defmodule Datagouvfr.Authentication.Wrapper do\n @moduledoc \"\"\"\n An Authentication wrapper, useful for testing purposes"
},
{
"path": "apps/transport/lib/datagouvfr/client/api.ex",
"chars": 4944,
"preview": "defmodule Datagouvfr.Client.API do\n @moduledoc \"\"\"\n Request Datagouv API\n \"\"\"\n require Logger\n use Datagouvfr.Clien"
},
{
"path": "apps/transport/lib/datagouvfr/client/community_resources.ex",
"chars": 1759,
"preview": "defmodule Datagouvfr.Client.CommunityResources do\n @moduledoc \"\"\"\n This behaviour defines the API for interacting wi"
},
{
"path": "apps/transport/lib/datagouvfr/client/datasets.ex",
"chars": 4090,
"preview": "defmodule Datagouvfr.Client.Datasets do\n @moduledoc \"\"\"\n A wrapper to get datasets from data.gouv.fr API (or mock it f"
},
{
"path": "apps/transport/lib/datagouvfr/client/discussions.ex",
"chars": 2974,
"preview": "defmodule Datagouvfr.Client.Discussions.Wrapper do\n @moduledoc \"\"\"\n A behavior for discussions\n \"\"\"\n alias Datagouvf"
},
{
"path": "apps/transport/lib/datagouvfr/client/oauth.ex",
"chars": 2714,
"preview": "defmodule Datagouvfr.Client.OAuth do\n @moduledoc \"\"\"\n Request Datagouvfr API with OAuth\n \"\"\"\n use Datagouvfr.Client\n"
},
{
"path": "apps/transport/lib/datagouvfr/client/organization.ex",
"chars": 1071,
"preview": "defmodule Datagouvfr.Client.Organization.Wrapper do\n @moduledoc \"\"\"\n A Wrapper to get Organization from data.gouv.fr A"
},
{
"path": "apps/transport/lib/datagouvfr/client/resources.ex",
"chars": 6521,
"preview": "defmodule Datagouvfr.Client.Resources do\n @moduledoc \"\"\"\n A wrapper to get resources from data.gouv.fr API (or mock it"
},
{
"path": "apps/transport/lib/datagouvfr/client/reuses.ex",
"chars": 1725,
"preview": "defmodule Datagouvfr.Client.Reuses.Wrapper do\n @moduledoc \"\"\"\n Behavior for Datagouvfr Reuses\n \"\"\"\n @callback get(ma"
},
{
"path": "apps/transport/lib/datagouvfr/client/user.ex",
"chars": 2198,
"preview": "defmodule Datagouvfr.Client.User.Wrapper do\n @moduledoc \"\"\"\n A wrapper for the User module, useful for testing purpose"
},
{
"path": "apps/transport/lib/datagouvfr/client.ex",
"chars": 3011,
"preview": "defmodule Datagouvfr.Client do\n @moduledoc \"\"\"\n An API client for data.gouv.fr\n \"\"\"\n\n defmacro __using__([]) do\n "
},
{
"path": "apps/transport/lib/db/administrative_division.ex",
"chars": 4567,
"preview": "defmodule DB.AdministrativeDivision do\n @moduledoc \"\"\"\n AdministrativeDivision schema.\n\n This concept is used to repr"
},
{
"path": "apps/transport/lib/db/aom.ex",
"chars": 1094,
"preview": "defmodule DB.AOM do\n @moduledoc \"\"\"\n AOM schema\n\n There's a trigger on postgres on updates, it force an update of dat"
},
{
"path": "apps/transport/lib/db/api_request.ex",
"chars": 324,
"preview": "defmodule DB.APIRequest do\n @moduledoc \"\"\"\n Represents a HTTP request made to the API.\n \"\"\"\n use Ecto.Schema\n use T"
},
{
"path": "apps/transport/lib/db/autocomplete.ex",
"chars": 294,
"preview": "defmodule DB.Autocomplete do\n @moduledoc \"\"\"\n Autocomplete schema\n \"\"\"\n use Ecto.Schema\n use TypedEctoSchema\n\n @pr"
},
{
"path": "apps/transport/lib/db/breaking_news.ex",
"chars": 974,
"preview": "defmodule DB.BreakingNews do\n @moduledoc \"\"\"\n Store a message to be displayed on the site home page.\n \"\"\"\n use Ecto."
},
{
"path": "apps/transport/lib/db/commune.ex",
"chars": 761,
"preview": "defmodule DB.Commune do\n @moduledoc \"\"\"\n Commune schema\n \"\"\"\n use Ecto.Schema\n use TypedEctoSchema\n alias Geo.Mult"
},
{
"path": "apps/transport/lib/db/company.ex",
"chars": 1062,
"preview": "defmodule DB.Company do\n @moduledoc \"\"\"\n Represents a French company identified by its SIREN number.\n Data is fetched"
},
{
"path": "apps/transport/lib/db/contact.ex",
"chars": 13913,
"preview": "defmodule DB.Contact do\n @moduledoc \"\"\"\n Represents a contact/user\n A contact is created or updated each time a user "
},
{
"path": "apps/transport/lib/db/data_conversion.ex",
"chars": 3735,
"preview": "defmodule DB.DataConversion do\n @moduledoc \"\"\"\n DataConversion stores metadata for data conversions from one format to"
},
{
"path": "apps/transport/lib/db/data_import.ex",
"chars": 348,
"preview": "defmodule DB.DataImport do\n @moduledoc \"\"\"\n Table linking a ResourceHistory with a DataImport.\n A DataImport is for e"
},
{
"path": "apps/transport/lib/db/data_import_batch.ex",
"chars": 271,
"preview": "defmodule DB.DataImportBatch do\n @moduledoc \"\"\"\n Table storing the summary of a data import consolidation.\n \"\"\"\n use"
},
{
"path": "apps/transport/lib/db/dataset.ex",
"chars": 42570,
"preview": "defmodule DB.Dataset do\n @moduledoc \"\"\"\n Dataset schema\n\n There's a trigger on update on postgres to update the searc"
},
{
"path": "apps/transport/lib/db/dataset_follower.ex",
"chars": 1798,
"preview": "defmodule DB.DatasetFollower do\n @moduledoc \"\"\"\n Represents contacts following datasets.\n We insert data **only for e"
},
{
"path": "apps/transport/lib/db/dataset_geographic_view.ex",
"chars": 430,
"preview": "defmodule DB.DatasetGeographicView do\n @moduledoc \"\"\"\n View to ease the geographic metadata of a Dataset\n \"\"\"\n use E"
},
{
"path": "apps/transport/lib/db/dataset_history.ex",
"chars": 717,
"preview": "defmodule DB.DatasetHistory do\n @moduledoc \"\"\"\n Historisation of data related to a dataset.\n \"\"\"\n use Ecto.Schema\n "
},
{
"path": "apps/transport/lib/db/dataset_history_resources.ex",
"chars": 592,
"preview": "defmodule DB.DatasetHistoryResources do\n @moduledoc \"\"\"\n Resources that are part of a dataset history\n \"\"\"\n use Ecto"
},
{
"path": "apps/transport/lib/db/dataset_monthly_metric.ex",
"chars": 2474,
"preview": "defmodule DB.DatasetMonthlyMetric do\n @moduledoc \"\"\"\n Monthly metrics related to datasets as given by the data.gouv.fr"
},
{
"path": "apps/transport/lib/db/dataset_score.ex",
"chars": 3992,
"preview": "defmodule DB.DatasetScore do\n @moduledoc \"\"\"\n Give a dataset a score for different topics\n \"\"\"\n use Ecto.Schema\n us"
},
{
"path": "apps/transport/lib/db/dataset_subtype.ex",
"chars": 709,
"preview": "defmodule DB.DatasetSubtype do\n @moduledoc \"\"\"\n Represents dataset subtypes.\n A subtype has a parent_type (e.g., \"pub"
},
{
"path": "apps/transport/lib/db/default_token.ex",
"chars": 679,
"preview": "defmodule DB.DefaultToken do\n @moduledoc \"\"\"\n Represents a default token for a contact.\n \"\"\"\n use Ecto.Schema\n use "
},
{
"path": "apps/transport/lib/db/departement.ex",
"chars": 517,
"preview": "defmodule DB.Departement do\n @moduledoc \"\"\"\n Departement schema\n \"\"\"\n use Ecto.Schema\n use TypedEctoSchema\n\n typed"
},
{
"path": "apps/transport/lib/db/encrypted/binary.ex",
"chars": 230,
"preview": "defmodule DB.Encrypted.Binary do\n @moduledoc \"\"\"\n An encrypted binary type suitable for `cloak_ecto`\n https://hexdocs"
},
{
"path": "apps/transport/lib/db/epci.ex",
"chars": 1229,
"preview": "defmodule DB.EPCI do\n @moduledoc \"\"\"\n EPCI schema.\n\n The EPCI are loaded by the task transport/lib/transport/import_e"
},
{
"path": "apps/transport/lib/db/feature_usage.ex",
"chars": 900,
"preview": "defmodule DB.FeatureUsage do\n @moduledoc \"\"\"\n Logs when a feature has been used by a contact with metadata.\n \"\"\"\n us"
},
{
"path": "apps/transport/lib/db/geo_data/geo_data.ex",
"chars": 1253,
"preview": "defmodule DB.GeoData do\n @moduledoc \"\"\"\n Stores any kind of geographical data, typically from a resource\n \"\"\"\n use E"
},
{
"path": "apps/transport/lib/db/geo_data/geo_data_import.ex",
"chars": 417,
"preview": "defmodule DB.GeoDataImport do\n @moduledoc \"\"\"\n Links geo_data data with its source\n \"\"\"\n use Ecto.Schema\n use Typed"
},
{
"path": "apps/transport/lib/db/gtfs/gtfs_agency.ex",
"chars": 622,
"preview": "defmodule DB.GTFS.Agency do\n @moduledoc \"\"\"\n This contains the information present in GTFS agency.txt files.\n https:/"
},
{
"path": "apps/transport/lib/db/gtfs/gtfs_calendar.ex",
"chars": 662,
"preview": "defmodule DB.GTFS.Calendar do\n @moduledoc \"\"\"\n This contains the information present in GTFS calendar.txt files.\n htt"
},
{
"path": "apps/transport/lib/db/gtfs/gtfs_calendar_dates.ex",
"chars": 440,
"preview": "defmodule DB.GTFS.CalendarDates do\n @moduledoc \"\"\"\n This contains the information present in GTFS calendar_dates.txt f"
},
{
"path": "apps/transport/lib/db/gtfs/gtfs_stop.ex",
"chars": 465,
"preview": "defmodule DB.GTFS.Stops do\n @moduledoc \"\"\"\n This contains the information present in GTFS stops.txt files.\n https://d"
},
{
"path": "apps/transport/lib/db/gtfs/gtfs_stop_times.ex",
"chars": 499,
"preview": "defmodule DB.GTFS.StopTimes do\n @moduledoc \"\"\"\n This contains the information present in GTFS stops.txt files.\n https"
},
{
"path": "apps/transport/lib/db/gtfs/gtfs_trips.ex",
"chars": 403,
"preview": "defmodule DB.GTFS.Trips do\n @moduledoc \"\"\"\n This contains the information present in GTFS trips.txt files.\n https://d"
},
{
"path": "apps/transport/lib/db/hidden_reuser_alert.ex",
"chars": 3254,
"preview": "defmodule DB.HiddenReuserAlert do\n @moduledoc \"\"\"\n Stores alerts hidden by reusers in their reuser space.\n Each entry"
},
{
"path": "apps/transport/lib/db/irve_valid_file.ex",
"chars": 747,
"preview": "defmodule DB.IRVEValidFile do\n @moduledoc \"\"\"\n IRVE file that has been validated and stored. This file refers to a dat"
},
{
"path": "apps/transport/lib/db/irve_valid_pdc.ex",
"chars": 3164,
"preview": "defmodule DB.IRVEValidPDC do\n @moduledoc \"\"\"\n IRVE Point de Charge (PDC) record from a validated IRVE file.\n This sch"
},
{
"path": "apps/transport/lib/db/logs_import.ex",
"chars": 340,
"preview": "defmodule DB.LogsImport do\n @moduledoc \"\"\"\n LogsImport schema\n \"\"\"\n use Ecto.Schema\n use TypedEctoSchema\n alias DB"
},
{
"path": "apps/transport/lib/db/metrics.ex",
"chars": 3238,
"preview": "defmodule DB.Metrics do\n @moduledoc \"\"\"\n A quick metric schema to count bucketed events, until a moment may come\n whe"
},
{
"path": "apps/transport/lib/db/multi_validation.ex",
"chars": 9511,
"preview": "defmodule DB.MultiValidation do\n @moduledoc \"\"\"\n Validation model allowing multiple validations on the same data\n \"\"\""
},
{
"path": "apps/transport/lib/db/notification.ex",
"chars": 4272,
"preview": "defmodule DB.Notification do\n @moduledoc \"\"\"\n A list of emails notifications sent, with email addresses encrypted\n \"\""
},
{
"path": "apps/transport/lib/db/notification_subscription.ex",
"chars": 7480,
"preview": "defmodule DB.NotificationSubscription do\n @moduledoc \"\"\"\n Represents a subscription to a notification type for a `DB.C"
},
{
"path": "apps/transport/lib/db/offer.ex",
"chars": 1548,
"preview": "defmodule DB.Offer do\n @moduledoc \"\"\"\n Represents transport offers.\n \"\"\"\n use TypedEctoSchema\n use Ecto.Schema\n im"
},
{
"path": "apps/transport/lib/db/organization.ex",
"chars": 961,
"preview": "defmodule DB.Organization do\n @moduledoc \"\"\"\n Represents an organization on data.gouv.fr\n \"\"\"\n use TypedEctoSchema\n "
},
{
"path": "apps/transport/lib/db/postgrex_types.ex",
"chars": 69,
"preview": "Postgrex.Types.define(DB.PostgrexTypes, [Geo.PostGIS.Extension], [])\n"
},
{
"path": "apps/transport/lib/db/processing_report.ex",
"chars": 564,
"preview": "defmodule DB.ProcessingReport do\n @moduledoc \"\"\"\n A generic reporting structure used to store a JSON-type report.\n\n C"
},
{
"path": "apps/transport/lib/db/proxy_request.ex",
"chars": 306,
"preview": "defmodule DB.ProxyRequest do\n @moduledoc \"\"\"\n Represents a HTTP request made to the proxy.\n \"\"\"\n use Ecto.Schema\n u"
},
{
"path": "apps/transport/lib/db/region.ex",
"chars": 630,
"preview": "defmodule DB.Region do\n @moduledoc \"\"\"\n Region schema\n\n There's a trigger on postgres on updates, it force an update "
},
{
"path": "apps/transport/lib/db/repo.ex",
"chars": 135,
"preview": "defmodule DB.Repo do\n use Ecto.Repo,\n otp_app: :transport,\n adapter: Ecto.Adapters.Postgres\n\n use Scrivener, pag"
},
{
"path": "apps/transport/lib/db/resource.ex",
"chars": 15727,
"preview": "defmodule DB.Resource do\n @moduledoc \"\"\"\n Resource model\n \"\"\"\n use Ecto.Schema\n use TypedEctoSchema\n alias DB.{Dat"
},
{
"path": "apps/transport/lib/db/resource_download.ex",
"chars": 310,
"preview": "defmodule DB.ResourceDownload do\n @moduledoc \"\"\"\n Represents a resource download.\n \"\"\"\n use Ecto.Schema\n use TypedE"
},
{
"path": "apps/transport/lib/db/resource_history.ex",
"chars": 3367,
"preview": "defmodule DB.ResourceHistory do\n @moduledoc \"\"\"\n ResourceHistory stores metadata when resources are historicized.\n \"\""
},
{
"path": "apps/transport/lib/db/resource_metadata.ex",
"chars": 1279,
"preview": "defmodule DB.ResourceMetadata do\n @moduledoc \"\"\"\n Metadata extracted from a resource.\n For example transport modes av"
},
{
"path": "apps/transport/lib/db/resource_monthly_metric.ex",
"chars": 2706,
"preview": "defmodule DB.ResourceMonthlyMetric do\n @moduledoc \"\"\"\n Monthly metrics related to resources as given by the data.gouv."
},
{
"path": "apps/transport/lib/db/resource_related.ex",
"chars": 959,
"preview": "defmodule DB.ResourceRelated do\n @moduledoc \"\"\"\n A module to deal with `DB.Resource` related to other `DB.Resource`\n "
},
{
"path": "apps/transport/lib/db/resource_unavailability.ex",
"chars": 4057,
"preview": "defmodule DB.ResourceUnavailability do\n @moduledoc \"\"\"\n Model used to store when a resource is not available over HTTP"
},
{
"path": "apps/transport/lib/db/reuse.ex",
"chars": 5307,
"preview": "defmodule DB.Reuse do\n @moduledoc \"\"\"\n Represents data.gouv.fr reuses.\n \"\"\"\n use TypedEctoSchema\n use Ecto.Schema\n "
}
]
// ... and 1373 more files (truncated)