From 5e0b7245479e4dc082ecaf61d48f8a8c63d97b07 Mon Sep 17 00:00:00 2001 From: Francis Chabouis Date: Mon, 28 Feb 2022 17:10:27 +0100 Subject: [PATCH] =?UTF-8?q?Remont=C3=A9e=20en=20base=20de=20fichier=20GTFS?= =?UTF-8?q?=20et=20calculs=20des=20prochains=20d=C3=A9parts=20(#2105)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * create tables and schemas * playing around with Unzip * properly fill stops table from file * format * change table name * clean temp file * add calendar table * streaming all the way ❤️ * create gtfs_stop_times table and schema * add nimblecsv dep * fill the stop_times table * format files * fix credo warnings * add calendar dates import * fill data_import_id * add trips * add an array for dow in gtfs_calendar * add query for next departures * add some factories * update factory * add a simple test for next departure * add tests for exceptions * correct the query, thanks to the tests * new test with exception on top of regular calendar * additional tests * real definition of the datetime (for time changes) * format * make location_type integer * extract functions * create gtfs file for db import test * add gtfs to db import tests * extract function and test it * ignore file for dialyzer * add gtfs reference links Co-authored-by: Thibaut Barrère --- .dialyzer_ignore.exs | 4 +- apps/db/lib/db/data_import.ex | 11 + apps/db/lib/db/gtfs_calendar dates.ex | 16 + apps/db/lib/db/gtfs_calendar.ex | 24 ++ apps/db/lib/db/gtfs_import.ex | 12 + apps/db/lib/db/gtfs_stop.ex | 17 + apps/db/lib/db/gtfs_stop_times.ex | 17 + apps/db/lib/db/gtfs_trips.ex | 16 + apps/db/mix.exs | 3 +- ...20220208143147_create_gtfs_stops_table.exs | 47 +++ ...220210142527_add_table_gtfs_stop_times.exs | 24 ++ apps/db/test/support/factory.ex | 20 ++ apps/transport/lib/{ => S3}/s3.ex | 0 apps/transport/lib/S3/unzip.ex | 52 +++ apps/transport/lib/jobs/gtfs_to_db.ex | 217 +++++++++++++ apps/transport/lib/transport/gtfs_query.ex | 47 +++ apps/transport/mix.exs | 3 +- .../test/fixture/files/gtfs_import.zip | Bin 0 -> 1936 bytes .../test/transport/gtfs_query_test.exs | 295 ++++++++++++++++++ .../test/transport/jobs/gtfs_to_db_test.exs | 151 +++++++++ mix.lock | 2 + 21 files changed, 975 insertions(+), 3 deletions(-) create mode 100644 apps/db/lib/db/data_import.ex create mode 100644 apps/db/lib/db/gtfs_calendar dates.ex create mode 100644 apps/db/lib/db/gtfs_calendar.ex create mode 100644 apps/db/lib/db/gtfs_import.ex create mode 100644 apps/db/lib/db/gtfs_stop.ex create mode 100644 apps/db/lib/db/gtfs_stop_times.ex create mode 100644 apps/db/lib/db/gtfs_trips.ex create mode 100644 apps/db/priv/repo/migrations/20220208143147_create_gtfs_stops_table.exs create mode 100644 apps/db/priv/repo/migrations/20220210142527_add_table_gtfs_stop_times.exs rename apps/transport/lib/{ => S3}/s3.ex (100%) create mode 100644 apps/transport/lib/S3/unzip.ex create mode 100644 apps/transport/lib/jobs/gtfs_to_db.ex create mode 100644 apps/transport/lib/transport/gtfs_query.ex create mode 100644 apps/transport/test/fixture/files/gtfs_import.zip create mode 100644 apps/transport/test/transport/gtfs_query_test.exs create mode 100644 apps/transport/test/transport/jobs/gtfs_to_db_test.exs diff --git a/.dialyzer_ignore.exs b/.dialyzer_ignore.exs index 605ad120da..3675618c93 100644 --- a/.dialyzer_ignore.exs +++ b/.dialyzer_ignore.exs @@ -1,5 +1,7 @@ [ # temporary fix for https://github.com/elixir-ecto/postgrex/issues/549 ~r/deps\/postgrex\/lib\/postgrex\/type_module.ex/, - 
~r/lib\/postgrex\/type_module.ex/
+  ~r/lib\/postgrex\/type_module.ex/,
+  # EctoInterval raises an unknown_type error
+  ~r/lib\/db\/gtfs_stop_times.ex/
 ]
diff --git a/apps/db/lib/db/data_import.ex b/apps/db/lib/db/data_import.ex
new file mode 100644
index 0000000000..6b495627c6
--- /dev/null
+++ b/apps/db/lib/db/data_import.ex
@@ -0,0 +1,11 @@
+defmodule DB.DataImport do
+  @moduledoc """
+  Table linking a ResourceHistory with a DataImport
+  """
+  use Ecto.Schema
+  use TypedEctoSchema
+
+  typed_schema "data_import" do
+    belongs_to(:resource_history, DB.ResourceHistory)
+  end
+end
diff --git a/apps/db/lib/db/gtfs_calendar dates.ex b/apps/db/lib/db/gtfs_calendar dates.ex
new file mode 100644
index 0000000000..085047e029
--- /dev/null
+++ b/apps/db/lib/db/gtfs_calendar dates.ex
@@ -0,0 +1,16 @@
+defmodule DB.GtfsCalendarDates do
+  @moduledoc """
+  This contains the information present in GTFS calendar_dates.txt files.
+  https://developers.google.com/transit/gtfs/reference?hl=fr#calendar_datestxt
+  """
+  use Ecto.Schema
+  use TypedEctoSchema
+
+  typed_schema "gtfs_calendar_dates" do
+    belongs_to(:data_import, DB.GtfsImport)
+
+    field(:service_id, :binary)
+    field(:date, :date)
+    field(:exception_type, :integer)
+  end
+end
diff --git a/apps/db/lib/db/gtfs_calendar.ex b/apps/db/lib/db/gtfs_calendar.ex
new file mode 100644
index 0000000000..584ca228c9
--- /dev/null
+++ b/apps/db/lib/db/gtfs_calendar.ex
@@ -0,0 +1,24 @@
+defmodule DB.GtfsCalendar do
+  @moduledoc """
+  This contains the information present in GTFS calendar.txt files.
+  https://developers.google.com/transit/gtfs/reference?hl=fr#calendartxt
+  """
+  use Ecto.Schema
+  use TypedEctoSchema
+
+  typed_schema "gtfs_calendar" do
+    belongs_to(:data_import, DB.GtfsImport)
+
+    field(:service_id, :binary)
+    field(:monday, :integer)
+    field(:tuesday, :integer)
+    field(:wednesday, :integer)
+    field(:thursday, :integer)
+    field(:friday, :integer)
+    field(:saturday, :integer)
+    field(:sunday, :integer)
+    field(:days, {:array, :integer})
+    field(:start_date, :date)
+    field(:end_date, :date)
+  end
+end
diff --git a/apps/db/lib/db/gtfs_import.ex b/apps/db/lib/db/gtfs_import.ex
new file mode 100644
index 0000000000..07e34c1f1d
--- /dev/null
+++ b/apps/db/lib/db/gtfs_import.ex
@@ -0,0 +1,12 @@
+defmodule DB.GtfsImport do
+  @moduledoc """
+  GtfsImport lists the imports done for each ResourceHistory.
+  It will be a good place to add information about which import is currently in use, published, etc.
+  """
+  use Ecto.Schema
+  use TypedEctoSchema
+
+  typed_schema "data_import" do
+    belongs_to(:resource_history, DB.ResourceHistory)
+  end
+end
diff --git a/apps/db/lib/db/gtfs_stop.ex b/apps/db/lib/db/gtfs_stop.ex
new file mode 100644
index 0000000000..df28c1a1c3
--- /dev/null
+++ b/apps/db/lib/db/gtfs_stop.ex
@@ -0,0 +1,17 @@
+defmodule DB.GtfsStops do
+  @moduledoc """
+  This contains the information present in GTFS stops.txt files.
+ https://developers.google.com/transit/gtfs/reference?hl=fr#stopstxt + """ + use Ecto.Schema + use TypedEctoSchema + + typed_schema "gtfs_stops" do + belongs_to(:data_import, DB.GtfsImport) + field(:stop_id, :binary) + field(:stop_name, :binary) + field(:stop_lat, :float) + field(:stop_lon, :float) + field(:location_type, :integer) + end +end diff --git a/apps/db/lib/db/gtfs_stop_times.ex b/apps/db/lib/db/gtfs_stop_times.ex new file mode 100644 index 0000000000..3b68f837c7 --- /dev/null +++ b/apps/db/lib/db/gtfs_stop_times.ex @@ -0,0 +1,17 @@ +defmodule DB.GtfsStopTimes do + @moduledoc """ + This contains the information present in GTFS stops.txt files. + https://developers.google.com/transit/gtfs/reference?hl=fr#stop_timestxt + """ + use Ecto.Schema + use TypedEctoSchema + + typed_schema "gtfs_stop_times" do + belongs_to(:data_import, DB.GtfsImport) + field(:trip_id, :binary) + field(:stop_id, :binary) + field(:stop_sequence, :integer) + field(:arrival_time, EctoInterval) + field(:departure_time, EctoInterval) + end +end diff --git a/apps/db/lib/db/gtfs_trips.ex b/apps/db/lib/db/gtfs_trips.ex new file mode 100644 index 0000000000..abf4e1692c --- /dev/null +++ b/apps/db/lib/db/gtfs_trips.ex @@ -0,0 +1,16 @@ +defmodule DB.GtfsTrips do + @moduledoc """ + This contains the information present in GTFS trips.txt files. + https://developers.google.com/transit/gtfs/reference?hl=fr#tripstxt + """ + use Ecto.Schema + use TypedEctoSchema + + typed_schema "gtfs_trips" do + belongs_to(:data_import, DB.GtfsImport) + + field(:route_id, :binary) + field(:service_id, :binary) + field(:trip_id, :binary) + end +end diff --git a/apps/db/mix.exs b/apps/db/mix.exs index c86d49ebd2..a4ea650eb4 100644 --- a/apps/db/mix.exs +++ b/apps/db/mix.exs @@ -52,7 +52,8 @@ defmodule Db.MixProject do {:sentry, ">= 0.0.0"}, {:typed_ecto_schema, ">= 0.1.1"}, {:ex_machina, "~> 2.4", only: :test}, - {:oban, "~> 2.9"} + {:oban, "~> 2.9"}, + {:ecto_interval, git: "https://github.com/etalab/ecto_interval", ref: "master"} ] end end diff --git a/apps/db/priv/repo/migrations/20220208143147_create_gtfs_stops_table.exs b/apps/db/priv/repo/migrations/20220208143147_create_gtfs_stops_table.exs new file mode 100644 index 0000000000..7ea2bfed6b --- /dev/null +++ b/apps/db/priv/repo/migrations/20220208143147_create_gtfs_stops_table.exs @@ -0,0 +1,47 @@ +defmodule DB.Repo.Migrations.CreateGtfsStopsTable do + use Ecto.Migration + + def change do + create table(:data_import) do + add :resource_history_id, references(:resource_history) + end + + create table(:gtfs_stops) do + add(:data_import_id, references(:data_import)) + add(:stop_id, :binary) + add(:stop_name, :binary) + add(:stop_lat, :float) + add(:stop_lon, :float) + add(:location_type, :integer) + end + + create table(:gtfs_calendar) do + add(:data_import_id, references(:data_import)) + add(:service_id, :binary) + add(:monday, :integer) + add(:tuesday, :integer) + add(:wednesday, :integer) + add(:thursday, :integer) + add(:friday, :integer) + add(:saturday, :integer) + add(:sunday, :integer) + add(:days, {:array, :integer}) + add(:start_date, :date) + add(:end_date, :date) + end + + create table(:gtfs_calendar_dates) do + add(:data_import_id, references(:data_import)) + add(:service_id, :binary) + add(:date, :date) + add(:exception_type, :integer) + end + + create table(:gtfs_trips) do + add(:data_import_id, references(:data_import)) + add(:route_id, :binary) + add(:service_id, :binary) + add(:trip_id, :binary) + end + end +end diff --git 
a/apps/db/priv/repo/migrations/20220210142527_add_table_gtfs_stop_times.exs b/apps/db/priv/repo/migrations/20220210142527_add_table_gtfs_stop_times.exs new file mode 100644 index 0000000000..8846c7bc6b --- /dev/null +++ b/apps/db/priv/repo/migrations/20220210142527_add_table_gtfs_stop_times.exs @@ -0,0 +1,24 @@ +defmodule DB.Repo.Migrations.AddTableGtfsStopTimes do + use Ecto.Migration + + def up do + create table(:gtfs_stop_times) do + add(:data_import_id, references(:data_import)) + add(:trip_id, :binary) + add(:stop_id, :binary) + add(:stop_sequence, :integer) + end + + execute """ + alter table gtfs_stop_times add column arrival_time interval hour to second; + """ + + execute """ + alter table gtfs_stop_times add column departure_time interval hour to second; + """ + end + + def down do + execute "drop table gtfs_stop_times;" + end +end diff --git a/apps/db/test/support/factory.ex b/apps/db/test/support/factory.ex index 8b5cb70572..02fd9dda91 100644 --- a/apps/db/test/support/factory.ex +++ b/apps/db/test/support/factory.ex @@ -67,6 +67,26 @@ defmodule DB.Factory do } end + def data_import_factory do + %DB.DataImport{} + end + + def gtfs_stop_times_factory do + %DB.GtfsStopTimes{} + end + + def gtfs_trips_factory do + %DB.GtfsTrips{} + end + + def gtfs_calendar_factory do + %DB.GtfsCalendar{} + end + + def gtfs_calendar_dates_factory do + %DB.GtfsCalendarDates{} + end + def validation_factory do %DB.Validation{} end diff --git a/apps/transport/lib/s3.ex b/apps/transport/lib/S3/s3.ex similarity index 100% rename from apps/transport/lib/s3.ex rename to apps/transport/lib/S3/s3.ex diff --git a/apps/transport/lib/S3/unzip.ex b/apps/transport/lib/S3/unzip.ex new file mode 100644 index 0000000000..31d3ce1280 --- /dev/null +++ b/apps/transport/lib/S3/unzip.ex @@ -0,0 +1,52 @@ +defmodule Transport.Unzip.S3File do + @moduledoc """ + Read a remote zip file stored on a S3 bucket, as explained here + https://hexdocs.pm/unzip/readme.html + """ + + defstruct [:path, :bucket, :s3_config] + alias __MODULE__ + + def new(path, bucket, s3_config) do + %S3File{path: path, bucket: bucket, s3_config: s3_config} + end + + def get_file_stream(file_name, zip_name, bucket_name) do + aws_s3_config = + ExAws.Config.new(:s3, + access_key_id: [Application.fetch_env!(:ex_aws, :access_key_id), :instance_role], + secret_access_key: [Application.fetch_env!(:ex_aws, :secret_access_key), :instance_role] + ) + + file = new(zip_name, bucket_name, aws_s3_config) + {:ok, unzip} = Unzip.new(file) + Unzip.file_stream!(unzip, file_name) + end +end + +defimpl Unzip.FileAccess, for: Transport.Unzip.S3File do + alias ExAws.S3 + + def size(file) do + %{headers: headers} = file.bucket |> S3.head_object(file.path) |> ExAws.request!(file.s3_config) + + size = + headers + |> Enum.find(fn {k, _} -> String.downcase(k) == "content-length" end) + |> elem(1) + |> String.to_integer() + + {:ok, size} + end + + def pread(file, offset, length) do + {_, chunk} = + S3.Download.get_chunk( + %S3.Download{bucket: file.bucket, path: file.path, dest: nil}, + %{start_byte: offset, end_byte: offset + length - 1}, + file.s3_config + ) + + {:ok, chunk} + end +end diff --git a/apps/transport/lib/jobs/gtfs_to_db.ex b/apps/transport/lib/jobs/gtfs_to_db.ex new file mode 100644 index 0000000000..246a73231c --- /dev/null +++ b/apps/transport/lib/jobs/gtfs_to_db.ex @@ -0,0 +1,217 @@ +defmodule Transport.Jobs.GtfsToDB do + @moduledoc """ + Get the content of a GTFS ResourceHistory, store it in the DB + """ + + def 
import_gtfs_from_resource_history(resource_history_id) do
+    %{id: data_import_id} = %DB.DataImport{resource_history_id: resource_history_id} |> DB.Repo.insert!()
+
+    fill_stops_from_resource_history(resource_history_id, data_import_id)
+    fill_stop_times_from_resource_history(resource_history_id, data_import_id)
+    fill_calendar_from_resource_history(resource_history_id, data_import_id)
+    fill_calendar_dates_from_resource_history(resource_history_id, data_import_id)
+    fill_trips_from_resource_history(resource_history_id, data_import_id)
+  end
+
+  def fill_stops_from_resource_history(resource_history_id, data_import_id) do
+    %{payload: %{"filename" => filename}} = DB.ResourceHistory |> DB.Repo.get!(resource_history_id)
+    bucket_name = Transport.S3.bucket_name(:history)
+
+    file_stream = Transport.Unzip.S3File.get_file_stream("stops.txt", filename, bucket_name)
+    stops_stream_insert(file_stream, data_import_id)
+  end
+
+  def stops_stream_insert(file_stream, data_import_id) do
+    DB.Repo.transaction(fn ->
+      file_stream
+      |> to_stream_of_maps()
+      # the map is reshaped for Ecto's needs
+      |> Stream.map(fn r ->
+        %{
+          data_import_id: data_import_id,
+          stop_id: r |> Map.fetch!("stop_id"),
+          stop_name: r |> Map.fetch!("stop_name"),
+          stop_lat: r |> Map.fetch!("stop_lat") |> String.to_float(),
+          stop_lon: r |> Map.fetch!("stop_lon") |> String.to_float(),
+          location_type: r |> Map.fetch!("location_type") |> String.to_integer()
+        }
+      end)
+      |> Stream.chunk_every(1000)
+      |> Stream.each(fn chunk -> DB.Repo.insert_all(DB.GtfsStops, chunk) end)
+      |> Stream.run()
+    end)
+  end
+
+  @doc """
+  Transforms the stream output by Unzip into a stream of maps, each map
+  corresponding to a row of the CSV.
+  """
+  def to_stream_of_maps(file_stream) do
+    file_stream
+    # transform the stream to a stream of binaries
+    |> Stream.map(fn c -> IO.iodata_to_binary(c) end)
+    # stream line by line
+    |> NimbleCSV.RFC4180.to_line_stream()
+    |> NimbleCSV.RFC4180.parse_stream(skip_headers: false)
+    # transform the stream to a stream of maps %{column_name1: value1, ...}
+    |> Stream.transform([], fn r, acc ->
+      if acc == [] do
+        {%{}, r |> Enum.map(fn h -> h |> String.replace_prefix("\uFEFF", "") end)}
+      else
+        {[acc |> Enum.zip(r) |> Enum.into(%{})], acc}
+      end
+    end)
+  end
+
+  def fill_calendar_from_resource_history(resource_history_id, data_import_id) do
+    %{payload: %{"filename" => filename}} = DB.ResourceHistory |> DB.Repo.get!(resource_history_id)
+    bucket_name = Transport.S3.bucket_name(:history)
+
+    file_stream = Transport.Unzip.S3File.get_file_stream("calendar.txt", filename, bucket_name)
+
+    calendar_stream_insert(file_stream, data_import_id)
+  end
+
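  # Illustrative sketch, not part of this commit: once a GTFS has been historicized, the
  # functions in this module and Transport.GtfsQuery (added further down in this patch) are
  # meant to be chained roughly like this. The resource_history id 123 and the stop id
  # "stop_1" are made up for the example.
  #
  #     resource_history_id = 123
  #     Transport.Jobs.GtfsToDB.import_gtfs_from_resource_history(resource_history_id)
  #
  #     # fetch the data_import row created by the import above
  #     data_import_id = DB.Repo.get_by!(DB.DataImport, resource_history_id: resource_history_id).id
  #
  #     # next departures from a stop over a 10 minute window, returned as maps with string keys
  #     Transport.GtfsQuery.next_departures("stop_1", data_import_id, DateTime.utc_now(), 10)
  #     # => [%{"departure" => ~N[...], "route_id" => ..., "service_id" => ..., "stop_id" => "stop_1", "trip_id" => ...}]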
+  def calendar_stream_insert(file_stream, data_import_id) do
+    DB.Repo.transaction(fn ->
+      file_stream
+      |> to_stream_of_maps()
+      |> Stream.map(fn r ->
+        res = %{
+          data_import_id: data_import_id,
+          service_id: r |> Map.fetch!("service_id"),
+          monday: monday = r |> Map.fetch!("monday") |> String.to_integer(),
+          tuesday: tuesday = r |> Map.fetch!("tuesday") |> String.to_integer(),
+          wednesday: wednesday = r |> Map.fetch!("wednesday") |> String.to_integer(),
+          thursday: thursday = r |> Map.fetch!("thursday") |> String.to_integer(),
+          friday: friday = r |> Map.fetch!("friday") |> String.to_integer(),
+          saturday: saturday = r |> Map.fetch!("saturday") |> String.to_integer(),
+          sunday: sunday = r |> Map.fetch!("sunday") |> String.to_integer(),
+          start_date: r |> Map.fetch!("start_date") |> Timex.parse!("{YYYY}{0M}{0D}") |> NaiveDateTime.to_date(),
+          end_date: r |> Map.fetch!("end_date") |> Timex.parse!("{YYYY}{0M}{0D}") |> NaiveDateTime.to_date()
+        }
+
+        res
+        |> Map.put(:days, get_dow_array([monday, tuesday, wednesday, thursday, friday, saturday, sunday]))
+      end)
+      |> Stream.chunk_every(1000)
+      |> Stream.each(fn chunk -> DB.Repo.insert_all(DB.GtfsCalendar, chunk) end)
+      |> Stream.run()
+    end)
+  end
+
+  @doc """
+  Takes values (0 or 1) for each day and returns an array with the days of the week whose value equals 1
+
+  iex> get_dow_array([1,0,1,0,0,0,1])
+  [1,3,7]
+  """
+  def get_dow_array([_monday, _tuesday, _wednesday, _thursday, _friday, _saturday, _sunday] = dows) do
+    dows
+    |> Enum.with_index()
+    |> Enum.filter(&(elem(&1, 0) == 1))
+    |> Enum.map(&(elem(&1, 1) + 1))
+  end
+
+  def fill_stop_times_from_resource_history(resource_history_id, data_import_id) do
+    %{payload: %{"filename" => filename}} = DB.ResourceHistory |> DB.Repo.get!(resource_history_id)
+    bucket_name = Transport.S3.bucket_name(:history)
+    file_stream = Transport.Unzip.S3File.get_file_stream("stop_times.txt", filename, bucket_name)
+    stop_times_stream_insert(file_stream, data_import_id)
+  end
+
+  def stop_times_stream_insert(file_stream, data_import_id) do
+    DB.Repo.transaction(
+      fn ->
+        file_stream
+        |> to_stream_of_maps()
+        |> Stream.map(fn r ->
+          %{
+            data_import_id: data_import_id,
+            trip_id: r |> Map.fetch!("trip_id"),
+            arrival_time: r |> Map.fetch!("arrival_time") |> cast_binary_to_interval(),
+            departure_time: r |> Map.fetch!("departure_time") |> cast_binary_to_interval(),
+            stop_id: r |> Map.fetch!("stop_id"),
+            stop_sequence: r |> Map.fetch!("stop_sequence") |> String.to_integer()
+          }
+        end)
+        |> Stream.chunk_every(1000)
+        |> Stream.each(fn chunk -> DB.Repo.insert_all(DB.GtfsStopTimes, chunk) end)
+        |> Stream.run()
+      end,
+      timeout: 240_000
+    )
+  end
+
+  def cast_binary_to_interval(s) do
+    %{"hours" => hours, "minutes" => minutes, "seconds" => seconds} =
+      Regex.named_captures(~r/(?<hours>[0-9]+):(?<minutes>[0-9]+):(?<seconds>[0-9]+)/, s)
+
+    hours = hours |> String.to_integer()
+    minutes = minutes |> String.to_integer()
+    seconds = seconds |> String.to_integer()
+
+    # this is what EctoInterval is able to cast into a Postgrex.Interval
+    %{
+      secs: hours * 60 * 60 + minutes * 60 + seconds,
+      days: 0,
+      months: 0
+    }
+  end
+
+  def fill_calendar_dates_from_resource_history(resource_history_id, data_import_id) do
+    %{payload: %{"filename" => filename}} = DB.ResourceHistory |> DB.Repo.get!(resource_history_id)
+    bucket_name = Transport.S3.bucket_name(:history)
+    file_stream = Transport.Unzip.S3File.get_file_stream("calendar_dates.txt", filename, bucket_name)
+    calendar_dates_stream_insert(file_stream, data_import_id)
+  end
+
+  def calendar_dates_stream_insert(file_stream, data_import_id) do
+    DB.Repo.transaction(
+      fn ->
+        file_stream
+        |> to_stream_of_maps()
+        |> Stream.map(fn r ->
+          %{
+            data_import_id: data_import_id,
+            service_id: r |> Map.fetch!("service_id"),
+            date: r |> Map.fetch!("date") |> Timex.parse!("{YYYY}{0M}{0D}") |> NaiveDateTime.to_date(),
+            exception_type: r |> Map.fetch!("exception_type") |> String.to_integer()
+          }
+        end)
+        |> Stream.chunk_every(1000)
+        |> Stream.each(fn chunk -> DB.Repo.insert_all(DB.GtfsCalendarDates, chunk) end)
+        |> Stream.run()
+      end,
+      timeout: 240_000
+    )
+  end
+
+  def fill_trips_from_resource_history(resource_history_id, data_import_id) do
+    %{payload: %{"filename" => filename}} = DB.ResourceHistory |> DB.Repo.get!(resource_history_id)
+    bucket_name = Transport.S3.bucket_name(:history)
+    file_stream = Transport.Unzip.S3File.get_file_stream("trips.txt", filename, bucket_name)
+
trips_stream_insert(file_stream, data_import_id) + end + + def trips_stream_insert(file_stream, data_import_id) do + DB.Repo.transaction( + fn -> + file_stream + |> to_stream_of_maps() + |> Stream.map(fn r -> + %{ + data_import_id: data_import_id, + service_id: r |> Map.fetch!("service_id"), + route_id: r |> Map.fetch!("route_id"), + trip_id: r |> Map.fetch!("trip_id") + } + end) + |> Stream.chunk_every(1000) + |> Stream.each(fn chunk -> DB.Repo.insert_all(DB.GtfsTrips, chunk) end) + |> Stream.run() + end, + timeout: 240_000 + ) + end +end diff --git a/apps/transport/lib/transport/gtfs_query.ex b/apps/transport/lib/transport/gtfs_query.ex new file mode 100644 index 0000000000..8fe8e0e69a --- /dev/null +++ b/apps/transport/lib/transport/gtfs_query.ex @@ -0,0 +1,47 @@ +defmodule Transport.GtfsQuery do + @moduledoc """ + Make queries on gtfs data imports for useful information extraction. + e.g. Get next departures from a stop + """ + + def next_departures(stop_id, import_id, datetime_start, window_in_minutes) do + datetime_end = DateTime.add(datetime_start, window_in_minutes * 60, :second) + no_dates_before = datetime_start |> DateTime.to_date() |> Date.add(-2) + + query = """ + with stop_times as (select gst.*, gt.route_id, gt.service_id, gc.days, gc.start_date, gc.end_date from gtfs_stop_times gst + left join gtfs_trips gt on gst.trip_id = gt.trip_id and gst.data_import_id = gt.data_import_id + left join gtfs_calendar gc on gc.service_id = gt.service_id and gst.data_import_id = gc.data_import_id + where stop_id = $1 and gst.data_import_id = $2), + service_ids as (select distinct(service_id) from stop_times), + days_list as ( + with gs as ( + select generate_series(start_date, end_date, '1 day') as day, days, service_id from gtfs_calendar where service_id in (select * from service_ids) and end_date > $5 and data_import_id = $2 + ), + gdow as ( + select day::date, days, extract (isodow from day) as dow, service_id from gs + ), + exception_add as ( + select date as day, service_id from gtfs_calendar_dates where exception_type = 1 and service_id in (select * from service_ids) and data_import_id = $2 + ), + exception_remove as ( + select date as day, service_id from gtfs_calendar_dates where exception_type = 2 and service_id in (select * from service_ids) and data_import_id = $2 + ), + res as ( + select * from gdow where dow = any(days) and day not in (select day from exception_remove er where er.service_id = gdow.service_id) + union select day, '{}', extract (isodow from day), service_id from exception_add + ) + select distinct on (day, service_id) day, service_id from res order by day asc + ), + departures as ( + select st.*, dl.day, dl.day + '12:00:00'::time - interval '12 hours' + st.departure_time as real_departure from stop_times st left join days_list dl on dl.service_id = st.service_id + ) + select stop_id, trip_id, route_id, service_id, real_departure as departure from departures where real_departure > $3 and real_departure < $4 order by real_departure asc; + """ + + %{columns: columns, rows: rows} = + Ecto.Adapters.SQL.query!(DB.Repo, query, [stop_id, import_id, datetime_start, datetime_end, no_dates_before]) + + rows |> Enum.map(fn row -> columns |> Enum.zip(row) |> Map.new() end) + end +end diff --git a/apps/transport/mix.exs b/apps/transport/mix.exs index 4ccbdde4f8..64dce5130c 100644 --- a/apps/transport/mix.exs +++ b/apps/transport/mix.exs @@ -89,7 +89,8 @@ defmodule Transport.Mixfile do {:etag_plug, "~> 1.0"}, {:oban, "~> 2.9"}, {:unzip, "~> 0.6.0"}, - {:protobuf, "~> 0.9.0"} + 
{:protobuf, "~> 0.9.0"}, + {:nimble_csv, "~> 1.2.0"} ] end end diff --git a/apps/transport/test/fixture/files/gtfs_import.zip b/apps/transport/test/fixture/files/gtfs_import.zip new file mode 100644 index 0000000000000000000000000000000000000000..f8aa84ae6c9cb6a9026a6ef8cb00f534ff46ad37 GIT binary patch literal 1936 zcmWIWW@Zs#U|`^2P}2zTN}F^gOcTh{2VyB8cC9GLFDeO6Eh@`QPSs1!NKMWzF3r`; zO)(Wc=E%jMz~OpO*mlMI1U=E+|EI1xYan}NiL0Hk@bQVYdv6{!m{zw)bAJGe1vBq7 zA1?+P+z-TDK%AJKnwMOuS5i@8dvYV!Ap-%13wtlrYs~FW3ogiCIJ;h9VchLzZ@n0y zMgL|s&wrx-BH@nQp6{Pi&A*)f>lFUuWOV!0$el(L54ZoQ-}p7U^v3>+X2+SoOrDhT z?%7u9d`H7$a^I#LIyocg)B%YO)rT88nhjU8n53^3Nes=H%@1}OGsy2Uu|XkR9MTDy zKqqYgVjdt)PRvQoOGzvOIWDxvk*mRghvj{f{mX@GACxbd(P0~*=qq~v*+KQ6tiIlY zk%#zFZv5V(bm^J9jLnKv-Uoa~ITwHF6X5zdN8_+doR*c}6vft<;Gk4*onTxRlFi@@8-DM?m*9NOd(B{mjfvkkc2x5#TJ# z%&pBFtj)}9aG2)=!%T)r23IaNY8f$|l$2t4s=%{DNx4G_$#JVAgF^PQp5RRfI%+l$ zb3z@52zbvrkh}P1_?`3jY~AolsCeO;IY)#vl3L(z&^R#Phcl`zMdmoEa6Y$@7deD ztc}Z`x;Jr&5YJc72irAsn?-{StJW->pky-X>f2dn<;L7P`-?+d+GcqqGdUQa>QTJh zBqqFBCSu+6&1Stfp7i$Zp7cg0Z{hp(XPsxqzP4!95AbGW(q+J1U;&+o1aKE$=;ol8 zO9(>}flNdx1=45FEP|uZP=RVwXxP&D2gpV)LV%?uSeB7Nf`LINfoY4%4aNrR)c>1- zG>ngII%fVyHocz_VLGBTK(S+oaP?M@*>|V@ZW`GUaE2Ne?FbckZK*=zL;Afw&SNr7o7AKlC7 z*$Cm~`9PylG8M9`F!KY7t5}e72ePXe-fOXJ14TrQ*?n+CK=}dQtZX2gIf3vrFv!*e I4PamZ09|NU$N&HU literal 0 HcmV?d00001 diff --git a/apps/transport/test/transport/gtfs_query_test.exs b/apps/transport/test/transport/gtfs_query_test.exs new file mode 100644 index 0000000000..42ee031b9e --- /dev/null +++ b/apps/transport/test/transport/gtfs_query_test.exs @@ -0,0 +1,295 @@ +defmodule Transport.GtfsQueryTest do + use ExUnit.Case + import DB.Factory + import Transport.Jobs.GtfsToDB + + setup do + Ecto.Adapters.SQL.Sandbox.checkout(DB.Repo) + end + + defp insert_a_departure(opts) do + data_import_id = Keyword.fetch!(opts, :data_import_id) + departure_time = Keyword.fetch!(opts, :departure_time) + trip_id = Keyword.get(opts, :trip_id, "trip_1") + stop_id = Keyword.get(opts, :stop_id, "stop_1") + service_id = Keyword.get(opts, :service_id, "service_1") + route_id = Keyword.get(opts, :route_id, "route_1") + days = Keyword.get(opts, :days, [1, 2, 3, 4, 5, 6, 7]) + + :gtfs_stop_times + |> insert( + data_import_id: data_import_id, + trip_id: trip_id, + departure_time: departure_time, + stop_id: stop_id + ) + + :gtfs_trips + |> insert( + data_import_id: data_import_id, + service_id: service_id, + trip_id: trip_id, + route_id: route_id + ) + + :gtfs_calendar + |> insert( + data_import_id: data_import_id, + service_id: service_id, + start_date: Date.new!(2022, 1, 1), + end_date: Date.new!(2022, 1, 31), + days: days + ) + + %{ + data_import_id: data_import_id, + departure_time: departure_time, + trip_id: trip_id, + stop_id: stop_id, + service_id: service_id, + route_id: route_id, + days: days + } + end + + describe "compute next departures" do + test "very simple case" do + departure_time = "08:05:00" |> cast_binary_to_interval() + + %{id: resource_history_id} = :resource_history |> insert(payload: %{}) + %{id: data_import_id} = :data_import |> insert(resource_history_id: resource_history_id) + + %{ + trip_id: trip_id, + stop_id: stop_id, + service_id: service_id, + route_id: route_id + } = insert_a_departure(data_import_id: data_import_id, departure_time: departure_time) + + # find one departure in the next 10 minutes after 8am + [next_departure] = Transport.GtfsQuery.next_departures(stop_id, data_import_id, ~U[2022-01-01 08:00:00Z], 10) + + assert %{ + "departure" => ~N[2022-01-01 08:05:00.000000], + "route_id" => 
^route_id, + "service_id" => ^service_id, + "stop_id" => ^stop_id, + "trip_id" => ^trip_id + } = next_departure + + # find no departure after 8:10am + assert [] = Transport.GtfsQuery.next_departures(stop_id, data_import_id, ~U[2022-01-01 08:10:00Z], 10) + end + + test "handle calendar_dates exception (remove date)" do + departure_time = "08:05:00" |> cast_binary_to_interval() + + %{id: resource_history_id} = :resource_history |> insert(payload: %{}) + %{id: data_import_id} = :data_import |> insert(resource_history_id: resource_history_id) + + %{service_id: service_id, stop_id: stop_id} = + insert_a_departure(data_import_id: data_import_id, departure_time: departure_time) + + # add exception for 2022-01-15 + :gtfs_calendar_dates + |> insert( + data_import_id: data_import_id, + service_id: service_id, + date: Date.new!(2022, 1, 15), + # exception_type 2 means "remove" + exception_type: 2 + ) + + # find one departure the 14th and the 16th + assert [_next_departure] = + Transport.GtfsQuery.next_departures(stop_id, data_import_id, ~U[2022-01-14 08:00:00Z], 10) + + assert [_next_departure] = + Transport.GtfsQuery.next_departures(stop_id, data_import_id, ~U[2022-01-16 08:00:00Z], 10) + + # find no departure the 15th + assert [] = Transport.GtfsQuery.next_departures(stop_id, data_import_id, ~U[2022-01-15 08:00:00Z], 10) + end + + test "handle calendar_dates exception (add date)" do + departure_time = "08:05:00" |> cast_binary_to_interval() + + %{id: resource_history_id} = :resource_history |> insert(payload: %{}) + %{id: data_import_id} = :data_import |> insert(resource_history_id: resource_history_id) + + :gtfs_stop_times + |> insert( + data_import_id: data_import_id, + trip_id: trip_id = "trip_1", + departure_time: departure_time, + stop_id: stop_id = "stop_1" + ) + + :gtfs_trips + |> insert( + data_import_id: data_import_id, + service_id: service_id = "service_1", + trip_id: trip_id, + route_id: route_id = "route_1" + ) + + # add exception for 2022-01-15 + :gtfs_calendar_dates + |> insert( + data_import_id: data_import_id, + service_id: service_id, + date: Date.new!(2022, 1, 15), + # exception_type 1 means "add" + exception_type: 1 + ) + + [next_departure] = Transport.GtfsQuery.next_departures(stop_id, data_import_id, ~U[2022-01-15 08:00:00Z], 10) + + assert %{ + "departure" => ~N[2022-01-15 08:05:00.000000], + "route_id" => ^route_id, + "service_id" => ^service_id, + "stop_id" => ^stop_id, + "trip_id" => ^trip_id + } = next_departure + end + + test "handle calendar_dates exception (add date) when there is an associated calendar" do + departure_time = "08:05:00" |> cast_binary_to_interval() + + %{id: resource_history_id} = :resource_history |> insert(payload: %{}) + %{id: data_import_id} = :data_import |> insert(resource_history_id: resource_history_id) + + # sundays only + %{service_id: service_id, stop_id: stop_id, route_id: route_id, trip_id: trip_id} = + insert_a_departure(data_import_id: data_import_id, departure_time: departure_time, days: [7]) + + # add exception for 2022-01-15 (a saturday) + :gtfs_calendar_dates + |> insert( + data_import_id: data_import_id, + service_id: service_id, + date: Date.new!(2022, 1, 15), + # exception_type 1 means "add" + exception_type: 1 + ) + + # the added exception day + [next_departure] = Transport.GtfsQuery.next_departures(stop_id, data_import_id, ~U[2022-01-15 08:00:00Z], 10) + + assert %{ + "departure" => ~N[2022-01-15 08:05:00.000000], + "route_id" => ^route_id, + "service_id" => ^service_id, + "stop_id" => ^stop_id, + "trip_id" => ^trip_id + } = 
next_departure + + # the regular schedule (a sunday) + [next_departure] = Transport.GtfsQuery.next_departures(stop_id, data_import_id, ~U[2022-01-16 08:00:00Z], 10) + + assert %{ + "departure" => ~N[2022-01-16 08:05:00.000000], + "route_id" => ^route_id, + "service_id" => ^service_id, + "stop_id" => ^stop_id, + "trip_id" => ^trip_id + } = next_departure + + # nothing the next day (a monday) + assert [] = Transport.GtfsQuery.next_departures(stop_id, data_import_id, ~U[2022-01-17 08:00:00Z], 10) + end + + test "filter departures on data_import_id" do + departure_time_1 = "08:05:00" |> cast_binary_to_interval() + departure_time_2 = "08:06:00" |> cast_binary_to_interval() + + %{id: resource_history_id} = :resource_history |> insert(payload: %{}) + %{id: data_import_id_1} = :data_import |> insert(resource_history_id: resource_history_id) + %{id: data_import_id_2} = :data_import |> insert(resource_history_id: resource_history_id) + + %{service_id: service_id, stop_id: stop_id, route_id: route_id, trip_id: trip_id} = + insert_a_departure(data_import_id: data_import_id_1, departure_time: departure_time_1) + + insert_a_departure(data_import_id: data_import_id_2, departure_time: departure_time_2) + + # data_import_1 + [next_departure] = Transport.GtfsQuery.next_departures(stop_id, data_import_id_1, ~U[2022-01-15 08:00:00Z], 10) + + assert %{ + "departure" => ~N[2022-01-15 08:05:00.000000], + "route_id" => ^route_id, + "service_id" => ^service_id, + "stop_id" => ^stop_id, + "trip_id" => ^trip_id + } = next_departure + + # data_import_2 + [next_departure] = Transport.GtfsQuery.next_departures(stop_id, data_import_id_2, ~U[2022-01-15 08:00:00Z], 10) + + assert %{ + "departure" => ~N[2022-01-15 08:06:00.000000], + "route_id" => ^route_id, + "service_id" => ^service_id, + "stop_id" => ^stop_id, + "trip_id" => ^trip_id + } = next_departure + end + + test "two departures from different trips on the same stop" do + departure_time_1 = "08:05:00" |> cast_binary_to_interval() + departure_time_2 = "08:06:00" |> cast_binary_to_interval() + + %{id: resource_history_id} = :resource_history |> insert(payload: %{}) + %{id: data_import_id} = :data_import |> insert(resource_history_id: resource_history_id) + + %{service_id: service_id, stop_id: stop_id, route_id: route_id, trip_id: trip_id} = + insert_a_departure(data_import_id: data_import_id, departure_time: departure_time_1) + + %{service_id: service_id_2, route_id: route_id_2, trip_id: trip_id_2} = + insert_a_departure( + data_import_id: data_import_id, + departure_time: departure_time_2, + trip_id: "other_trip", + route_id: "other_route", + service_id: "service_2" + ) + + [departure1, departure2] = + Transport.GtfsQuery.next_departures(stop_id, data_import_id, ~U[2022-01-15 08:00:00Z], 10) + + assert %{ + "departure" => ~N[2022-01-15 08:05:00.000000], + "route_id" => ^route_id, + "service_id" => ^service_id, + "stop_id" => ^stop_id, + "trip_id" => ^trip_id + } = departure1 + + assert %{ + "departure" => ~N[2022-01-15 08:06:00.000000], + "route_id" => ^route_id_2, + "service_id" => ^service_id_2, + "stop_id" => ^stop_id, + "trip_id" => ^trip_id_2 + } = departure2 + end + + test "a departure with a departure_time > 24:00" do + departure_time = "26:05:00" |> cast_binary_to_interval() + + %{id: resource_history_id} = :resource_history |> insert(payload: %{}) + %{id: data_import_id} = :data_import |> insert(resource_history_id: resource_history_id) + + %{stop_id: stop_id} = + insert_a_departure(data_import_id: data_import_id, departure_time: departure_time, days: [7]) 
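      # Illustrative note, not in the original commit: in the next_departures SQL, real_departure
      # is computed as day + '12:00:00'::time - interval '12 hours' + departure_time, anchoring on
      # noon before stepping back to midnight, per the commit bullet "real definition of the
      # datetime (for time changes)". Worked example for this test:
      # 2022-01-16 12:00 - 12 hours + 26:05:00 = 2022-01-17 02:05.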
+ + # service_id is active on sundays (2022-01-16 for example) + # as the departure_time is > 24:00, the actual departure is the next day (2022-01-17 for example) + [departure] = Transport.GtfsQuery.next_departures(stop_id, data_import_id, ~U[2022-01-17 02:00:00Z], 10) + + # actual time is 26:05 - 24:00 => 02:05 + assert %{"departure" => ~N[2022-01-17 02:05:00.000000]} = departure + end + end +end diff --git a/apps/transport/test/transport/jobs/gtfs_to_db_test.exs b/apps/transport/test/transport/jobs/gtfs_to_db_test.exs new file mode 100644 index 0000000000..7920208fe3 --- /dev/null +++ b/apps/transport/test/transport/jobs/gtfs_to_db_test.exs @@ -0,0 +1,151 @@ +defmodule Transport.Jobs.GtfsToDBTest do + use ExUnit.Case + import DB.Factory + import Transport.Jobs.GtfsToDB + doctest Transport.Jobs.GtfsToDB, import: true + + setup do + Ecto.Adapters.SQL.Sandbox.checkout(DB.Repo) + end + + describe "import a GTFS file to the database" do + test "import stop_times" do + %{id: resource_history_id} = :resource_history |> insert(payload: %{}) + %{id: data_import_id} = :data_import |> insert(resource_history_id: resource_history_id) + + file_stream = stream_local_file("stop_times.txt", "#{__DIR__}/../../fixture/files/gtfs_import.zip") + stop_times_stream_insert(file_stream, data_import_id) + + assert [stop_time_1, stop_time_2] = DB.GtfsStopTimes |> DB.Repo.all() + + arrival_time = struct(Postgrex.Interval, cast_binary_to_interval("08:05:00")) + departure_time = struct(Postgrex.Interval, cast_binary_to_interval("08:06:00")) + + assert %{ + stop_id: "stop_1", + data_import_id: ^data_import_id, + stop_sequence: 1, + trip_id: "trip_1", + arrival_time: ^arrival_time, + departure_time: ^departure_time + } = stop_time_1 + + time = struct(Postgrex.Interval, cast_binary_to_interval("08:10:00")) + + assert %{ + stop_id: "stop_2", + data_import_id: ^data_import_id, + stop_sequence: 1, + trip_id: "trip_2", + arrival_time: ^time, + departure_time: ^time + } = stop_time_2 + end + + test "import stops" do + %{id: resource_history_id} = :resource_history |> insert(payload: %{}) + %{id: data_import_id} = :data_import |> insert(resource_history_id: resource_history_id) + + file_stream = stream_local_file("stops.txt", "#{__DIR__}/../../fixture/files/gtfs_import.zip") + stops_stream_insert(file_stream, data_import_id) + + assert [stop_1, stop_2] = DB.GtfsStops |> DB.Repo.all() + + assert %{ + stop_id: "stop_1", + data_import_id: ^data_import_id, + stop_name: "Frioul", + stop_lat: 43.28, + stop_lon: 5.3, + location_type: 1 + } = stop_1 + + assert %{ + stop_id: "stop_2", + data_import_id: ^data_import_id, + stop_name: "stop_name", + location_type: 2 + } = stop_2 + end + + test "import calendar" do + %{id: resource_history_id} = :resource_history |> insert(payload: %{}) + %{id: data_import_id} = :data_import |> insert(resource_history_id: resource_history_id) + + file_stream = stream_local_file("calendar.txt", "#{__DIR__}/../../fixture/files/gtfs_import.zip") + calendar_stream_insert(file_stream, data_import_id) + + assert [cal_1, cal_2] = DB.GtfsCalendar |> DB.Repo.all() + + assert %{ + service_id: "service_1", + data_import_id: ^data_import_id, + days: [1, 2, 3, 4, 5, 6, 7], + start_date: ~D[2022-01-01], + end_date: ~D[2022-01-31] + } = cal_1 + + assert %{ + service_id: "service_2", + data_import_id: ^data_import_id, + days: [1, 3, 7], + start_date: ~D[2022-06-01], + end_date: ~D[2023-01-31] + } = cal_2 + end + + test "import calendar dates" do + %{id: resource_history_id} = :resource_history |> insert(payload: %{}) + 
%{id: data_import_id} = :data_import |> insert(resource_history_id: resource_history_id) + + file_stream = stream_local_file("calendar_dates.txt", "#{__DIR__}/../../fixture/files/gtfs_import.zip") + calendar_dates_stream_insert(file_stream, data_import_id) + + assert [cald_1, cald_2] = DB.GtfsCalendarDates |> DB.Repo.all() + + assert %{ + service_id: "service_1", + data_import_id: ^data_import_id, + date: ~D[2022-01-15], + exception_type: 1 + } = cald_1 + + assert %{ + service_id: "service_2", + data_import_id: ^data_import_id, + date: ~D[2022-01-16], + exception_type: 2 + } = cald_2 + end + + test "import trips" do + %{id: resource_history_id} = :resource_history |> insert(payload: %{}) + %{id: data_import_id} = :data_import |> insert(resource_history_id: resource_history_id) + + file_stream = stream_local_file("trips.txt", "#{__DIR__}/../../fixture/files/gtfs_import.zip") + trips_stream_insert(file_stream, data_import_id) + + assert [trip_1, trip_2] = DB.GtfsTrips |> DB.Repo.all() + + assert %{ + route_id: "route_1", + service_id: "service_1", + trip_id: "trip_1", + data_import_id: ^data_import_id + } = trip_1 + + assert %{ + route_id: "route_2", + service_id: "service_2", + trip_id: "trip_2", + data_import_id: ^data_import_id + } = trip_2 + end + end + + def stream_local_file(file_name, zip_path) do + zip_file = Unzip.LocalFile.open(zip_path) + {:ok, unzip} = Unzip.new(zip_file) + Unzip.file_stream!(unzip, file_name) + end +end diff --git a/mix.lock b/mix.lock index 1d47d4c63c..5cffe4bb40 100644 --- a/mix.lock +++ b/mix.lock @@ -20,6 +20,7 @@ "earmark": {:hex, :earmark, "1.4.20", "d5097b1c7417a03c73a2985fcf01c3f72192c427b8a498719737dca5273938cb", [:mix], [{:earmark_parser, "== 1.4.18", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm", "7be744242dbde74c858279f4a65d9d31f37d163190d739340015c30038c1edb3"}, "earmark_parser": {:hex, :earmark_parser, "1.4.18", "e1b2be73eb08a49fb032a0208bf647380682374a725dfb5b9e510def8397f6f2", [:mix], [], "hexpm", "114a0e85ec3cf9e04b811009e73c206394ffecfcc313e0b346de0d557774ee97"}, "ecto": {:hex, :ecto, "3.7.1", "a20598862351b29f80f285b21ec5297da1181c0442687f9b8329f0445d228892", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "d36e5b39fc479e654cffd4dbe1865d9716e4a9b6311faff799b6f90ab81b8638"}, + "ecto_interval": {:git, "https://github.com/etalab/ecto_interval", "b4e52a0a0adb3b97888f76d4f86433564754db59", [ref: "master"]}, "ecto_sql": {:hex, :ecto_sql, "3.7.2", "55c60aa3a06168912abf145c6df38b0295c34118c3624cf7a6977cd6ce043081", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.7.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.4.0 or ~> 0.5.0 or ~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0 or ~> 0.16.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3c218ea62f305dcaef0b915fb56583195e7b91c91dcfb006ba1f669bfacbff2a"}, "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, "etag_plug": {:hex, :etag_plug, "1.0.0", 
"2252e16ac5da6bc5367907437bd760740bb209a7e73e821b756f6c9ea808d210", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "fcde90c1f7fa20c624036d4b5e023714f7945031bc8bcf03d1ec43bf462800c1"}, @@ -58,6 +59,7 @@ "mochiweb": {:hex, :mochiweb, "2.22.0", "f104d6747c01a330c38613561977e565b788b9170055c5241ac9dd6e4617cba5", [:rebar3], [], "hexpm", "cbbd1fd315d283c576d1c8a13e0738f6dafb63dc840611249608697502a07655"}, "mock": {:hex, :mock, "0.3.7", "75b3bbf1466d7e486ea2052a73c6e062c6256fb429d6797999ab02fa32f29e03", [:mix], [{:meck, "~> 0.9.2", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm", "4da49a4609e41fd99b7836945c26f373623ea968cfb6282742bcb94440cf7e5c"}, "mox": {:hex, :mox, "1.0.1", "b651bf0113265cda0ba3a827fcb691f848b683c373b77e7d7439910a8d754d6e", [:mix], [], "hexpm", "35bc0dea5499d18db4ef7fe4360067a59b06c74376eb6ab3bd67e6295b133469"}, + "nimble_csv": {:hex, :nimble_csv, "1.2.0", "4e26385d260c61eba9d4412c71cea34421f296d5353f914afe3f2e71cce97722", [:mix], [], "hexpm", "d0628117fcc2148178b034044c55359b26966c6eaa8e2ce15777be3bbc91b12a"}, "nimble_options": {:hex, :nimble_options, "0.4.0", "c89babbab52221a24b8d1ff9e7d838be70f0d871be823165c94dd3418eea728f", [:mix], [], "hexpm", "e6701c1af326a11eea9634a3b1c62b475339ace9456c1a23ec3bc9a847bca02d"}, "nimble_pool": {:hex, :nimble_pool, "0.2.4", "1db8e9f8a53d967d595e0b32a17030cdb6c0dc4a451b8ac787bf601d3f7704c3", [:mix], [], "hexpm", "367e8071e137b787764e6a9992ccb57b276dc2282535f767a07d881951ebeac6"}, "oauth2": {:hex, :oauth2, "2.0.0", "338382079fe16c514420fa218b0903f8ad2d4bfc0ad0c9f988867dfa246731b0", [:mix], [{:hackney, "~> 1.13", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "881b8364ac7385f9fddc7949379cbe3f7081da37233a1aa7aab844670a91e7e7"},