diff --git a/.env b/.env index 5292ddb7..7f90f066 100644 --- a/.env +++ b/.env @@ -1,3 +1,4 @@ ## API DATABASE_URL=postgresql://stationapi:stationapi@localhost/stationapi -DISABLE_GRPC_WEB=false \ No newline at end of file +DISABLE_BUS_FEATURE=false +DISABLE_GRPC_WEB=false diff --git a/.gitignore b/.gitignore index 5eb88d26..63ad20a2 100644 --- a/.gitignore +++ b/.gitignore @@ -4,4 +4,5 @@ server.crt server.key api_descriptor.pb tmp.sql -.vscode/ \ No newline at end of file +.vscode/ +data/ToeiBus-GTFS/ \ No newline at end of file diff --git a/.sqlx/query-85c490dd8992db5950dfd6c141b02048f361c5e9ac82cb7503fd8e75266cc06f.json b/.sqlx/query-01a96a6f40ab08c532b4e7f3cc493906213e6ec80d495a2fe569ccf1d94b4834.json similarity index 93% rename from .sqlx/query-85c490dd8992db5950dfd6c141b02048f361c5e9ac82cb7503fd8e75266cc06f.json rename to .sqlx/query-01a96a6f40ab08c532b4e7f3cc493906213e6ec80d495a2fe569ccf1d94b4834.json index 9da35ea2..98d6d775 100644 --- a/.sqlx/query-85c490dd8992db5950dfd6c141b02048f361c5e9ac82cb7503fd8e75266cc06f.json +++ b/.sqlx/query-01a96a6f40ab08c532b4e7f3cc493906213e6ec80d495a2fe569ccf1d94b4834.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT \n l.line_cd,\n l.company_cd,\n l.line_type,\n l.line_name,\n l.line_name_k,\n l.line_name_h,\n l.line_name_r,\n l.line_name_zh,\n l.line_name_ko,\n l.line_color_c,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n l.e_status,\n l.e_sort,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n CAST(NULL AS INTEGER) AS line_group_cd,\n CAST(NULL AS INTEGER) AS station_cd,\n CAST(NULL AS INTEGER) AS station_g_cd,\n CAST(NULL AS INTEGER) AS type_cd\n FROM lines AS l\n WHERE l.line_cd = $1\n AND l.e_status = 0", + "query": "SELECT \n l.line_cd,\n l.company_cd,\n 
l.line_type,\n l.line_name,\n l.line_name_k,\n l.line_name_h,\n l.line_name_r,\n l.line_name_zh,\n l.line_name_ko,\n l.line_color_c,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n l.e_status,\n l.e_sort,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n CAST(NULL AS INTEGER) AS line_group_cd,\n CAST(NULL AS INTEGER) AS station_cd,\n CAST(NULL AS INTEGER) AS station_g_cd,\n CAST(NULL AS INTEGER) AS type_cd,\n l.transport_type\n FROM lines AS l\n WHERE l.line_cd = $1\n AND l.e_status = 0", "describe": { "columns": [ { @@ -147,6 +147,11 @@ "ordinal": 28, "name": "type_cd", "type_info": "Int4" + }, + { + "ordinal": 29, + "name": "transport_type", + "type_info": "Int4" } ], "parameters": { @@ -183,8 +188,9 @@ null, null, null, - null + null, + false ] }, - "hash": "85c490dd8992db5950dfd6c141b02048f361c5e9ac82cb7503fd8e75266cc06f" + "hash": "01a96a6f40ab08c532b4e7f3cc493906213e6ec80d495a2fe569ccf1d94b4834" } diff --git a/.sqlx/query-7a7492b76cb13f18b36cc3b488c0b3448c8ca29cbdb5e6f2390d2b22b886f81e.json b/.sqlx/query-0bd668d4bac8ff6e3df4eff660f112043461a1461884f8461dfc6235208825a6.json similarity index 88% rename from .sqlx/query-7a7492b76cb13f18b36cc3b488c0b3448c8ca29cbdb5e6f2390d2b22b886f81e.json rename to .sqlx/query-0bd668d4bac8ff6e3df4eff660f112043461a1461884f8461dfc6235208825a6.json index d237db04..ef8b49ce 100644 --- a/.sqlx/query-7a7492b76cb13f18b36cc3b488c0b3448c8ca29cbdb5e6f2390d2b22b886f81e.json +++ b/.sqlx/query-0bd668d4bac8ff6e3df4eff660f112043461a1461884f8461dfc6235208825a6.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT DISTINCT l.line_cd,\n l.line_name,\n l.line_name_k,\n l.line_name_h,\n l.line_name_r,\n l.line_name_zh,\n l.line_name_ko,\n l.line_color_c,\n l.company_cd,\n l.line_type,\n 
l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n l.e_status,\n l.e_sort,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n sst.line_group_cd,\n sst.type_cd,\n s.station_cd,\n s.station_g_cd\n FROM lines AS l\n JOIN stations AS s ON s.station_g_cd = $1\n AND s.e_status = 0\n JOIN station_station_types AS sst ON sst.station_cd = s.station_cd AND sst.pass <> 1\n WHERE l.line_cd = s.line_cd\n AND l.e_status = 0", + "query": "SELECT DISTINCT l.line_cd,\n l.line_name,\n l.line_name_k,\n l.line_name_h,\n l.line_name_r,\n l.line_name_zh,\n l.line_name_ko,\n l.line_color_c,\n l.company_cd,\n l.line_type,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n l.e_status,\n l.e_sort,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n sst.line_group_cd,\n sst.type_cd,\n s.station_cd,\n s.station_g_cd,\n l.transport_type\n FROM lines AS l\n JOIN stations AS s ON s.station_g_cd = $1\n AND s.e_status = 0\n LEFT JOIN station_station_types AS sst ON sst.station_cd = s.station_cd AND sst.pass <> 1\n WHERE l.line_cd = s.line_cd\n AND l.e_status = 0", "describe": { "columns": [ { @@ -147,6 +147,11 @@ "ordinal": 28, "name": "station_g_cd", "type_info": "Int4" + }, + { + "ordinal": 29, + "name": "transport_type", + "type_info": "Int4" } ], "parameters": { @@ -183,8 +188,9 @@ false, false, false, + false, false ] }, - "hash": "7a7492b76cb13f18b36cc3b488c0b3448c8ca29cbdb5e6f2390d2b22b886f81e" + "hash": "0bd668d4bac8ff6e3df4eff660f112043461a1461884f8461dfc6235208825a6" } diff --git 
a/.sqlx/query-c2be68e8c91643032540456639f9d1aaa9d6a8e3705162b694ac4d34bf5b4fbe.json b/.sqlx/query-16b301ca5df4be89b803987fd71cc41b55e885ccd4bd3ca4bcae1f7336742d3f.json similarity index 80% rename from .sqlx/query-c2be68e8c91643032540456639f9d1aaa9d6a8e3705162b694ac4d34bf5b4fbe.json rename to .sqlx/query-16b301ca5df4be89b803987fd71cc41b55e885ccd4bd3ca4bcae1f7336742d3f.json index 1a1598db..0d154880 100644 --- a/.sqlx/query-c2be68e8c91643032540456639f9d1aaa9d6a8e3705162b694ac4d34bf5b4fbe.json +++ b/.sqlx/query-16b301ca5df4be89b803987fd71cc41b55e885ccd4bd3ca4bcae1f7336742d3f.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "WITH from_stations AS (\n SELECT\n s.station_cd,\n s.line_cd\n FROM stations AS s\n WHERE s.station_g_cd = $1\n AND s.e_status = 0\n ),\n filtered AS (\n SELECT DISTINCT ON (s.station_cd)\n s.station_cd,\n s.station_g_cd,\n s.station_name,\n s.station_name_k,\n s.station_name_r,\n s.station_name_rn,\n s.station_name_zh,\n s.station_name_ko,\n s.station_number1,\n s.station_number2,\n s.station_number3,\n s.station_number4,\n s.three_letter_code,\n s.line_cd,\n s.pref_cd,\n s.post,\n s.address,\n s.lon,\n s.lat,\n s.open_ymd,\n s.close_ymd,\n s.e_status,\n s.e_sort,\n l.company_cd,\n COALESCE(NULLIF(COALESCE(a.line_name, l.line_name), ''), NULL) AS line_name,\n COALESCE(NULLIF(COALESCE(a.line_name_k, l.line_name_k), ''), NULL) AS line_name_k,\n COALESCE(NULLIF(COALESCE(a.line_name_h, l.line_name_h), ''), NULL) AS line_name_h,\n COALESCE(NULLIF(COALESCE(a.line_name_r, l.line_name_r), ''), NULL) AS line_name_r,\n COALESCE(NULLIF(COALESCE(a.line_name_zh, l.line_name_zh), ''), NULL) AS line_name_zh,\n COALESCE(NULLIF(COALESCE(a.line_name_ko, l.line_name_ko), ''), NULL) AS line_name_ko,\n COALESCE(NULLIF(COALESCE(a.line_color_c, l.line_color_c), ''), NULL) AS line_color_c,\n l.line_type,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n 
l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n COALESCE(from_sst.line_group_cd, NULL)::int AS line_group_cd, -- has_train_types用\n NULL::int AS type_id,\n NULL::int AS sst_id,\n NULL::int AS type_cd,\n NULL::int AS pass,\n NULL::text AS type_name,\n NULL::text AS type_name_k,\n NULL::text AS type_name_r,\n NULL::text AS type_name_zh,\n NULL::text AS type_name_ko,\n NULL::text AS color,\n NULL::int AS direction,\n NULL::int AS kind\n FROM stations AS s\n LEFT JOIN from_stations AS fs\n ON fs.station_cd IS NOT NULL\n LEFT JOIN station_station_types AS from_sst\n ON from_sst.station_cd = fs.station_cd\n LEFT JOIN station_station_types AS dst_sst\n ON dst_sst.station_cd = s.station_cd\n LEFT JOIN types AS t\n ON t.type_cd = dst_sst.type_cd\n LEFT JOIN line_aliases AS la\n ON la.station_cd = s.station_cd\n LEFT JOIN aliases AS a\n ON la.alias_cd = a.id\n JOIN lines AS l\n ON l.line_cd = s.line_cd\n AND l.e_status = 0\n WHERE\n (\n s.station_name LIKE $2\n OR s.station_name_rn LIKE $3\n OR s.station_name_k LIKE $4\n OR s.station_name_zh LIKE $5\n OR s.station_name_ko LIKE $6\n )\n AND s.e_status = 0\n AND (\n (\n from_sst.id IS NOT NULL\n AND dst_sst.id IS NOT NULL\n AND from_sst.line_group_cd = dst_sst.line_group_cd\n AND dst_sst.pass <> 1\n )\n OR\n (\n (from_sst.id IS NULL OR dst_sst.id IS NULL)\n AND s.line_cd = COALESCE(fs.line_cd, s.line_cd)\n )\n )\n ORDER BY s.station_cd, s.station_g_cd, s.station_name\n )\n SELECT *\n FROM filtered\n ORDER BY station_g_cd, station_name\n LIMIT $7", + "query": "WITH from_stations AS (\n SELECT\n s.station_cd,\n s.line_cd\n FROM stations AS s\n WHERE s.station_g_cd = $1\n AND s.e_status = 0\n ),\n filtered AS (\n SELECT DISTINCT ON (s.station_cd)\n s.station_cd,\n s.station_g_cd,\n s.station_name,\n s.station_name_k,\n s.station_name_r,\n s.station_name_rn,\n s.station_name_zh,\n 
s.station_name_ko,\n s.station_number1,\n s.station_number2,\n s.station_number3,\n s.station_number4,\n s.three_letter_code,\n s.line_cd,\n s.pref_cd,\n s.post,\n s.address,\n s.lon,\n s.lat,\n s.open_ymd,\n s.close_ymd,\n s.e_status,\n s.e_sort,\n l.company_cd,\n COALESCE(NULLIF(COALESCE(a.line_name, l.line_name), ''), NULL) AS line_name,\n COALESCE(NULLIF(COALESCE(a.line_name_k, l.line_name_k), ''), NULL) AS line_name_k,\n COALESCE(NULLIF(COALESCE(a.line_name_h, l.line_name_h), ''), NULL) AS line_name_h,\n COALESCE(NULLIF(COALESCE(a.line_name_r, l.line_name_r), ''), NULL) AS line_name_r,\n COALESCE(NULLIF(COALESCE(a.line_name_zh, l.line_name_zh), ''), NULL) AS line_name_zh,\n COALESCE(NULLIF(COALESCE(a.line_name_ko, l.line_name_ko), ''), NULL) AS line_name_ko,\n COALESCE(NULLIF(COALESCE(a.line_color_c, l.line_color_c), ''), NULL) AS line_color_c,\n l.line_type,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n COALESCE(from_sst.line_group_cd, NULL)::int AS line_group_cd, -- has_train_types用\n NULL::int AS type_id,\n NULL::int AS sst_id,\n NULL::int AS type_cd,\n NULL::int AS pass,\n NULL::text AS type_name,\n NULL::text AS type_name_k,\n NULL::text AS type_name_r,\n NULL::text AS type_name_zh,\n NULL::text AS type_name_ko,\n NULL::text AS color,\n NULL::int AS direction,\n NULL::int AS kind,\n s.transport_type\n FROM stations AS s\n LEFT JOIN from_stations AS fs\n ON fs.station_cd IS NOT NULL\n LEFT JOIN station_station_types AS from_sst\n ON from_sst.station_cd = fs.station_cd\n LEFT JOIN station_station_types AS dst_sst\n ON dst_sst.station_cd = s.station_cd\n LEFT JOIN types AS t\n ON t.type_cd = dst_sst.type_cd\n LEFT JOIN line_aliases AS la\n ON la.station_cd = 
s.station_cd\n LEFT JOIN aliases AS a\n ON la.alias_cd = a.id\n JOIN lines AS l\n ON l.line_cd = s.line_cd\n AND l.e_status = 0\n WHERE\n (\n s.station_name LIKE $2\n OR s.station_name_rn LIKE $3\n OR s.station_name_k LIKE $4\n OR s.station_name_zh LIKE $5\n OR s.station_name_ko LIKE $6\n )\n AND s.e_status = 0\n AND ($8::int IS NULL OR COALESCE(s.transport_type, 0) = $8)\n AND (\n (\n from_sst.id IS NOT NULL\n AND dst_sst.id IS NOT NULL\n AND from_sst.line_group_cd = dst_sst.line_group_cd\n AND dst_sst.pass <> 1\n )\n OR\n (\n (from_sst.id IS NULL OR dst_sst.id IS NULL)\n AND s.line_cd = COALESCE(fs.line_cd, s.line_cd)\n )\n )\n ORDER BY s.station_cd, s.station_g_cd, s.station_name\n )\n SELECT *\n FROM filtered\n ORDER BY station_g_cd, station_name\n LIMIT $7", "describe": { "columns": [ { @@ -292,6 +292,11 @@ "ordinal": 57, "name": "kind", "type_info": "Int4" + }, + { + "ordinal": 58, + "name": "transport_type", + "type_info": "Int4" } ], "parameters": { @@ -302,7 +307,8 @@ "Text", "Text", "Text", - "Int8" + "Int8", + "Int4" ] }, "nullable": [ @@ -363,8 +369,9 @@ null, null, null, - null + null, + false ] }, - "hash": "c2be68e8c91643032540456639f9d1aaa9d6a8e3705162b694ac4d34bf5b4fbe" + "hash": "16b301ca5df4be89b803987fd71cc41b55e885ccd4bd3ca4bcae1f7336742d3f" } diff --git a/.sqlx/query-3ec853ec90f7ed1b17658087c6b57da19ad084925a8cfc39d91297548ae25f8c.json b/.sqlx/query-20f83c69618f5ad0ba1a7fc15e48ac7b77b61446b7c104121ae09b2371514eb2.json similarity index 86% rename from .sqlx/query-3ec853ec90f7ed1b17658087c6b57da19ad084925a8cfc39d91297548ae25f8c.json rename to .sqlx/query-20f83c69618f5ad0ba1a7fc15e48ac7b77b61446b7c104121ae09b2371514eb2.json index d84cc089..f8cb21f7 100644 --- a/.sqlx/query-3ec853ec90f7ed1b17658087c6b57da19ad084925a8cfc39d91297548ae25f8c.json +++ b/.sqlx/query-20f83c69618f5ad0ba1a7fc15e48ac7b77b61446b7c104121ae09b2371514eb2.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT \n l.line_cd,\n l.company_cd,\n l.line_type,\n 
l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n l.e_status,\n l.e_sort,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n s.station_cd,\n s.station_g_cd,\n sst.line_group_cd,\n sst.type_cd,\n l.line_name,\n l.line_name_k,\n l.line_name_h,\n l.line_name_r,\n l.line_name_zh,\n l.line_name_ko,\n l.line_color_c\n FROM lines AS l\n JOIN station_station_types AS sst ON sst.line_group_cd = $1 AND sst.pass <> 1\n JOIN stations AS s ON s.station_cd = sst.station_cd\n AND s.e_status = 0\n AND l.line_cd = s.line_cd\n LEFT JOIN line_aliases AS la ON la.station_cd = s.station_cd\n LEFT JOIN aliases AS a ON la.alias_cd = a.id\n WHERE l.e_status = 0", + "query": "SELECT \n l.line_cd,\n l.company_cd,\n l.line_type,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n l.e_status,\n l.e_sort,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n s.station_cd,\n s.station_g_cd,\n sst.line_group_cd,\n sst.type_cd,\n l.line_name,\n l.line_name_k,\n l.line_name_h,\n l.line_name_r,\n l.line_name_zh,\n l.line_name_ko,\n l.line_color_c,\n l.transport_type\n FROM lines AS l\n JOIN station_station_types AS sst ON sst.line_group_cd = $1 AND sst.pass <> 1\n JOIN stations AS s ON s.station_cd = sst.station_cd\n AND s.e_status = 0\n AND l.line_cd = s.line_cd\n LEFT JOIN line_aliases AS la ON la.station_cd = s.station_cd\n LEFT JOIN aliases AS a ON la.alias_cd = a.id\n WHERE l.e_status = 0", "describe": { "columns": [ { @@ -147,6 +147,11 @@ "ordinal": 28, "name": "line_color_c", "type_info": "Text" + }, + { + "ordinal": 29, + 
"name": "transport_type", + "type_info": "Int4" } ], "parameters": { @@ -183,8 +188,9 @@ false, true, true, + false, false ] }, - "hash": "3ec853ec90f7ed1b17658087c6b57da19ad084925a8cfc39d91297548ae25f8c" + "hash": "20f83c69618f5ad0ba1a7fc15e48ac7b77b61446b7c104121ae09b2371514eb2" } diff --git a/.sqlx/query-4c514298af7222e5c7085857d72370a5eab2f9ac56bb450e0d2303807abfb849.json b/.sqlx/query-2217a8a08cc485bd86fde1e2312d80eeb5957224af02af9c634ef262e5d6a0e0.json similarity index 92% rename from .sqlx/query-4c514298af7222e5c7085857d72370a5eab2f9ac56bb450e0d2303807abfb849.json rename to .sqlx/query-2217a8a08cc485bd86fde1e2312d80eeb5957224af02af9c634ef262e5d6a0e0.json index 21751f83..44f81734 100644 --- a/.sqlx/query-4c514298af7222e5c7085857d72370a5eab2f9ac56bb450e0d2303807abfb849.json +++ b/.sqlx/query-2217a8a08cc485bd86fde1e2312d80eeb5957224af02af9c634ef262e5d6a0e0.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT s.station_cd,\n s.station_g_cd,\n s.station_name,\n s.station_name_k,\n s.station_name_r,\n s.station_name_rn,\n s.station_name_zh,\n s.station_name_ko,\n s.station_number1,\n s.station_number2,\n s.station_number3,\n s.station_number4,\n s.three_letter_code,\n s.line_cd,\n s.pref_cd,\n s.post,\n s.address,\n s.lon,\n s.lat,\n s.open_ymd,\n s.close_ymd,\n s.e_status,\n s.e_sort,\n l.company_cd,\n COALESCE(NULLIF(COALESCE(a.line_name, l.line_name), ''), NULL) AS line_name,\n COALESCE(NULLIF(COALESCE(a.line_name_k, l.line_name_k), ''), NULL) AS line_name_k,\n COALESCE(NULLIF(COALESCE(a.line_name_h, l.line_name_h), ''), NULL) AS line_name_h,\n COALESCE(NULLIF(COALESCE(a.line_name_r, l.line_name_r), ''), NULL) AS line_name_r,\n COALESCE(NULLIF(COALESCE(a.line_name_zh, l.line_name_zh), ''), NULL) AS line_name_zh,\n COALESCE(NULLIF(COALESCE(a.line_name_ko, l.line_name_ko), ''), NULL) AS line_name_ko,\n COALESCE(NULLIF(COALESCE(a.line_color_c, l.line_color_c), ''), NULL) AS line_color_c,\n l.line_type,\n l.line_symbol1,\n l.line_symbol2,\n 
l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n t.id AS type_id,\n sst.id AS sst_id,\n sst.type_cd,\n sst.line_group_cd,\n sst.pass,\n t.type_name,\n t.type_name_k,\n t.type_name_r,\n t.type_name_zh,\n t.type_name_ko,\n t.color,\n t.direction,\n t.kind\n FROM stations AS s\n JOIN lines AS l ON l.line_cd = s.line_cd\n LEFT JOIN station_station_types AS sst ON sst.station_cd = s.station_cd\n LEFT JOIN types AS t ON t.type_cd = sst.type_cd\n LEFT JOIN line_aliases AS la ON la.station_cd = s.station_cd\n LEFT JOIN aliases AS a ON a.id = la.alias_cd\n WHERE s.station_cd = $1\n AND s.e_status = 0\n AND l.e_status = 0\n LIMIT 1", + "query": "SELECT s.station_cd,\n s.station_g_cd,\n s.station_name,\n s.station_name_k,\n s.station_name_r,\n s.station_name_rn,\n s.station_name_zh,\n s.station_name_ko,\n s.station_number1,\n s.station_number2,\n s.station_number3,\n s.station_number4,\n s.three_letter_code,\n s.line_cd,\n s.pref_cd,\n s.post,\n s.address,\n s.lon,\n s.lat,\n s.open_ymd,\n s.close_ymd,\n s.e_status,\n s.e_sort,\n l.company_cd,\n COALESCE(NULLIF(COALESCE(a.line_name, l.line_name), ''), NULL) AS line_name,\n COALESCE(NULLIF(COALESCE(a.line_name_k, l.line_name_k), ''), NULL) AS line_name_k,\n COALESCE(NULLIF(COALESCE(a.line_name_h, l.line_name_h), ''), NULL) AS line_name_h,\n COALESCE(NULLIF(COALESCE(a.line_name_r, l.line_name_r), ''), NULL) AS line_name_r,\n COALESCE(NULLIF(COALESCE(a.line_name_zh, l.line_name_zh), ''), NULL) AS line_name_zh,\n COALESCE(NULLIF(COALESCE(a.line_name_ko, l.line_name_ko), ''), NULL) AS line_name_ko,\n COALESCE(NULLIF(COALESCE(a.line_color_c, l.line_color_c), ''), NULL) AS line_color_c,\n l.line_type,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n 
l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n t.id AS type_id,\n sst.id AS sst_id,\n sst.type_cd,\n sst.line_group_cd,\n sst.pass,\n t.type_name,\n t.type_name_k,\n t.type_name_r,\n t.type_name_zh,\n t.type_name_ko,\n t.color,\n t.direction,\n t.kind,\n s.transport_type\n FROM stations AS s\n JOIN lines AS l ON l.line_cd = s.line_cd\n LEFT JOIN station_station_types AS sst ON sst.station_cd = s.station_cd\n LEFT JOIN types AS t ON t.type_cd = sst.type_cd\n LEFT JOIN line_aliases AS la ON la.station_cd = s.station_cd\n LEFT JOIN aliases AS a ON a.id = la.alias_cd\n WHERE s.station_cd = $1\n AND s.e_status = 0\n AND l.e_status = 0\n LIMIT 1", "describe": { "columns": [ { @@ -292,6 +292,11 @@ "ordinal": 57, "name": "kind", "type_info": "Int4" + }, + { + "ordinal": 58, + "name": "transport_type", + "type_info": "Int4" } ], "parameters": { @@ -357,8 +362,9 @@ false, false, true, - true + true, + false ] }, - "hash": "4c514298af7222e5c7085857d72370a5eab2f9ac56bb450e0d2303807abfb849" + "hash": "2217a8a08cc485bd86fde1e2312d80eeb5957224af02af9c634ef262e5d6a0e0" } diff --git a/.sqlx/query-587f17667fd03f8f3007ef9e754a168a2a09d30a408f98fdbd6f2b1fa4562a8c.json b/.sqlx/query-587f17667fd03f8f3007ef9e754a168a2a09d30a408f98fdbd6f2b1fa4562a8c.json deleted file mode 100644 index 70ebc41c..00000000 --- a/.sqlx/query-587f17667fd03f8f3007ef9e754a168a2a09d30a408f98fdbd6f2b1fa4562a8c.json +++ /dev/null @@ -1,364 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT \n s.station_cd,\n s.station_g_cd,\n s.station_name,\n s.station_name_k,\n s.station_name_r,\n s.station_name_rn,\n s.station_name_zh,\n s.station_name_ko,\n s.station_number1,\n s.station_number2,\n s.station_number3,\n s.station_number4,\n s.three_letter_code,\n s.line_cd,\n s.pref_cd,\n s.post,\n 
s.address,\n s.lon,\n s.lat,\n s.open_ymd,\n s.close_ymd,\n s.e_status,\n s.e_sort,\n l.company_cd,\n COALESCE(NULLIF(COALESCE(a.line_name, l.line_name), ''), NULL) AS line_name,\n COALESCE(NULLIF(COALESCE(a.line_name_k, l.line_name_k), ''), NULL) AS line_name_k,\n COALESCE(NULLIF(COALESCE(a.line_name_h, l.line_name_h), ''), NULL) AS line_name_h,\n COALESCE(NULLIF(COALESCE(a.line_name_r, l.line_name_r), ''), NULL) AS line_name_r,\n COALESCE(NULLIF(COALESCE(a.line_name_zh, l.line_name_zh), ''), NULL) AS line_name_zh,\n COALESCE(NULLIF(COALESCE(a.line_name_ko, l.line_name_ko), ''), NULL) AS line_name_ko,\n COALESCE(NULLIF(COALESCE(a.line_color_c, l.line_color_c), ''), NULL) AS line_color_c,\n l.line_type,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n NULL::int AS type_id,\n NULL::int AS sst_id,\n NULL::int AS type_cd,\n NULL::int AS line_group_cd,\n NULL::int AS pass,\n NULL::text AS type_name,\n NULL::text AS type_name_k,\n NULL::text AS type_name_r,\n NULL::text AS type_name_zh,\n NULL::text AS type_name_ko,\n NULL::text AS color,\n NULL::int AS direction,\n NULL::int AS kind\n FROM stations AS s\n JOIN lines AS l ON l.line_cd = s.line_cd\n LEFT JOIN line_aliases AS la ON la.station_cd = s.station_cd\n LEFT JOIN aliases AS a ON a.id = la.alias_cd\n WHERE l.line_cd = $1\n AND s.e_status = 0\n AND l.e_status = 0\n ORDER BY s.e_sort, s.station_cd ASC", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "station_cd", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "station_g_cd", - "type_info": "Int4" - }, - { - "ordinal": 2, - "name": "station_name", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "station_name_k", - "type_info": "Text" - }, - { - 
"ordinal": 4, - "name": "station_name_r", - "type_info": "Text" - }, - { - "ordinal": 5, - "name": "station_name_rn", - "type_info": "Text" - }, - { - "ordinal": 6, - "name": "station_name_zh", - "type_info": "Text" - }, - { - "ordinal": 7, - "name": "station_name_ko", - "type_info": "Text" - }, - { - "ordinal": 8, - "name": "station_number1", - "type_info": "Text" - }, - { - "ordinal": 9, - "name": "station_number2", - "type_info": "Text" - }, - { - "ordinal": 10, - "name": "station_number3", - "type_info": "Text" - }, - { - "ordinal": 11, - "name": "station_number4", - "type_info": "Text" - }, - { - "ordinal": 12, - "name": "three_letter_code", - "type_info": "Text" - }, - { - "ordinal": 13, - "name": "line_cd", - "type_info": "Int4" - }, - { - "ordinal": 14, - "name": "pref_cd", - "type_info": "Int4" - }, - { - "ordinal": 15, - "name": "post", - "type_info": "Text" - }, - { - "ordinal": 16, - "name": "address", - "type_info": "Text" - }, - { - "ordinal": 17, - "name": "lon", - "type_info": "Float8" - }, - { - "ordinal": 18, - "name": "lat", - "type_info": "Float8" - }, - { - "ordinal": 19, - "name": "open_ymd", - "type_info": "Text" - }, - { - "ordinal": 20, - "name": "close_ymd", - "type_info": "Text" - }, - { - "ordinal": 21, - "name": "e_status", - "type_info": "Int4" - }, - { - "ordinal": 22, - "name": "e_sort", - "type_info": "Int4" - }, - { - "ordinal": 23, - "name": "company_cd", - "type_info": "Int4" - }, - { - "ordinal": 24, - "name": "line_name", - "type_info": "Text" - }, - { - "ordinal": 25, - "name": "line_name_k", - "type_info": "Text" - }, - { - "ordinal": 26, - "name": "line_name_h", - "type_info": "Text" - }, - { - "ordinal": 27, - "name": "line_name_r", - "type_info": "Text" - }, - { - "ordinal": 28, - "name": "line_name_zh", - "type_info": "Text" - }, - { - "ordinal": 29, - "name": "line_name_ko", - "type_info": "Text" - }, - { - "ordinal": 30, - "name": "line_color_c", - "type_info": "Text" - }, - { - "ordinal": 31, - "name": "line_type", - 
"type_info": "Int4" - }, - { - "ordinal": 32, - "name": "line_symbol1", - "type_info": "Text" - }, - { - "ordinal": 33, - "name": "line_symbol2", - "type_info": "Text" - }, - { - "ordinal": 34, - "name": "line_symbol3", - "type_info": "Text" - }, - { - "ordinal": 35, - "name": "line_symbol4", - "type_info": "Text" - }, - { - "ordinal": 36, - "name": "line_symbol1_color", - "type_info": "Text" - }, - { - "ordinal": 37, - "name": "line_symbol2_color", - "type_info": "Text" - }, - { - "ordinal": 38, - "name": "line_symbol3_color", - "type_info": "Text" - }, - { - "ordinal": 39, - "name": "line_symbol4_color", - "type_info": "Text" - }, - { - "ordinal": 40, - "name": "line_symbol1_shape", - "type_info": "Text" - }, - { - "ordinal": 41, - "name": "line_symbol2_shape", - "type_info": "Text" - }, - { - "ordinal": 42, - "name": "line_symbol3_shape", - "type_info": "Text" - }, - { - "ordinal": 43, - "name": "line_symbol4_shape", - "type_info": "Text" - }, - { - "ordinal": 44, - "name": "average_distance", - "type_info": "Float8" - }, - { - "ordinal": 45, - "name": "type_id", - "type_info": "Int4" - }, - { - "ordinal": 46, - "name": "sst_id", - "type_info": "Int4" - }, - { - "ordinal": 47, - "name": "type_cd", - "type_info": "Int4" - }, - { - "ordinal": 48, - "name": "line_group_cd", - "type_info": "Int4" - }, - { - "ordinal": 49, - "name": "pass", - "type_info": "Int4" - }, - { - "ordinal": 50, - "name": "type_name", - "type_info": "Text" - }, - { - "ordinal": 51, - "name": "type_name_k", - "type_info": "Text" - }, - { - "ordinal": 52, - "name": "type_name_r", - "type_info": "Text" - }, - { - "ordinal": 53, - "name": "type_name_zh", - "type_info": "Text" - }, - { - "ordinal": 54, - "name": "type_name_ko", - "type_info": "Text" - }, - { - "ordinal": 55, - "name": "color", - "type_info": "Text" - }, - { - "ordinal": 56, - "name": "direction", - "type_info": "Int4" - }, - { - "ordinal": 57, - "name": "kind", - "type_info": "Int4" - } - ], - "parameters": { - "Left": [ - "Int4" 
- ] - }, - "nullable": [ - false, - false, - false, - false, - true, - true, - true, - true, - true, - true, - true, - true, - true, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - null, - null, - null, - null, - null, - null, - null, - false, - true, - true, - true, - true, - true, - true, - true, - true, - true, - true, - true, - true, - null, - null, - null, - null, - null, - null, - null, - null, - null, - null, - null, - null, - null, - null - ] - }, - "hash": "587f17667fd03f8f3007ef9e754a168a2a09d30a408f98fdbd6f2b1fa4562a8c" -} diff --git a/.sqlx/query-6711dff69e3c8d999743db687be404ee34f4b081e2662661873c44b6e3be1c43.json b/.sqlx/query-6a624360045e2278d6c1e08559918dcd026348e9304bba9584e72061034ac3db.json similarity index 91% rename from .sqlx/query-6711dff69e3c8d999743db687be404ee34f4b081e2662661873c44b6e3be1c43.json rename to .sqlx/query-6a624360045e2278d6c1e08559918dcd026348e9304bba9584e72061034ac3db.json index c2f7242b..7f837cee 100644 --- a/.sqlx/query-6711dff69e3c8d999743db687be404ee34f4b081e2662661873c44b6e3be1c43.json +++ b/.sqlx/query-6a624360045e2278d6c1e08559918dcd026348e9304bba9584e72061034ac3db.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT\n s.station_cd,\n s.station_g_cd,\n s.station_name,\n s.station_name_k,\n s.station_name_r,\n s.station_name_rn,\n s.station_name_zh,\n s.station_name_ko,\n s.station_number1,\n s.station_number2,\n s.station_number3,\n s.station_number4,\n s.three_letter_code,\n s.line_cd,\n s.pref_cd,\n s.post,\n s.address,\n s.lon,\n s.lat,\n s.open_ymd,\n s.close_ymd,\n s.e_status,\n s.e_sort,\n COALESCE(NULLIF(COALESCE(a.line_name, l.line_name), ''), NULL) AS line_name,\n COALESCE(NULLIF(COALESCE(a.line_name_k, l.line_name_k), ''), NULL) AS line_name_k,\n COALESCE(NULLIF(COALESCE(a.line_name_h, l.line_name_h), ''), NULL) AS line_name_h,\n COALESCE(NULLIF(COALESCE(a.line_name_r, l.line_name_r), ''), NULL) AS line_name_r,\n 
COALESCE(NULLIF(COALESCE(a.line_name_zh, l.line_name_zh), ''), NULL) AS line_name_zh,\n COALESCE(NULLIF(COALESCE(a.line_name_ko, l.line_name_ko), ''), NULL) AS line_name_ko,\n COALESCE(NULLIF(COALESCE(a.line_color_c, l.line_color_c), ''), NULL) AS line_color_c,\n l.company_cd,\n l.line_type,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n sst.id AS sst_id,\n sst.type_cd,\n sst.line_group_cd,\n sst.pass,\n t.id AS type_id,\n t.type_name,\n t.type_name_k,\n t.type_name_r,\n t.type_name_zh,\n t.type_name_ko,\n t.color,\n t.direction,\n t.kind\n FROM stations AS s\n JOIN lines AS l ON l.line_cd = s.line_cd AND l.e_status = 0\n LEFT JOIN station_station_types AS sst ON sst.line_group_cd = $1\n LEFT JOIN types AS t ON t.type_cd = sst.type_cd\n LEFT JOIN line_aliases AS la ON la.station_cd = s.station_cd\n LEFT JOIN aliases AS a ON a.id = la.alias_cd\n WHERE\n s.line_cd = l.line_cd\n AND s.station_cd = sst.station_cd\n AND s.e_status = 0\n ORDER BY sst.id", + "query": "SELECT\n s.station_cd,\n s.station_g_cd,\n s.station_name,\n s.station_name_k,\n s.station_name_r,\n s.station_name_rn,\n s.station_name_zh,\n s.station_name_ko,\n s.station_number1,\n s.station_number2,\n s.station_number3,\n s.station_number4,\n s.three_letter_code,\n s.line_cd,\n s.pref_cd,\n s.post,\n s.address,\n s.lon,\n s.lat,\n s.open_ymd,\n s.close_ymd,\n s.e_status,\n s.e_sort,\n COALESCE(NULLIF(COALESCE(a.line_name, l.line_name), ''), NULL) AS line_name,\n COALESCE(NULLIF(COALESCE(a.line_name_k, l.line_name_k), ''), NULL) AS line_name_k,\n COALESCE(NULLIF(COALESCE(a.line_name_h, l.line_name_h), ''), NULL) AS line_name_h,\n COALESCE(NULLIF(COALESCE(a.line_name_r, l.line_name_r), ''), NULL) AS line_name_r,\n 
COALESCE(NULLIF(COALESCE(a.line_name_zh, l.line_name_zh), ''), NULL) AS line_name_zh,\n COALESCE(NULLIF(COALESCE(a.line_name_ko, l.line_name_ko), ''), NULL) AS line_name_ko,\n COALESCE(NULLIF(COALESCE(a.line_color_c, l.line_color_c), ''), NULL) AS line_color_c,\n l.company_cd,\n l.line_type,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n sst.id AS sst_id,\n sst.type_cd,\n sst.line_group_cd,\n sst.pass,\n t.id AS type_id,\n t.type_name,\n t.type_name_k,\n t.type_name_r,\n t.type_name_zh,\n t.type_name_ko,\n t.color,\n t.direction,\n t.kind,\n s.transport_type\n FROM stations AS s\n JOIN lines AS l ON l.line_cd = s.line_cd AND l.e_status = 0\n LEFT JOIN station_station_types AS sst ON sst.line_group_cd = $1\n LEFT JOIN types AS t ON t.type_cd = sst.type_cd\n LEFT JOIN line_aliases AS la ON la.station_cd = s.station_cd\n LEFT JOIN aliases AS a ON a.id = la.alias_cd\n WHERE\n s.line_cd = l.line_cd\n AND s.station_cd = sst.station_cd\n AND s.e_status = 0\n ORDER BY sst.id", "describe": { "columns": [ { @@ -292,6 +292,11 @@ "ordinal": 57, "name": "kind", "type_info": "Int4" + }, + { + "ordinal": 58, + "name": "transport_type", + "type_info": "Int4" } ], "parameters": { @@ -357,8 +362,9 @@ false, false, true, - true + true, + false ] }, - "hash": "6711dff69e3c8d999743db687be404ee34f4b081e2662661873c44b6e3be1c43" + "hash": "6a624360045e2278d6c1e08559918dcd026348e9304bba9584e72061034ac3db" } diff --git a/.sqlx/query-64004f2e2ea8f34594c230fe82d5502911473ecef28265350576cd8bee64f7f8.json b/.sqlx/query-792ccb86508366f1a36b6d43a6f9a5dc2535b80b9df16fbd2d308221b545813b.json similarity index 92% rename from .sqlx/query-64004f2e2ea8f34594c230fe82d5502911473ecef28265350576cd8bee64f7f8.json 
rename to .sqlx/query-792ccb86508366f1a36b6d43a6f9a5dc2535b80b9df16fbd2d308221b545813b.json index 36652345..cd3ab0a5 100644 --- a/.sqlx/query-64004f2e2ea8f34594c230fe82d5502911473ecef28265350576cd8bee64f7f8.json +++ b/.sqlx/query-792ccb86508366f1a36b6d43a6f9a5dc2535b80b9df16fbd2d308221b545813b.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "WITH\n from_cte AS (\n SELECT\n s.station_cd,\n s.line_cd\n FROM\n stations AS s\n WHERE\n s.station_g_cd = $1\n ),\n to_cte AS (\n SELECT\n s.station_cd,\n s.line_cd\n FROM\n stations AS s\n WHERE\n s.station_g_cd = $2\n ),\n common_lines AS (\n SELECT DISTINCT s1.line_cd\n FROM stations s1\n WHERE s1.station_g_cd = $3\n AND s1.e_status = 0\n AND ($5::int IS NULL OR s1.line_cd = $5)\n AND EXISTS (\n SELECT 1\n FROM stations s2\n WHERE s2.station_g_cd = $4\n AND s2.e_status = 0\n AND s2.line_cd = s1.line_cd\n )\n ),\n sst_cte_c1 AS (\n SELECT\n sst.line_group_cd\n FROM\n station_station_types AS sst\n JOIN from_cte ON sst.station_cd = from_cte.station_cd\n WHERE\n sst.pass <> 1\n ),\n sst_cte_c2 AS (\n SELECT\n sst.line_group_cd\n FROM\n station_station_types AS sst\n JOIN to_cte ON sst.station_cd = to_cte.station_cd\n WHERE\n sst.pass <> 1\n ),\n sst_cte AS (\n SELECT\n sst.id,\n sst.station_cd,\n sst.type_cd,\n sst.line_group_cd,\n sst.pass\n FROM\n station_station_types AS sst\n JOIN sst_cte_c1 ON sst.line_group_cd = sst_cte_c1.line_group_cd\n JOIN sst_cte_c2 ON sst.line_group_cd = sst_cte_c2.line_group_cd\n )\n SELECT\n sta.station_cd,\n sta.station_g_cd,\n sta.station_name,\n sta.station_name_k,\n sta.station_name_r,\n sta.station_name_rn,\n sta.station_name_zh,\n sta.station_name_ko,\n sta.station_number1,\n sta.station_number2,\n sta.station_number3,\n sta.station_number4,\n sta.three_letter_code,\n sta.line_cd,\n sta.pref_cd,\n sta.post,\n sta.address,\n sta.lon,\n sta.lat,\n sta.open_ymd,\n sta.close_ymd,\n sta.e_status,\n sta.e_sort,\n lin.company_cd,\n COALESCE(NULLIF(COALESCE(a.line_name, lin.line_name), 
''), NULL) AS line_name,\n COALESCE(NULLIF(COALESCE(a.line_name_k, lin.line_name_k), ''), NULL) AS line_name_k,\n COALESCE(NULLIF(COALESCE(a.line_name_h, lin.line_name_h), ''), NULL) AS line_name_h,\n COALESCE(NULLIF(COALESCE(a.line_name_r, lin.line_name_r), ''), NULL) AS line_name_r,\n COALESCE(NULLIF(COALESCE(a.line_name_zh, lin.line_name_zh), ''), NULL) AS line_name_zh,\n COALESCE(NULLIF(COALESCE(a.line_name_ko, lin.line_name_ko), ''), NULL) AS line_name_ko,\n COALESCE(NULLIF(COALESCE(a.line_color_c, lin.line_color_c), ''), NULL) AS line_color_c,\n lin.line_type,\n lin.line_symbol1,\n lin.line_symbol2,\n lin.line_symbol3,\n lin.line_symbol4,\n lin.line_symbol1_color,\n lin.line_symbol2_color,\n lin.line_symbol3_color,\n lin.line_symbol4_color,\n lin.line_symbol1_shape,\n lin.line_symbol2_shape,\n lin.line_symbol3_shape,\n lin.line_symbol4_shape,\n COALESCE(lin.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n COALESCE(sst.line_group_cd, NULL)::int AS line_group_cd, -- has_train_types用\n NULL::int AS type_id,\n NULL::int AS sst_id,\n NULL::int AS type_cd,\n NULL::int AS pass,\n NULL::text AS type_name,\n NULL::text AS type_name_k,\n NULL::text AS type_name_r,\n NULL::text AS type_name_zh,\n NULL::text AS type_name_ko,\n NULL::text AS color,\n NULL::int AS direction,\n NULL::int AS kind\n FROM\n stations AS sta\n\t\t\t\tJOIN common_lines AS cl ON sta.line_cd = cl.line_cd\n\t\t\t\tJOIN lines AS lin ON lin.line_cd = cl.line_cd\n LEFT JOIN sst_cte AS sst ON sst.station_cd = sta.station_cd\n LEFT JOIN types AS tt ON tt.type_cd = sst.type_cd\n LEFT JOIN line_aliases AS la ON la.station_cd = sta.station_cd\n LEFT JOIN aliases AS a ON a.id = la.alias_cd\n WHERE\n sst.line_group_cd IS NULL\n AND lin.e_status = 0\n AND sta.e_status = 0\n ORDER BY sta.e_sort, sta.station_cd", + "query": "WITH\n from_cte AS (\n SELECT\n s.station_cd,\n s.line_cd\n FROM\n stations AS s\n WHERE\n s.station_g_cd = $1\n ),\n to_cte AS (\n SELECT\n s.station_cd,\n s.line_cd\n FROM\n 
stations AS s\n WHERE\n s.station_g_cd = $2\n ),\n common_lines AS (\n SELECT DISTINCT s1.line_cd\n FROM stations s1\n WHERE s1.station_g_cd = $3\n AND s1.e_status = 0\n AND ($5::int IS NULL OR s1.line_cd = $5)\n AND EXISTS (\n SELECT 1\n FROM stations s2\n WHERE s2.station_g_cd = $4\n AND s2.e_status = 0\n AND s2.line_cd = s1.line_cd\n )\n ),\n sst_cte_c1 AS (\n SELECT\n sst.line_group_cd\n FROM\n station_station_types AS sst\n JOIN from_cte ON sst.station_cd = from_cte.station_cd\n WHERE\n sst.pass <> 1\n ),\n sst_cte_c2 AS (\n SELECT\n sst.line_group_cd\n FROM\n station_station_types AS sst\n JOIN to_cte ON sst.station_cd = to_cte.station_cd\n WHERE\n sst.pass <> 1\n ),\n sst_cte AS (\n SELECT\n sst.id,\n sst.station_cd,\n sst.type_cd,\n sst.line_group_cd,\n sst.pass\n FROM\n station_station_types AS sst\n JOIN sst_cte_c1 ON sst.line_group_cd = sst_cte_c1.line_group_cd\n JOIN sst_cte_c2 ON sst.line_group_cd = sst_cte_c2.line_group_cd\n )\n SELECT\n sta.station_cd,\n sta.station_g_cd,\n sta.station_name,\n sta.station_name_k,\n sta.station_name_r,\n sta.station_name_rn,\n sta.station_name_zh,\n sta.station_name_ko,\n sta.station_number1,\n sta.station_number2,\n sta.station_number3,\n sta.station_number4,\n sta.three_letter_code,\n sta.line_cd,\n sta.pref_cd,\n sta.post,\n sta.address,\n sta.lon,\n sta.lat,\n sta.open_ymd,\n sta.close_ymd,\n sta.e_status,\n sta.e_sort,\n lin.company_cd,\n COALESCE(NULLIF(COALESCE(a.line_name, lin.line_name), ''), NULL) AS line_name,\n COALESCE(NULLIF(COALESCE(a.line_name_k, lin.line_name_k), ''), NULL) AS line_name_k,\n COALESCE(NULLIF(COALESCE(a.line_name_h, lin.line_name_h), ''), NULL) AS line_name_h,\n COALESCE(NULLIF(COALESCE(a.line_name_r, lin.line_name_r), ''), NULL) AS line_name_r,\n COALESCE(NULLIF(COALESCE(a.line_name_zh, lin.line_name_zh), ''), NULL) AS line_name_zh,\n COALESCE(NULLIF(COALESCE(a.line_name_ko, lin.line_name_ko), ''), NULL) AS line_name_ko,\n COALESCE(NULLIF(COALESCE(a.line_color_c, lin.line_color_c), 
''), NULL) AS line_color_c,\n lin.line_type,\n lin.line_symbol1,\n lin.line_symbol2,\n lin.line_symbol3,\n lin.line_symbol4,\n lin.line_symbol1_color,\n lin.line_symbol2_color,\n lin.line_symbol3_color,\n lin.line_symbol4_color,\n lin.line_symbol1_shape,\n lin.line_symbol2_shape,\n lin.line_symbol3_shape,\n lin.line_symbol4_shape,\n COALESCE(lin.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n COALESCE(sst.line_group_cd, NULL)::int AS line_group_cd, -- has_train_types用\n NULL::int AS type_id,\n NULL::int AS sst_id,\n NULL::int AS type_cd,\n NULL::int AS pass,\n NULL::text AS type_name,\n NULL::text AS type_name_k,\n NULL::text AS type_name_r,\n NULL::text AS type_name_zh,\n NULL::text AS type_name_ko,\n NULL::text AS color,\n NULL::int AS direction,\n NULL::int AS kind,\n sta.transport_type\n FROM\n stations AS sta\n\t\t\t\tJOIN common_lines AS cl ON sta.line_cd = cl.line_cd\n\t\t\t\tJOIN lines AS lin ON lin.line_cd = cl.line_cd\n LEFT JOIN sst_cte AS sst ON sst.station_cd = sta.station_cd\n LEFT JOIN types AS tt ON tt.type_cd = sst.type_cd\n LEFT JOIN line_aliases AS la ON la.station_cd = sta.station_cd\n LEFT JOIN aliases AS a ON a.id = la.alias_cd\n WHERE\n sst.line_group_cd IS NULL\n AND lin.e_status = 0\n AND sta.e_status = 0\n ORDER BY sta.e_sort, sta.station_cd", "describe": { "columns": [ { @@ -292,6 +292,11 @@ "ordinal": 57, "name": "kind", "type_info": "Int4" + }, + { + "ordinal": 58, + "name": "transport_type", + "type_info": "Int4" } ], "parameters": { @@ -361,8 +366,9 @@ null, null, null, - null + null, + false ] }, - "hash": "64004f2e2ea8f34594c230fe82d5502911473ecef28265350576cd8bee64f7f8" + "hash": "792ccb86508366f1a36b6d43a6f9a5dc2535b80b9df16fbd2d308221b545813b" } diff --git a/.sqlx/query-1ffa93b133de68b60a7e0a6663f212f613c175a35beb700a0001efb82f184d10.json b/.sqlx/query-7bc61cc6e96fcdaf7e9a05c8f76582a0470a8b5f5e61ffcd0609ce4c45300afb.json similarity index 88% rename from 
.sqlx/query-1ffa93b133de68b60a7e0a6663f212f613c175a35beb700a0001efb82f184d10.json rename to .sqlx/query-7bc61cc6e96fcdaf7e9a05c8f76582a0470a8b5f5e61ffcd0609ce4c45300afb.json index 266b4e8b..ab5119f0 100644 --- a/.sqlx/query-1ffa93b133de68b60a7e0a6663f212f613c175a35beb700a0001efb82f184d10.json +++ b/.sqlx/query-7bc61cc6e96fcdaf7e9a05c8f76582a0470a8b5f5e61ffcd0609ce4c45300afb.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT \n l.line_cd,\n l.company_cd,\n l.line_type,\n l.line_name,\n l.line_name_k,\n l.line_name_h,\n l.line_name_r,\n l.line_name_zh,\n l.line_name_ko,\n l.line_color_c,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n l.e_status,\n l.e_sort,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n CAST(NULL AS INTEGER) AS line_group_cd,\n CAST(NULL AS INTEGER) AS station_cd,\n CAST(NULL AS INTEGER) AS station_g_cd,\n CAST(NULL AS INTEGER) AS type_cd\n FROM lines AS l\n WHERE (\n l.line_name LIKE $1\n OR l.line_name_rn LIKE $2\n OR l.line_name_k LIKE $3\n OR l.line_name_zh LIKE $4\n OR l.line_name_ko LIKE $5\n )\n AND l.e_status = 0\n LIMIT $6", + "query": "SELECT \n l.line_cd,\n l.company_cd,\n l.line_type,\n l.line_name,\n l.line_name_k,\n l.line_name_h,\n l.line_name_r,\n l.line_name_zh,\n l.line_name_ko,\n l.line_color_c,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n l.e_status,\n l.e_sort,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n CAST(NULL AS INTEGER) AS line_group_cd,\n CAST(NULL AS INTEGER) AS station_cd,\n CAST(NULL AS INTEGER) AS 
station_g_cd,\n CAST(NULL AS INTEGER) AS type_cd,\n l.transport_type\n FROM lines AS l\n WHERE (\n l.line_name LIKE $1\n OR l.line_name_rn LIKE $2\n OR l.line_name_k LIKE $3\n OR l.line_name_zh LIKE $4\n OR l.line_name_ko LIKE $5\n )\n AND l.e_status = 0\n LIMIT $6", "describe": { "columns": [ { @@ -147,6 +147,11 @@ "ordinal": 28, "name": "type_cd", "type_info": "Int4" + }, + { + "ordinal": 29, + "name": "transport_type", + "type_info": "Int4" } ], "parameters": { @@ -188,8 +193,9 @@ null, null, null, - null + null, + false ] }, - "hash": "1ffa93b133de68b60a7e0a6663f212f613c175a35beb700a0001efb82f184d10" + "hash": "7bc61cc6e96fcdaf7e9a05c8f76582a0470a8b5f5e61ffcd0609ce4c45300afb" } diff --git a/.sqlx/query-648214ea287859e20e4788c48dff2af34f7c31069f036e3a3cfa407f31a9fee5.json b/.sqlx/query-90906c9a7d97f432f66f1722a6a51731194922c4cb16f8809ae1e2e711d5a0da.json similarity index 91% rename from .sqlx/query-648214ea287859e20e4788c48dff2af34f7c31069f036e3a3cfa407f31a9fee5.json rename to .sqlx/query-90906c9a7d97f432f66f1722a6a51731194922c4cb16f8809ae1e2e711d5a0da.json index 182ae683..d9b3af42 100644 --- a/.sqlx/query-648214ea287859e20e4788c48dff2af34f7c31069f036e3a3cfa407f31a9fee5.json +++ b/.sqlx/query-90906c9a7d97f432f66f1722a6a51731194922c4cb16f8809ae1e2e711d5a0da.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT s.station_cd,\n s.station_g_cd,\n s.station_name,\n s.station_name_k,\n s.station_name_r,\n s.station_name_rn,\n s.station_name_zh,\n s.station_name_ko,\n s.station_number1,\n s.station_number2,\n s.station_number3,\n s.station_number4,\n s.three_letter_code,\n s.line_cd,\n s.pref_cd,\n s.post,\n s.address,\n s.lon,\n s.lat,\n s.open_ymd,\n s.close_ymd,\n s.e_status,\n s.e_sort,\n l.company_cd,\n l.line_type,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n 
l.line_symbol3_shape,\n l.line_symbol4_shape,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n COALESCE(NULLIF(COALESCE(a.line_name, l.line_name), ''), NULL) AS line_name,\n COALESCE(NULLIF(COALESCE(a.line_name_k, l.line_name_k), ''), NULL) AS line_name_k,\n COALESCE(NULLIF(COALESCE(a.line_name_h, l.line_name_h), ''), NULL) AS line_name_h,\n COALESCE(NULLIF(COALESCE(a.line_name_r, l.line_name_r), ''), NULL) AS line_name_r,\n COALESCE(NULLIF(COALESCE(a.line_name_zh, l.line_name_zh), ''), NULL) AS line_name_zh,\n COALESCE(NULLIF(COALESCE(a.line_name_ko, l.line_name_ko), ''), NULL) AS line_name_ko,\n COALESCE(NULLIF(COALESCE(a.line_color_c, l.line_color_c), ''), NULL) AS line_color_c,\n sst.id AS sst_id,\n sst.type_cd,\n sst.line_group_cd,\n sst.pass,\n t.id AS type_id,\n t.type_name,\n t.type_name_k,\n t.type_name_r,\n t.type_name_zh,\n t.type_name_ko,\n t.color,\n t.direction,\n t.kind\n FROM\n stations AS s\n JOIN lines AS l ON l.line_cd = s.line_cd\n LEFT JOIN station_station_types AS sst ON sst.station_cd = s.station_cd\n LEFT JOIN types AS t ON t.type_cd = sst.type_cd\n LEFT JOIN line_aliases AS la ON la.station_cd = s.station_cd\n LEFT JOIN aliases AS a ON a.id = la.alias_cd\n WHERE\n s.station_g_cd = $1\n AND s.line_cd = l.line_cd\n AND s.e_status = 0\n AND l.e_status = 0", + "query": "SELECT s.station_cd,\n s.station_g_cd,\n s.station_name,\n s.station_name_k,\n s.station_name_r,\n s.station_name_rn,\n s.station_name_zh,\n s.station_name_ko,\n s.station_number1,\n s.station_number2,\n s.station_number3,\n s.station_number4,\n s.three_letter_code,\n s.line_cd,\n s.pref_cd,\n s.post,\n s.address,\n s.lon,\n s.lat,\n s.open_ymd,\n s.close_ymd,\n s.e_status,\n s.e_sort,\n l.company_cd,\n l.line_type,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n 
l.line_symbol3_shape,\n l.line_symbol4_shape,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n COALESCE(NULLIF(COALESCE(a.line_name, l.line_name), ''), NULL) AS line_name,\n COALESCE(NULLIF(COALESCE(a.line_name_k, l.line_name_k), ''), NULL) AS line_name_k,\n COALESCE(NULLIF(COALESCE(a.line_name_h, l.line_name_h), ''), NULL) AS line_name_h,\n COALESCE(NULLIF(COALESCE(a.line_name_r, l.line_name_r), ''), NULL) AS line_name_r,\n COALESCE(NULLIF(COALESCE(a.line_name_zh, l.line_name_zh), ''), NULL) AS line_name_zh,\n COALESCE(NULLIF(COALESCE(a.line_name_ko, l.line_name_ko), ''), NULL) AS line_name_ko,\n COALESCE(NULLIF(COALESCE(a.line_color_c, l.line_color_c), ''), NULL) AS line_color_c,\n sst.id AS sst_id,\n sst.type_cd,\n sst.line_group_cd,\n sst.pass,\n t.id AS type_id,\n t.type_name,\n t.type_name_k,\n t.type_name_r,\n t.type_name_zh,\n t.type_name_ko,\n t.color,\n t.direction,\n t.kind,\n s.transport_type\n FROM\n stations AS s\n JOIN lines AS l ON l.line_cd = s.line_cd\n LEFT JOIN station_station_types AS sst ON sst.station_cd = s.station_cd\n LEFT JOIN types AS t ON t.type_cd = sst.type_cd\n LEFT JOIN line_aliases AS la ON la.station_cd = s.station_cd\n LEFT JOIN aliases AS a ON a.id = la.alias_cd\n WHERE\n s.station_g_cd = $1\n AND s.line_cd = l.line_cd\n AND s.e_status = 0\n AND l.e_status = 0", "describe": { "columns": [ { @@ -292,6 +292,11 @@ "ordinal": 57, "name": "kind", "type_info": "Int4" + }, + { + "ordinal": 58, + "name": "transport_type", + "type_info": "Int4" } ], "parameters": { @@ -357,8 +362,9 @@ false, false, true, - true + true, + false ] }, - "hash": "648214ea287859e20e4788c48dff2af34f7c31069f036e3a3cfa407f31a9fee5" + "hash": "90906c9a7d97f432f66f1722a6a51731194922c4cb16f8809ae1e2e711d5a0da" } diff --git a/.sqlx/query-20b55238238b1655930a5716441ff4944d4e572e97654fc65cb425e5435d337b.json b/.sqlx/query-9c6aff223f1912290cdda35d4cd8c9f3d26fc0efece17de6390f06ea9935241f.json similarity index 93% rename from 
.sqlx/query-20b55238238b1655930a5716441ff4944d4e572e97654fc65cb425e5435d337b.json rename to .sqlx/query-9c6aff223f1912290cdda35d4cd8c9f3d26fc0efece17de6390f06ea9935241f.json index 74dfa4da..b0f8b243 100644 --- a/.sqlx/query-20b55238238b1655930a5716441ff4944d4e572e97654fc65cb425e5435d337b.json +++ b/.sqlx/query-9c6aff223f1912290cdda35d4cd8c9f3d26fc0efece17de6390f06ea9935241f.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "WITH\n from_cte AS (\n SELECT\n s.station_cd,\n s.line_cd\n FROM\n stations AS s\n WHERE\n s.station_g_cd = $1\n AND s.e_status = 0\n ),\n to_cte AS (\n SELECT\n s.station_cd,\n s.line_cd\n FROM\n stations AS s\n WHERE\n s.station_g_cd = $2\n AND s.e_status = 0\n ),\n sst_cte_c1 AS (\n SELECT\n sst.line_group_cd\n FROM\n station_station_types AS sst\n JOIN from_cte ON sst.station_cd = from_cte.station_cd\n WHERE\n sst.pass <> 1\n ),\n sst_cte_c2 AS (\n SELECT\n sst.line_group_cd\n FROM\n station_station_types AS sst\n JOIN to_cte ON sst.station_cd = to_cte.station_cd\n WHERE\n sst.pass <> 1\n ),\n sst_cte AS (\n SELECT\n sst.id,\n sst.station_cd,\n sst.type_cd,\n sst.line_group_cd,\n sst.pass\n FROM\n station_station_types AS sst\n JOIN sst_cte_c1 ON sst.line_group_cd = sst_cte_c1.line_group_cd\n JOIN sst_cte_c2 ON sst.line_group_cd = sst_cte_c2.line_group_cd\n )\n SELECT\n sta.station_cd,\n sta.station_g_cd,\n sta.station_name,\n sta.station_name_k,\n sta.station_name_r,\n sta.station_name_rn,\n sta.station_name_zh,\n sta.station_name_ko,\n sta.station_number1,\n sta.station_number2,\n sta.station_number3,\n sta.station_number4,\n sta.three_letter_code,\n sta.line_cd,\n sta.pref_cd,\n sta.post,\n sta.address,\n sta.lon,\n sta.lat,\n sta.open_ymd,\n sta.close_ymd,\n sta.e_status,\n sta.e_sort,\n lin.company_cd,\n COALESCE(NULLIF(COALESCE(a.line_name, lin.line_name), ''), NULL) AS line_name,\n COALESCE(NULLIF(COALESCE(a.line_name_k, lin.line_name_k), ''), NULL) AS line_name_k,\n COALESCE(NULLIF(COALESCE(a.line_name_h, lin.line_name_h), 
''), NULL) AS line_name_h,\n COALESCE(NULLIF(COALESCE(a.line_name_r, lin.line_name_r), ''), NULL) AS line_name_r,\n COALESCE(NULLIF(COALESCE(a.line_name_zh, lin.line_name_zh), ''), NULL) AS line_name_zh,\n COALESCE(NULLIF(COALESCE(a.line_name_ko, lin.line_name_ko), ''), NULL) AS line_name_ko,\n COALESCE(NULLIF(COALESCE(a.line_color_c, lin.line_color_c), ''), NULL) AS line_color_c,\n lin.line_type,\n lin.line_symbol1,\n lin.line_symbol2,\n lin.line_symbol3,\n lin.line_symbol4,\n lin.line_symbol1_color,\n lin.line_symbol2_color,\n lin.line_symbol3_color,\n lin.line_symbol4_color,\n lin.line_symbol1_shape,\n lin.line_symbol2_shape,\n lin.line_symbol3_shape,\n lin.line_symbol4_shape,\n COALESCE(lin.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n tt.id AS type_id,\n sst.id AS sst_id,\n sst.type_cd,\n sst.line_group_cd,\n sst.pass,\n tt.type_name,\n tt.type_name_k,\n tt.type_name_r,\n tt.type_name_zh,\n tt.type_name_ko,\n tt.color,\n tt.direction,\n tt.kind\n FROM\n stations AS sta\n LEFT JOIN sst_cte AS sst ON sst.station_cd = sta.station_cd\n JOIN types AS tt ON tt.type_cd = sst.type_cd\n JOIN lines AS lin ON lin.line_cd = sta.line_cd AND lin.e_status = 0\n LEFT JOIN line_aliases AS la ON la.station_cd = sta.station_cd\n LEFT JOIN aliases AS a ON a.id = la.alias_cd\n WHERE\n sta.e_status = 0\n AND ($3::int IS NULL OR sta.line_cd = $3)\n ORDER BY sst.id", + "query": "WITH\n from_cte AS (\n SELECT\n s.station_cd,\n s.line_cd\n FROM\n stations AS s\n WHERE\n s.station_g_cd = $1\n AND s.e_status = 0\n ),\n to_cte AS (\n SELECT\n s.station_cd,\n s.line_cd\n FROM\n stations AS s\n WHERE\n s.station_g_cd = $2\n AND s.e_status = 0\n ),\n sst_cte_c1 AS (\n SELECT\n sst.line_group_cd\n FROM\n station_station_types AS sst\n JOIN from_cte ON sst.station_cd = from_cte.station_cd\n WHERE\n sst.pass <> 1\n ),\n sst_cte_c2 AS (\n SELECT\n sst.line_group_cd\n FROM\n station_station_types AS sst\n JOIN to_cte ON sst.station_cd = to_cte.station_cd\n WHERE\n sst.pass <> 
1\n ),\n sst_cte AS (\n SELECT\n sst.id,\n sst.station_cd,\n sst.type_cd,\n sst.line_group_cd,\n sst.pass\n FROM\n station_station_types AS sst\n JOIN sst_cte_c1 ON sst.line_group_cd = sst_cte_c1.line_group_cd\n JOIN sst_cte_c2 ON sst.line_group_cd = sst_cte_c2.line_group_cd\n )\n SELECT\n sta.station_cd,\n sta.station_g_cd,\n sta.station_name,\n sta.station_name_k,\n sta.station_name_r,\n sta.station_name_rn,\n sta.station_name_zh,\n sta.station_name_ko,\n sta.station_number1,\n sta.station_number2,\n sta.station_number3,\n sta.station_number4,\n sta.three_letter_code,\n sta.line_cd,\n sta.pref_cd,\n sta.post,\n sta.address,\n sta.lon,\n sta.lat,\n sta.open_ymd,\n sta.close_ymd,\n sta.e_status,\n sta.e_sort,\n lin.company_cd,\n COALESCE(NULLIF(COALESCE(a.line_name, lin.line_name), ''), NULL) AS line_name,\n COALESCE(NULLIF(COALESCE(a.line_name_k, lin.line_name_k), ''), NULL) AS line_name_k,\n COALESCE(NULLIF(COALESCE(a.line_name_h, lin.line_name_h), ''), NULL) AS line_name_h,\n COALESCE(NULLIF(COALESCE(a.line_name_r, lin.line_name_r), ''), NULL) AS line_name_r,\n COALESCE(NULLIF(COALESCE(a.line_name_zh, lin.line_name_zh), ''), NULL) AS line_name_zh,\n COALESCE(NULLIF(COALESCE(a.line_name_ko, lin.line_name_ko), ''), NULL) AS line_name_ko,\n COALESCE(NULLIF(COALESCE(a.line_color_c, lin.line_color_c), ''), NULL) AS line_color_c,\n lin.line_type,\n lin.line_symbol1,\n lin.line_symbol2,\n lin.line_symbol3,\n lin.line_symbol4,\n lin.line_symbol1_color,\n lin.line_symbol2_color,\n lin.line_symbol3_color,\n lin.line_symbol4_color,\n lin.line_symbol1_shape,\n lin.line_symbol2_shape,\n lin.line_symbol3_shape,\n lin.line_symbol4_shape,\n COALESCE(lin.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n tt.id AS type_id,\n sst.id AS sst_id,\n sst.type_cd,\n sst.line_group_cd,\n sst.pass,\n tt.type_name,\n tt.type_name_k,\n tt.type_name_r,\n tt.type_name_zh,\n tt.type_name_ko,\n tt.color,\n tt.direction,\n tt.kind,\n sta.transport_type\n FROM\n stations AS sta\n 
LEFT JOIN sst_cte AS sst ON sst.station_cd = sta.station_cd\n JOIN types AS tt ON tt.type_cd = sst.type_cd\n JOIN lines AS lin ON lin.line_cd = sta.line_cd AND lin.e_status = 0\n LEFT JOIN line_aliases AS la ON la.station_cd = sta.station_cd\n LEFT JOIN aliases AS a ON a.id = la.alias_cd\n WHERE\n sta.e_status = 0\n AND ($3::int IS NULL OR sta.line_cd = $3)\n ORDER BY sst.id", "describe": { "columns": [ { @@ -292,6 +292,11 @@ "ordinal": 57, "name": "kind", "type_info": "Int4" + }, + { + "ordinal": 58, + "name": "transport_type", + "type_info": "Int4" } ], "parameters": { @@ -359,8 +364,9 @@ false, false, true, - true + true, + false ] }, - "hash": "20b55238238b1655930a5716441ff4944d4e572e97654fc65cb425e5435d337b" + "hash": "9c6aff223f1912290cdda35d4cd8c9f3d26fc0efece17de6390f06ea9935241f" } diff --git a/.sqlx/query-b4769060ba658a2746a186f46d4ca4f0f690fe2fd7521ee8645193b364b6896a.json b/.sqlx/query-b4769060ba658a2746a186f46d4ca4f0f690fe2fd7521ee8645193b364b6896a.json deleted file mode 100644 index 3805034e..00000000 --- a/.sqlx/query-b4769060ba658a2746a186f46d4ca4f0f690fe2fd7521ee8645193b364b6896a.json +++ /dev/null @@ -1,364 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "WITH target_line_group AS (\n SELECT sst_inner.line_group_cd\n FROM station_station_types AS sst_inner\n LEFT JOIN types AS t_inner ON sst_inner.type_cd = t_inner.type_cd\n WHERE sst_inner.station_cd = $1\n AND (\n (t_inner.priority > 0 AND sst_inner.pass <> 1 AND sst_inner.type_cd = t_inner.type_cd)\n OR (NOT (t_inner.priority > 0 AND sst_inner.pass <> 1) AND t_inner.kind IN (0,1))\n )\n ORDER BY t_inner.priority DESC\n LIMIT 1\n )\n SELECT s.station_cd,\n s.station_g_cd,\n s.station_name,\n s.station_name_k,\n s.station_name_r,\n s.station_name_rn,\n s.station_name_zh,\n s.station_name_ko,\n s.station_number1,\n s.station_number2,\n s.station_number3,\n s.station_number4,\n s.three_letter_code,\n s.line_cd,\n s.pref_cd,\n s.post,\n s.address,\n s.lon,\n s.lat,\n s.open_ymd,\n s.close_ymd,\n 
s.e_status,\n s.e_sort,\n l.company_cd,\n l.line_type,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n COALESCE(NULLIF(COALESCE(a.line_name, l.line_name), ''), NULL) AS line_name,\n COALESCE(NULLIF(COALESCE(a.line_name_k, l.line_name_k), ''), NULL) AS line_name_k,\n COALESCE(NULLIF(COALESCE(a.line_name_h, l.line_name_h), ''), NULL) AS line_name_h,\n COALESCE(NULLIF(COALESCE(a.line_name_r, l.line_name_r), ''), NULL) AS line_name_r,\n COALESCE(NULLIF(COALESCE(a.line_name_zh, l.line_name_zh), ''), NULL) AS line_name_zh,\n COALESCE(NULLIF(COALESCE(a.line_name_ko, l.line_name_ko), ''), NULL) AS line_name_ko,\n COALESCE(NULLIF(COALESCE(a.line_color_c, l.line_color_c), ''), NULL) AS line_color_c,\n t.id AS type_id,\n t.type_cd,\n t.color,\n t.type_name,\n t.type_name_k,\n t.type_name_r,\n t.type_name_zh,\n t.type_name_ko,\n t.direction,\n t.kind,\n sst.id AS sst_id,\n sst.line_group_cd,\n sst.pass\n FROM stations AS s\n JOIN station_station_types AS sst ON sst.line_group_cd = (SELECT line_group_cd FROM target_line_group) AND sst.station_cd = s.station_cd\n JOIN types AS t ON t.type_cd = sst.type_cd\n JOIN lines AS l ON l.line_cd = s.line_cd\n LEFT JOIN line_aliases AS la ON la.station_cd = s.station_cd\n LEFT JOIN aliases AS a ON a.id = la.alias_cd\n WHERE s.e_status = 0\n AND l.e_status = 0\n ORDER BY sst.id", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "station_cd", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "station_g_cd", - "type_info": "Int4" - }, - { - "ordinal": 2, - "name": "station_name", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "station_name_k", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "station_name_r", - 
"type_info": "Text" - }, - { - "ordinal": 5, - "name": "station_name_rn", - "type_info": "Text" - }, - { - "ordinal": 6, - "name": "station_name_zh", - "type_info": "Text" - }, - { - "ordinal": 7, - "name": "station_name_ko", - "type_info": "Text" - }, - { - "ordinal": 8, - "name": "station_number1", - "type_info": "Text" - }, - { - "ordinal": 9, - "name": "station_number2", - "type_info": "Text" - }, - { - "ordinal": 10, - "name": "station_number3", - "type_info": "Text" - }, - { - "ordinal": 11, - "name": "station_number4", - "type_info": "Text" - }, - { - "ordinal": 12, - "name": "three_letter_code", - "type_info": "Text" - }, - { - "ordinal": 13, - "name": "line_cd", - "type_info": "Int4" - }, - { - "ordinal": 14, - "name": "pref_cd", - "type_info": "Int4" - }, - { - "ordinal": 15, - "name": "post", - "type_info": "Text" - }, - { - "ordinal": 16, - "name": "address", - "type_info": "Text" - }, - { - "ordinal": 17, - "name": "lon", - "type_info": "Float8" - }, - { - "ordinal": 18, - "name": "lat", - "type_info": "Float8" - }, - { - "ordinal": 19, - "name": "open_ymd", - "type_info": "Text" - }, - { - "ordinal": 20, - "name": "close_ymd", - "type_info": "Text" - }, - { - "ordinal": 21, - "name": "e_status", - "type_info": "Int4" - }, - { - "ordinal": 22, - "name": "e_sort", - "type_info": "Int4" - }, - { - "ordinal": 23, - "name": "company_cd", - "type_info": "Int4" - }, - { - "ordinal": 24, - "name": "line_type", - "type_info": "Int4" - }, - { - "ordinal": 25, - "name": "line_symbol1", - "type_info": "Text" - }, - { - "ordinal": 26, - "name": "line_symbol2", - "type_info": "Text" - }, - { - "ordinal": 27, - "name": "line_symbol3", - "type_info": "Text" - }, - { - "ordinal": 28, - "name": "line_symbol4", - "type_info": "Text" - }, - { - "ordinal": 29, - "name": "line_symbol1_color", - "type_info": "Text" - }, - { - "ordinal": 30, - "name": "line_symbol2_color", - "type_info": "Text" - }, - { - "ordinal": 31, - "name": "line_symbol3_color", - "type_info": "Text" - 
}, - { - "ordinal": 32, - "name": "line_symbol4_color", - "type_info": "Text" - }, - { - "ordinal": 33, - "name": "line_symbol1_shape", - "type_info": "Text" - }, - { - "ordinal": 34, - "name": "line_symbol2_shape", - "type_info": "Text" - }, - { - "ordinal": 35, - "name": "line_symbol3_shape", - "type_info": "Text" - }, - { - "ordinal": 36, - "name": "line_symbol4_shape", - "type_info": "Text" - }, - { - "ordinal": 37, - "name": "average_distance", - "type_info": "Float8" - }, - { - "ordinal": 38, - "name": "line_name", - "type_info": "Text" - }, - { - "ordinal": 39, - "name": "line_name_k", - "type_info": "Text" - }, - { - "ordinal": 40, - "name": "line_name_h", - "type_info": "Text" - }, - { - "ordinal": 41, - "name": "line_name_r", - "type_info": "Text" - }, - { - "ordinal": 42, - "name": "line_name_zh", - "type_info": "Text" - }, - { - "ordinal": 43, - "name": "line_name_ko", - "type_info": "Text" - }, - { - "ordinal": 44, - "name": "line_color_c", - "type_info": "Text" - }, - { - "ordinal": 45, - "name": "type_id", - "type_info": "Int4" - }, - { - "ordinal": 46, - "name": "type_cd", - "type_info": "Int4" - }, - { - "ordinal": 47, - "name": "color", - "type_info": "Text" - }, - { - "ordinal": 48, - "name": "type_name", - "type_info": "Text" - }, - { - "ordinal": 49, - "name": "type_name_k", - "type_info": "Text" - }, - { - "ordinal": 50, - "name": "type_name_r", - "type_info": "Text" - }, - { - "ordinal": 51, - "name": "type_name_zh", - "type_info": "Text" - }, - { - "ordinal": 52, - "name": "type_name_ko", - "type_info": "Text" - }, - { - "ordinal": 53, - "name": "direction", - "type_info": "Int4" - }, - { - "ordinal": 54, - "name": "kind", - "type_info": "Int4" - }, - { - "ordinal": 55, - "name": "sst_id", - "type_info": "Int4" - }, - { - "ordinal": 56, - "name": "line_group_cd", - "type_info": "Int4" - }, - { - "ordinal": 57, - "name": "pass", - "type_info": "Int4" - } - ], - "parameters": { - "Left": [ - "Int4" - ] - }, - "nullable": [ - false, - false, - 
false, - false, - true, - true, - true, - true, - true, - true, - true, - true, - true, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false, - true, - true, - true, - true, - true, - true, - true, - true, - true, - true, - true, - true, - null, - null, - null, - null, - null, - null, - null, - null, - false, - false, - false, - false, - false, - false, - false, - false, - true, - true, - false, - false, - true - ] - }, - "hash": "b4769060ba658a2746a186f46d4ca4f0f690fe2fd7521ee8645193b364b6896a" -} diff --git a/.sqlx/query-472b7d0481e1741654cd242d81993c2f99a817c57622e32aa4b9de36eddd33dd.json b/.sqlx/query-c08e746f79517801e8a235b4cbc4377f9addfa50f0a0b9ac5e2dacad55fb9a97.json similarity index 85% rename from .sqlx/query-472b7d0481e1741654cd242d81993c2f99a817c57622e32aa4b9de36eddd33dd.json rename to .sqlx/query-c08e746f79517801e8a235b4cbc4377f9addfa50f0a0b9ac5e2dacad55fb9a97.json index 617c8a54..ea859939 100644 --- a/.sqlx/query-472b7d0481e1741654cd242d81993c2f99a817c57622e32aa4b9de36eddd33dd.json +++ b/.sqlx/query-c08e746f79517801e8a235b4cbc4377f9addfa50f0a0b9ac5e2dacad55fb9a97.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT DISTINCT ON (sst.id, l.line_cd)\n l.line_cd,\n l.company_cd,\n l.line_type,\n l.line_name,\n l.line_name_k,\n l.line_name_h,\n l.line_name_r,\n l.line_name_zh,\n l.line_name_ko,\n l.line_color_c,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n l.e_status,\n l.e_sort,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n sst.line_group_cd,\n sst.type_cd,\n s.station_cd,\n s.station_g_cd\n FROM lines AS l\n JOIN station_station_types AS sst ON sst.line_group_cd = ANY($1) AND sst.pass <> 1\n JOIN stations AS s ON s.station_cd = 
sst.station_cd AND s.e_status = 0 AND s.line_cd = l.line_cd\n LEFT JOIN line_aliases AS la ON la.station_cd = s.station_cd\n LEFT JOIN aliases AS a ON la.alias_cd = a.id\n WHERE l.e_status = 0\n ORDER BY sst.id, l.line_cd", + "query": "SELECT DISTINCT ON (sst.id, l.line_cd)\n l.line_cd,\n l.company_cd,\n l.line_type,\n l.line_name,\n l.line_name_k,\n l.line_name_h,\n l.line_name_r,\n l.line_name_zh,\n l.line_name_ko,\n l.line_color_c,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n l.e_status,\n l.e_sort,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n sst.line_group_cd,\n sst.type_cd,\n s.station_cd,\n s.station_g_cd,\n l.transport_type\n FROM lines AS l\n JOIN station_station_types AS sst ON sst.line_group_cd = ANY($1) AND sst.pass <> 1\n JOIN stations AS s ON s.station_cd = sst.station_cd AND s.e_status = 0 AND s.line_cd = l.line_cd\n LEFT JOIN line_aliases AS la ON la.station_cd = s.station_cd\n LEFT JOIN aliases AS a ON la.alias_cd = a.id\n WHERE l.e_status = 0\n ORDER BY sst.id, l.line_cd", "describe": { "columns": [ { @@ -147,6 +147,11 @@ "ordinal": 28, "name": "station_g_cd", "type_info": "Int4" + }, + { + "ordinal": 29, + "name": "transport_type", + "type_info": "Int4" } ], "parameters": { @@ -183,8 +188,9 @@ false, false, false, + false, false ] }, - "hash": "472b7d0481e1741654cd242d81993c2f99a817c57622e32aa4b9de36eddd33dd" + "hash": "c08e746f79517801e8a235b4cbc4377f9addfa50f0a0b9ac5e2dacad55fb9a97" } diff --git a/.sqlx/query-0a1c6e15355fdf6363a083a43fec27a04b91b946ae7fdc192a721f9b6923e690.json b/.sqlx/query-d3f9188af86adfa1b16ea4ce538299c4e0942aeadc2497c7752b6b98fb78087f.json similarity index 80% rename from .sqlx/query-0a1c6e15355fdf6363a083a43fec27a04b91b946ae7fdc192a721f9b6923e690.json rename to 
.sqlx/query-d3f9188af86adfa1b16ea4ce538299c4e0942aeadc2497c7752b6b98fb78087f.json index 92e94f94..54d2fc59 100644 --- a/.sqlx/query-0a1c6e15355fdf6363a083a43fec27a04b91b946ae7fdc192a721f9b6923e690.json +++ b/.sqlx/query-d3f9188af86adfa1b16ea4ce538299c4e0942aeadc2497c7752b6b98fb78087f.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT l.line_cd,\n l.company_cd,\n l.line_type,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n l.e_status,\n l.e_sort,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n s.station_cd,\n s.station_g_cd,\n sst.line_group_cd,\n sst.type_cd,\n COALESCE(alias_data.line_name, l.line_name) AS line_name,\n COALESCE(alias_data.line_name_k, l.line_name_k) AS line_name_k,\n COALESCE(alias_data.line_name_h, l.line_name_h) AS line_name_h,\n COALESCE(alias_data.line_name_r, l.line_name_r) AS line_name_r,\n COALESCE(alias_data.line_name_zh, l.line_name_zh) AS line_name_zh,\n COALESCE(alias_data.line_name_ko, l.line_name_ko) AS line_name_ko,\n COALESCE(alias_data.line_color_c, l.line_color_c) AS line_color_c\n FROM lines AS l\n JOIN stations AS s ON s.station_cd = $1\n JOIN station_station_types AS sst ON sst.station_cd = s.station_cd AND sst.pass <> 1\n LEFT JOIN (\n SELECT DISTINCT ON (la.station_cd) \n la.station_cd,\n a.line_name,\n a.line_name_k,\n a.line_name_h,\n a.line_name_r,\n a.line_name_zh,\n a.line_name_ko,\n a.line_color_c\n FROM line_aliases AS la\n JOIN aliases AS a ON la.alias_cd = a.id\n WHERE la.station_cd = $1\n LIMIT 1\n ) AS alias_data ON alias_data.station_cd = s.station_cd\n WHERE l.line_cd = s.line_cd", + "query": "SELECT l.line_cd,\n l.company_cd,\n l.line_type,\n l.line_symbol1,\n l.line_symbol2,\n l.line_symbol3,\n l.line_symbol4,\n l.line_symbol1_color,\n 
l.line_symbol2_color,\n l.line_symbol3_color,\n l.line_symbol4_color,\n l.line_symbol1_shape,\n l.line_symbol2_shape,\n l.line_symbol3_shape,\n l.line_symbol4_shape,\n l.e_status,\n l.e_sort,\n COALESCE(l.average_distance, 0.0)::DOUBLE PRECISION AS average_distance,\n s.station_cd,\n s.station_g_cd,\n sst.line_group_cd,\n sst.type_cd,\n COALESCE(alias_data.line_name, l.line_name) AS line_name,\n COALESCE(alias_data.line_name_k, l.line_name_k) AS line_name_k,\n COALESCE(alias_data.line_name_h, l.line_name_h) AS line_name_h,\n COALESCE(alias_data.line_name_r, l.line_name_r) AS line_name_r,\n COALESCE(alias_data.line_name_zh, l.line_name_zh) AS line_name_zh,\n COALESCE(alias_data.line_name_ko, l.line_name_ko) AS line_name_ko,\n COALESCE(alias_data.line_color_c, l.line_color_c) AS line_color_c,\n l.transport_type\n FROM lines AS l\n JOIN stations AS s ON s.station_cd = $1\n LEFT JOIN station_station_types AS sst ON sst.station_cd = s.station_cd AND sst.pass <> 1\n LEFT JOIN (\n SELECT DISTINCT ON (la.station_cd)\n la.station_cd,\n a.line_name,\n a.line_name_k,\n a.line_name_h,\n a.line_name_r,\n a.line_name_zh,\n a.line_name_ko,\n a.line_color_c\n FROM line_aliases AS la\n JOIN aliases AS a ON la.alias_cd = a.id\n WHERE la.station_cd = $1\n LIMIT 1\n ) AS alias_data ON alias_data.station_cd = s.station_cd\n WHERE l.line_cd = s.line_cd", "describe": { "columns": [ { @@ -147,6 +147,11 @@ "ordinal": 28, "name": "line_color_c", "type_info": "Text" + }, + { + "ordinal": 29, + "name": "transport_type", + "type_info": "Int4" } ], "parameters": { @@ -183,8 +188,9 @@ null, null, null, - null + null, + false ] }, - "hash": "0a1c6e15355fdf6363a083a43fec27a04b91b946ae7fdc192a721f9b6923e690" + "hash": "d3f9188af86adfa1b16ea4ce538299c4e0942aeadc2497c7752b6b98fb78087f" } diff --git a/Cargo.lock b/Cargo.lock index dd2a61ed..6f6d267f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -17,6 +17,23 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + [[package]] name = "aho-corasick" version = "1.1.3" @@ -32,12 +49,30 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + [[package]] name = "anyhow" version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519" +[[package]] +name = "arbitrary" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" +dependencies = [ + "derive_arbitrary", +] + [[package]] name = "async-stream" version = "0.3.5" @@ -114,7 +149,7 @@ dependencies = [ "rustversion", "serde", "sync_wrapper", - "tower", + "tower 0.4.13", "tower-layer", "tower-service", ] @@ -149,7 +184,7 @@ dependencies = [ "cc", "cfg-if", "libc", - "miniz_oxide", + "miniz_oxide 0.7.2", "object", "rustc-demangle", ] @@ -190,6 +225,12 @@ dependencies = [ "generic-array", ] +[[package]] +name = "bumpalo" +version = "3.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" + [[package]] name = "byteorder" version = "1.5.0" @@ -202,12 +243,33 @@ version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +[[package]] +name = "bzip2" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49ecfb22d906f800d4fe833b6282cf4dc1c298f5057ca0b5445e5c209735ca47" +dependencies = [ + "bzip2-sys", +] + +[[package]] +name = "bzip2-sys" +version = "0.1.13+1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225bff33b2141874fe80d71e07d6eec4f85c5c216453dd96388240f96e1acc14" +dependencies = [ + "cc", + "pkg-config", +] + [[package]] name = "cc" version = "1.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fcb57c740ae1daf453ae85f16e37396f672b039e00d9d866e07ddb24e328e3a" dependencies = [ + "jobserver", + "libc", "shlex", ] @@ -217,6 +279,35 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chrono" +version = "0.4.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-traits", + "wasm-bindgen", + "windows-link 0.2.1", +] + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + [[package]] name = "concurrent-queue" 
version = "2.5.0" @@ -232,6 +323,12 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" +[[package]] +name = "constant_time_eq" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" + [[package]] name = "core-foundation" version = "0.9.4" @@ -272,6 +369,15 @@ version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + [[package]] name = "crossbeam-queue" version = "0.3.11" @@ -283,9 +389,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.19" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crypto-common" @@ -325,6 +431,12 @@ dependencies = [ "csv", ] +[[package]] +name = "deflate64" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26bf8fc351c5ed29b5c2f0cbbac1b209b74f60ecd62e675a998df72c49af5204" + [[package]] name = "der" version = "0.7.10" @@ -336,6 +448,26 @@ dependencies = [ "zeroize", ] +[[package]] +name = "deranged" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" +dependencies = [ + "powerfmt", +] + +[[package]] +name = "derive_arbitrary" +version = "1.4.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "digest" version = "0.10.7" @@ -348,6 +480,17 @@ dependencies = [ "subtle", ] +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "dotenv" version = "0.15.0" @@ -425,6 +568,16 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" +[[package]] +name = "flate2" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb" +dependencies = [ + "crc32fast", + "miniz_oxide 0.8.9", +] + [[package]] name = "flume" version = "0.11.1" @@ -561,8 +714,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", ] [[package]] @@ -572,9 +727,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" dependencies = [ "cfg-if", + "js-sys", "libc", "r-efi", "wasi 0.14.2+wasi-0.2.4", + "wasm-bindgen", ] [[package]] @@ -740,6 +897,23 @@ dependencies = [ "want", ] +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + 
"tokio-rustls", + "tower-service", + "webpki-roots 1.0.4", +] + [[package]] name = "hyper-timeout" version = "0.5.2" @@ -772,6 +946,30 @@ dependencies = [ "tracing", ] +[[package]] +name = "iana-time-zone" +version = "0.1.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + [[package]] name = "idna" version = "0.5.0" @@ -802,6 +1000,21 @@ dependencies = [ "hashbrown 0.15.2", ] +[[package]] +name = "inout" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" +dependencies = [ + "generic-array", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + [[package]] name = "itertools" version = "0.12.1" @@ -817,6 +1030,26 @@ version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" +[[package]] +name = "jobserver" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom 0.3.2", + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8" 
+dependencies = [ + "once_cell", + "wasm-bindgen", +] + [[package]] name = "lazy_static" version = "1.4.0" @@ -866,9 +1099,36 @@ dependencies = [ [[package]] name = "log" -version = "0.4.20" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + +[[package]] +name = "lzma-rs" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "297e814c836ae64db86b36cf2a557ba54368d03f6afcd7d947c266692f71115e" +dependencies = [ + "byteorder", + "crc", +] + +[[package]] +name = "lzma-sys" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +checksum = "5fda04ab3764e6cde78b9974eec4f779acaba7c4e84b36eca3cf77c581b85d27" +dependencies = [ + "cc", + "libc", + "pkg-config", +] [[package]] name = "matchit" @@ -888,9 +1148,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.6.4" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" [[package]] name = "mime" @@ -907,6 +1167,16 @@ dependencies = [ "adler", ] +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + [[package]] name = "mio" version = "0.8.11" @@ -963,11 +1233,17 @@ dependencies = [ "num-integer", "num-iter", "num-traits", - "rand", + "rand 0.8.5", "smallvec", "zeroize", ] 
+[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + [[package]] name = "num-integer" version = "0.1.46" @@ -1102,6 +1378,16 @@ dependencies = [ "windows-targets 0.48.5", ] +[[package]] +name = "pbkdf2" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" +dependencies = [ + "digest", + "hmac", +] + [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -1182,9 +1468,15 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.27" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "powerfmt" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" @@ -1264,6 +1556,61 @@ dependencies = [ "prost", ] +[[package]] +name = "quinn" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" +dependencies = [ + "bytes", + "cfg_aliases", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls", + "socket2", + "thiserror 2.0.12", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" +dependencies = [ + "bytes", + "getrandom 0.3.2", + "lru-slab", + "rand 0.9.2", + "ring", + "rustc-hash", + "rustls", + "rustls-pki-types", + "slab", + "thiserror 2.0.12", + 
"tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2", + "tracing", + "windows-sys 0.52.0", +] + [[package]] name = "quote" version = "1.0.37" @@ -1286,8 +1633,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha", - "rand_core", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.3", ] [[package]] @@ -1297,7 +1654,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.3", ] [[package]] @@ -1309,6 +1676,15 @@ dependencies = [ "getrandom 0.2.11", ] +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.2", +] + [[package]] name = "redox_syscall" version = "0.4.1" @@ -1348,31 +1724,95 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" [[package]] -name = "rsa" 
-version = "0.9.8" +name = "reqwest" +version = "0.12.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78928ac1ed176a5ca1d17e578a1825f3d81ca54cf41053a592584b020cfd691b" +checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb" dependencies = [ - "const-oid", - "digest", - "num-bigint-dig", - "num-integer", - "num-traits", - "pkcs1", - "pkcs8", - "rand_core", - "signature", - "spki", - "subtle", - "zeroize", -] - -[[package]] + "base64", + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "once_cell", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls", + "rustls-pemfile", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-rustls", + "tower 0.5.2", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots 0.26.11", + "windows-registry", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.11", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rsa" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78928ac1ed176a5ca1d17e578a1825f3d81ca54cf41053a592584b020cfd691b" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core 0.6.4", + "signature", + "spki", + "subtle", + "zeroize", +] + +[[package]] name = "rustc-demangle" version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" +[[package]] +name = "rustc-hash" 
+version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" + [[package]] name = "rustix" version = "1.0.3" @@ -1386,6 +1826,50 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "rustls" +version = "0.23.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f" +dependencies = [ + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pemfile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21e6f2ab2928ca4291b86736a8bd920a277a399bba1589409d72154ff87c1282" +dependencies = [ + "web-time", + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.15" @@ -1532,9 +2016,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest", - "rand_core", + "rand_core 0.6.4", ] +[[package]] +name = "simd-adler32" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" + [[package]] name = "slab" version = "0.4.9" @@ -1608,6 +2098,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"6a007b6936676aa9ab40207cde35daab0a04b823be8ae004368c0793b96a61e0" dependencies = [ "bytes", + "chrono", "crc", "crossbeam-queue", "either", @@ -1685,6 +2176,7 @@ dependencies = [ "bitflags 2.9.0", "byteorder", "bytes", + "chrono", "crc", "digest", "dotenvy", @@ -1703,7 +2195,7 @@ dependencies = [ "memchr", "once_cell", "percent-encoding", - "rand", + "rand 0.8.5", "rsa", "serde", "sha1", @@ -1726,6 +2218,7 @@ dependencies = [ "base64", "bitflags 2.9.0", "byteorder", + "chrono", "crc", "dotenvy", "etcetera", @@ -1741,7 +2234,7 @@ dependencies = [ "md-5", "memchr", "once_cell", - "rand", + "rand 0.8.5", "serde", "serde_json", "sha2", @@ -1760,6 +2253,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f85ca71d3a5b24e64e1d08dd8fe36c6c95c339a896cc33068148906784620540" dependencies = [ "atoi", + "chrono", "flume", "futures-channel", "futures-core", @@ -1782,9 +2276,11 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "chrono", "csv", "dotenv", "prost", + "reqwest", "serde", "serde_json", "sqlx", @@ -1797,6 +2293,7 @@ dependencies = [ "tonic-web", "tracing", "tracing-subscriber", + "zip", ] [[package]] @@ -1832,6 +2329,9 @@ name = "sync_wrapper" version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] [[package]] name = "tempfile" @@ -1896,6 +2396,25 @@ dependencies = [ "once_cell", ] +[[package]] +name = "time" +version = "0.3.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" +dependencies = [ + "deranged", + "num-conv", + "powerfmt", + "serde", + "time-core", +] + +[[package]] +name = "time-core" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" + [[package]] name = 
"tinyvec" version = "1.6.0" @@ -1941,6 +2460,16 @@ dependencies = [ "syn", ] +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls", + "tokio", +] + [[package]] name = "tokio-stream" version = "0.1.16" @@ -1990,7 +2519,7 @@ dependencies = [ "socket2", "tokio", "tokio-stream", - "tower", + "tower 0.4.13", "tower-layer", "tower-service", "tracing", @@ -2067,7 +2596,7 @@ dependencies = [ "indexmap 1.9.3", "pin-project", "pin-project-lite", - "rand", + "rand 0.8.5", "slab", "tokio", "tokio-util", @@ -2076,6 +2605,21 @@ dependencies = [ "tracing", ] +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + [[package]] name = "tower-http" version = "0.5.2" @@ -2094,15 +2638,15 @@ dependencies = [ [[package]] name = "tower-layer" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-service" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" @@ -2195,6 +2739,12 @@ dependencies = [ "tinyvec", ] +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + [[package]] name = "url" version = "2.5.0" @@ -2254,6 +2804,102 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" +[[package]] +name = "wasm-bindgen" +version = "0.2.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "web-sys" +version = "0.3.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac" +dependencies = [ + "js-sys", + "wasm-bindgen", 
+] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-roots" +version = "0.26.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" +dependencies = [ + "webpki-roots 1.0.4", +] + +[[package]] +name = "webpki-roots" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2878ef029c47c6e8cf779119f20fcf52bde7ad42a731b2a304bc221df17571e" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "whoami" version = "1.5.1" @@ -2286,6 +2932,100 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-core" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link 0.2.1", + "windows-result 0.4.1", + "windows-strings 0.5.1", +] + +[[package]] +name = "windows-implement" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.59.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-link" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-registry" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3" +dependencies = [ + "windows-result 0.3.4", + "windows-strings 0.3.1", + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-result" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +dependencies = [ + "windows-link 0.1.3", +] + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link 0.2.1", +] + +[[package]] +name = "windows-strings" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319" +dependencies = [ + "windows-link 0.1.3", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link 0.2.1", +] + [[package]] name = "windows-sys" version = "0.48.0" @@ -2334,6 +3074,23 @@ dependencies = [ "windows_x86_64_msvc 0.52.4", ] +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link 0.2.1", + "windows_aarch64_gnullvm 0.53.1", + 
"windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" @@ -2346,6 +3103,12 @@ version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + [[package]] name = "windows_aarch64_msvc" version = "0.48.5" @@ -2358,6 +3121,12 @@ version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + [[package]] name = "windows_i686_gnu" version = "0.48.5" @@ -2370,6 +3139,18 @@ version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + [[package]] name = "windows_i686_msvc" version = "0.48.5" @@ -2382,6 +3163,12 @@ version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" +[[package]] +name = "windows_i686_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + [[package]] name = "windows_x86_64_gnu" version = "0.48.5" @@ -2394,6 +3181,12 @@ version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" @@ -2406,6 +3199,12 @@ version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + [[package]] name = "windows_x86_64_msvc" version = "0.48.5" @@ -2418,6 +3217,12 @@ version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + [[package]] name = "wit-bindgen-rt" version = "0.39.0" @@ -2427,8 +3232,101 @@ dependencies = [ "bitflags 2.9.0", ] +[[package]] +name = "xz2" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "388c44dc09d76f1536602ead6d325eb532f5c122f17782bd57fb47baeeb767e2" +dependencies = [ + "lzma-sys", +] + [[package]] 
name = "zeroize" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85a5b4158499876c763cb03bc4e49185d3cccbabb15b33c627f7884f43db852e" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zip" +version = "2.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fabe6324e908f85a1c52063ce7aa26b68dcb7eb6dbc83a2d148403c9bc3eba50" +dependencies = [ + "aes", + "arbitrary", + "bzip2", + "constant_time_eq", + "crc32fast", + "crossbeam-utils", + "deflate64", + "displaydoc", + "flate2", + "getrandom 0.3.2", + "hmac", + "indexmap 2.7.1", + "lzma-rs", + "memchr", + "pbkdf2", + "sha1", + "thiserror 2.0.12", + "time", + "xz2", + "zeroize", + "zopfli", + "zstd", +] + +[[package]] +name = "zopfli" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f05cd8797d63865425ff89b5c4a48804f35ba0ce8d125800027ad6017d2b5249" +dependencies = [ + "bumpalo", + "crc32fast", + "log", + "simd-adler32", +] + +[[package]] +name = "zstd" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "7.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" +dependencies = [ + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.16+zstd.1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" +dependencies = [ + "cc", + "pkg-config", +] 
diff --git a/README.md b/README.md index bbc169a1..0fdf1cce 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,8 @@ The gRPC-Web API for provides nearby japanese train station. ## Documentation - For automation agent and contributor workflows, see [AGENTS.md](AGENTS.md). +- For system architecture and design decisions, see [docs/architecture.md](docs/architecture.md). +- For technical debt analysis and architectural concerns, see [docs/technical_debt.md](docs/technical_debt.md). ## Data Contribution @@ -103,3 +105,8 @@ We follow Rust best practices for testing: - **Integration tests** controlled by feature flags (opt-in when database is available) - **Cargo-native** test execution using standard `cargo test` commands - **Makefile shortcuts** for common testing workflows + +## Data Sources + +- Bus-related data provided by [Tokyo Metropolitan Bureau of Transportation (Toei)](https://www.kotsu.metro.tokyo.jp/), licensed under [CC BY 4.0](https://creativecommons.org/licenses/by/4.0/) +- Station data provided by [駅データ.jp](https://www.ekidata.jp/) diff --git a/compose.yml b/compose.yml index ff86f31f..bf8ee910 100644 --- a/compose.yml +++ b/compose.yml @@ -12,6 +12,7 @@ services: environment: DATABASE_URL: postgresql://stationapi:stationapi@psql/stationapi DISABLE_GRPC_WEB: false + DISABLE_BUS_FEATURE: false HOST: 0.0.0.0 ports: - 50051:50051 @@ -24,5 +25,7 @@ services: POSTGRES_PASSWORD: stationapi POSTGRES_DB: stationapi restart: always + ports: + - 5432:5432 volumes: - ./docker/postgres:/docker-entrypoint-initdb.d:ro \ No newline at end of file diff --git a/data/2!lines.csv b/data/2!lines.csv index 740984e1..36becacc 100644 --- a/data/2!lines.csv +++ b/data/2!lines.csv @@ -177,7 +177,7 @@ line_cd,company_cd,line_name,line_name_k,line_name_h,line_name_r,line_name_rn,li 11701,4,山陰本線,サンインホンセン,山陰本線(米子~益田),San’in Main Line,San’in Main Line,山阴本线,산인 본선,#BAE541,2,,,,,,,,,,,,,0,11701,3948.395125 11702,4,山陰本線,サンインホンセン,山陰本線(益田~下関),San’in Main Line,San’in Main Line,山阴本线,산인 
본선,#BAE541,2,,,,,,,,,,,,,0,11702,3915.767279 11703,4,伯備線,ハクビセン,伯備線,Hakubi Line,Hakubi Line,伯备线,하쿠비선,#FF4500,2,,,,,,,,,,,,,0,11703,4414.982848 -11704,4,因美線,インビセン,因美線,Inbi Line,Inbi Line,因美线,인비선,#000080,2,B,,,,#000080,,,,REVERSED_SQUARE_WEST,,,,0,11704,3525.634867 +11704,4,因美線,インビセン,因美線,Inbi Line,Inbi Line,因美线,인비선,#AA731C,2,B,,,,#AA731C,,,,REVERSED_SQUARE_WEST,,,,0,11704,3525.634867 11705,4,境線,サカイセン,境線,Sakai Line,Sakai Line,境线,사카이선,#00FFFF,2,C,,,,#00FFFF,,,,REVERSED_SQUARE_WEST,,,,0,11705,1188.965154 11706,4,木次線,キスキセン,木次線,Kisuki Line,Kisuki Line,木次线,기스키선,#339966,2,E,,,,#339966,,,,REVERSED_SQUARE_WEST,,,,0,11706,3810.633068 11707,4,三江線,サンコウセン,三江線,Sanko Line,Sanko Line,三江线,산코선,#C9090B,2,,,,,,,,,,,,,2,11707,2733.853709 @@ -454,7 +454,7 @@ line_cd,company_cd,line_name,line_name_k,line_name_h,line_name_r,line_name_rn,li 99402,155,北越急行ほくほく線,ホクエツキュウコウホクホクセン,北越急行ほくほく線,Hokuhoku Line,Hokuhoku Line,,,#CC3366,2,,,,,,,,,,,,,0,99402,5225.959771 99403,156,しなの鉄道線,シナノテツドウセン,しなの鉄道線,Shinano Railway Line,Shinano Railway Line,,,#F0A401,2,,,,,,,,,,,,,0,99403,3245.235758 99427,156,北しなの線,キタシナノセン,北しなの線,Kita-Shinano Line,Kita-Shinano Line,,,#FEC9FE,2,,,,,,,,,,,,,0,99403,4478.301701 -99404,157,上田電鉄別所線,ウエダデンテツベッショセン,上田電鉄別所線,Ueda Electric Railway Bessho Line,Ueda Electric Railway Bessho Line,,,#E0F3C7,2,BE,,,,#0B2846,,,,REVERSED_ROUND,,,,0,99404,786.3767532 +99404,157,上田電鉄別所線,ウエダデンテツベッショセン,上田電鉄別所線,Ueda Electric Railway Bessho Line,Ueda Electric Railway Bessho Line,,,#0B2846,2,BE,,,,#0B2846,,,,REVERSED_ROUND,,,,0,99404,786.3767532 99405,158,長野電鉄長野線,ナガノデンテツナガノセン,長野電鉄長野線,Nagano Electric Railway Nagano Line,Nagano Electric Railway Nagano Line,,,#E21919,2,N,,,,#E21919,,,,ROUND,,,,0,99405,1335.199537 99406,158,長野電鉄屋代線,ナガノデンテツヤシロセン,長野電鉄屋代線,Nagano Electric Railway Yashiro Line,Nagano Electric Railway Yashiro Line,,,#E21919,2,Y,,,,#E21919,,,,ROUND,,,,2,99406,1909.79814 99407,159,上高地線,カミコウチセン,アルピコ交通上高地線,Kamikochi Line,Kamikochi Line,,,#465DAA,2,AK,,,,#465DAA,,,,ROUND,,,,0,99407,1086.699348 diff --git 
a/data/6!aliases.csv b/data/6!aliases.csv index 64601975..0622c105 100644 --- a/data/6!aliases.csv +++ b/data/6!aliases.csv @@ -10,4 +10,4 @@ id,line_name,line_name_k,line_name_h,line_name_r,line_name_zh,line_name_ko,line_ 9,神戸市営地下鉄西神・山手線,コウベシエイチカテツセイシン・ヤマテセン,神戸市営地下鉄西神・山手線,Kobe Municipal Subway Seishin-Yamate Line,,,,西神線・山手線を統合 10,中央西線,チュウオウサイセン,中央本線,Chuo West Line,,,, 11,中央東線,チュウオウトウセン,中央本線,Chuo East Line,,,, -12,総武快速線,ソウブカイソクセン,総武本線,Sobu Line,总武线,소부선,#0067C0, \ No newline at end of file +12,総武快速線,ソウブカイソクセン,総武本線,Sobu Line (Rapid Service),总武线,소부선,#0067C0, \ No newline at end of file diff --git a/data/create_table.sql b/data/create_table.sql index 8ebec897..4a763710 100644 --- a/data/create_table.sql +++ b/data/create_table.sql @@ -52,6 +52,19 @@ $$; ALTER TABLE IF EXISTS ONLY public.stations DROP CONSTRAINT IF EXISTS stations_line_cd_fkey; +-- Drop GTFS foreign key constraints before dropping primary keys +ALTER TABLE IF EXISTS ONLY public.gtfs_stops +DROP CONSTRAINT IF EXISTS gtfs_stops_station_cd_fkey; + +ALTER TABLE IF EXISTS ONLY public.gtfs_routes +DROP CONSTRAINT IF EXISTS gtfs_routes_line_cd_fkey; + +ALTER TABLE IF EXISTS ONLY public.gtfs_routes +DROP CONSTRAINT IF EXISTS gtfs_routes_agency_id_fkey; + +ALTER TABLE IF EXISTS ONLY public.gtfs_agencies +DROP CONSTRAINT IF EXISTS gtfs_agencies_company_cd_fkey; + ALTER TABLE IF EXISTS ONLY public.station_station_types DROP CONSTRAINT IF EXISTS station_station_types_type_cd_fkey; @@ -552,3 +565,294 @@ ADD CONSTRAINT stations_line_cd_fkey FOREIGN KEY (line_cd) REFERENCES public.lin -- -- PostgreSQL database dump complete -- + +-- ============================================================ +-- GTFS Bus Integration - Phase 1: Schema Extensions +-- ============================================================ + +-- +-- Add transport_type to stations table (0: Rail, 1: Bus) +-- + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'stations' AND column_name = 'transport_type' 
+ ) THEN + ALTER TABLE public.stations ADD COLUMN transport_type INTEGER DEFAULT 0 NOT NULL; + END IF; +END $$; + +-- +-- Add transport_type to lines table (0: Rail, 1: Bus) +-- + +DO $$ +BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'lines' AND column_name = 'transport_type' + ) THEN + ALTER TABLE public.lines ADD COLUMN transport_type INTEGER DEFAULT 0 NOT NULL; + END IF; +END $$; + +-- +-- Create index for transport_type filtering +-- + +CREATE INDEX IF NOT EXISTS idx_stations_transport_type ON public.stations USING btree (transport_type); +CREATE INDEX IF NOT EXISTS idx_lines_transport_type ON public.lines USING btree (transport_type); + +-- ============================================================ +-- GTFS Tables +-- ============================================================ + +-- +-- Name: gtfs_agencies; Type: TABLE; Schema: public +-- GTFS agency information (bus operators) +-- + +DROP TABLE IF EXISTS public.gtfs_stop_times CASCADE; +DROP TABLE IF EXISTS public.gtfs_trips CASCADE; +DROP TABLE IF EXISTS public.gtfs_calendar_dates CASCADE; +DROP TABLE IF EXISTS public.gtfs_calendar CASCADE; +DROP TABLE IF EXISTS public.gtfs_stops CASCADE; +DROP TABLE IF EXISTS public.gtfs_routes CASCADE; +DROP TABLE IF EXISTS public.gtfs_agencies CASCADE; +DROP TABLE IF EXISTS public.gtfs_shapes CASCADE; +DROP TABLE IF EXISTS public.gtfs_feed_info CASCADE; + +CREATE UNLOGGED TABLE public.gtfs_agencies ( + agency_id VARCHAR(255) PRIMARY KEY, + agency_name TEXT NOT NULL, + agency_name_k TEXT, + agency_name_r TEXT, + agency_name_zh TEXT, + agency_name_ko TEXT, + agency_url TEXT, + agency_timezone VARCHAR(50) DEFAULT 'Asia/Tokyo', + agency_lang VARCHAR(10) DEFAULT 'ja', + agency_phone TEXT, + agency_fare_url TEXT, + company_cd INTEGER REFERENCES public.companies(company_cd) +); + +ALTER TABLE public.gtfs_agencies OWNER TO stationapi; + +-- +-- Name: gtfs_routes; Type: TABLE; Schema: public +-- GTFS route information (bus lines) +-- + 
+CREATE UNLOGGED TABLE public.gtfs_routes ( + route_id VARCHAR(255) PRIMARY KEY, + agency_id VARCHAR(255) REFERENCES public.gtfs_agencies(agency_id), + route_short_name TEXT, + route_long_name TEXT, + route_long_name_k TEXT, + route_long_name_r TEXT, + route_long_name_zh TEXT, + route_long_name_ko TEXT, + route_desc TEXT, + route_type INTEGER NOT NULL DEFAULT 3, -- 3 = Bus + route_url TEXT, + route_color VARCHAR(6), + route_text_color VARCHAR(6), + route_sort_order INTEGER, + line_cd INTEGER REFERENCES public.lines(line_cd) +); + +ALTER TABLE public.gtfs_routes OWNER TO stationapi; + +CREATE INDEX idx_gtfs_routes_agency_id ON public.gtfs_routes USING btree (agency_id); +CREATE INDEX idx_gtfs_routes_line_cd ON public.gtfs_routes USING btree (line_cd); + +-- +-- Name: gtfs_stops; Type: TABLE; Schema: public +-- GTFS stop information (bus stops) +-- + +CREATE UNLOGGED TABLE public.gtfs_stops ( + stop_id VARCHAR(255) PRIMARY KEY, + stop_code VARCHAR(50), + stop_name TEXT NOT NULL, + stop_name_k TEXT, + stop_name_r TEXT, + stop_name_zh TEXT, + stop_name_ko TEXT, + stop_desc TEXT, + stop_lat DOUBLE PRECISION NOT NULL, + stop_lon DOUBLE PRECISION NOT NULL, + zone_id VARCHAR(255), + stop_url TEXT, + location_type INTEGER DEFAULT 0, -- 0: stop, 1: station + parent_station VARCHAR(255), + stop_timezone VARCHAR(50), + wheelchair_boarding INTEGER, + platform_code VARCHAR(50), + station_cd INTEGER REFERENCES public.stations(station_cd) +); + +ALTER TABLE public.gtfs_stops OWNER TO stationapi; + +CREATE INDEX idx_gtfs_stops_station_cd ON public.gtfs_stops USING btree (station_cd); +CREATE INDEX idx_gtfs_stops_parent_station ON public.gtfs_stops USING btree (parent_station); + +DO $$ +BEGIN + BEGIN + EXECUTE 'CREATE INDEX IF NOT EXISTS idx_gtfs_stops_point ON public.gtfs_stops USING gist ((point(stop_lat, stop_lon)))'; + EXCEPTION + WHEN undefined_object THEN + RAISE NOTICE 'Skipping GiST point index for gtfs_stops; required operator class is unavailable.'; + WHEN 
insufficient_privilege THEN + RAISE NOTICE 'Skipping GiST point index for gtfs_stops; insufficient privileges.'; + END; +END $$; + +DO $$ +BEGIN + BEGIN + EXECUTE 'CREATE INDEX IF NOT EXISTS idx_gtfs_stops_name_trgm ON public.gtfs_stops USING gin (stop_name gin_trgm_ops)'; + EXECUTE 'CREATE INDEX IF NOT EXISTS idx_gtfs_stops_name_k_trgm ON public.gtfs_stops USING gin (stop_name_k gin_trgm_ops)'; + EXCEPTION + WHEN undefined_object THEN + RAISE NOTICE 'Skipping trigram GIN indexes for gtfs_stops; gin_trgm_ops operator class is unavailable.'; + WHEN insufficient_privilege THEN + RAISE NOTICE 'Skipping trigram GIN indexes for gtfs_stops; insufficient privileges.'; + END; +END $$; + +-- +-- Name: gtfs_calendar; Type: TABLE; Schema: public +-- GTFS calendar (service schedules) +-- + +CREATE UNLOGGED TABLE public.gtfs_calendar ( + service_id VARCHAR(255) PRIMARY KEY, + monday BOOLEAN NOT NULL DEFAULT FALSE, + tuesday BOOLEAN NOT NULL DEFAULT FALSE, + wednesday BOOLEAN NOT NULL DEFAULT FALSE, + thursday BOOLEAN NOT NULL DEFAULT FALSE, + friday BOOLEAN NOT NULL DEFAULT FALSE, + saturday BOOLEAN NOT NULL DEFAULT FALSE, + sunday BOOLEAN NOT NULL DEFAULT FALSE, + start_date DATE NOT NULL, + end_date DATE NOT NULL +); + +ALTER TABLE public.gtfs_calendar OWNER TO stationapi; + +-- +-- Name: gtfs_calendar_dates; Type: TABLE; Schema: public +-- GTFS calendar dates (service exceptions) +-- + +CREATE UNLOGGED TABLE public.gtfs_calendar_dates ( + id SERIAL PRIMARY KEY, + service_id VARCHAR(255) NOT NULL, + date DATE NOT NULL, + exception_type INTEGER NOT NULL -- 1: added, 2: removed +); + +ALTER TABLE public.gtfs_calendar_dates OWNER TO stationapi; + +CREATE INDEX idx_gtfs_calendar_dates_service_id ON public.gtfs_calendar_dates USING btree (service_id); +CREATE INDEX idx_gtfs_calendar_dates_date ON public.gtfs_calendar_dates USING btree (date); + +-- +-- Name: gtfs_trips; Type: TABLE; Schema: public +-- GTFS trip information +-- + +CREATE UNLOGGED TABLE public.gtfs_trips ( + trip_id 
VARCHAR(255) PRIMARY KEY, + route_id VARCHAR(255) NOT NULL REFERENCES public.gtfs_routes(route_id), + service_id VARCHAR(255) NOT NULL, + trip_headsign TEXT, + trip_headsign_k TEXT, + trip_headsign_r TEXT, + trip_short_name TEXT, + direction_id INTEGER, -- 0: outbound, 1: inbound + block_id VARCHAR(255), + shape_id VARCHAR(255), + wheelchair_accessible INTEGER, + bikes_allowed INTEGER +); + +ALTER TABLE public.gtfs_trips OWNER TO stationapi; + +CREATE INDEX idx_gtfs_trips_route_id ON public.gtfs_trips USING btree (route_id); +CREATE INDEX idx_gtfs_trips_service_id ON public.gtfs_trips USING btree (service_id); +CREATE INDEX idx_gtfs_trips_shape_id ON public.gtfs_trips USING btree (shape_id); + +-- +-- Name: gtfs_stop_times; Type: TABLE; Schema: public +-- GTFS stop times (timetable) +-- + +CREATE UNLOGGED TABLE public.gtfs_stop_times ( + id SERIAL PRIMARY KEY, + trip_id VARCHAR(255) NOT NULL REFERENCES public.gtfs_trips(trip_id), + arrival_time TEXT, -- GTFS allows times > 24:00 (e.g., "25:30:00") + departure_time TEXT, + stop_id VARCHAR(255) NOT NULL REFERENCES public.gtfs_stops(stop_id), + stop_sequence INTEGER NOT NULL, + stop_headsign TEXT, + pickup_type INTEGER DEFAULT 0, + drop_off_type INTEGER DEFAULT 0, + shape_dist_traveled DOUBLE PRECISION, + timepoint INTEGER DEFAULT 1 +); + +ALTER TABLE public.gtfs_stop_times OWNER TO stationapi; + +CREATE INDEX idx_gtfs_stop_times_trip_id ON public.gtfs_stop_times USING btree (trip_id); +CREATE INDEX idx_gtfs_stop_times_stop_id ON public.gtfs_stop_times USING btree (stop_id); +CREATE INDEX idx_gtfs_stop_times_arrival_time ON public.gtfs_stop_times USING btree (arrival_time); +CREATE UNIQUE INDEX idx_gtfs_stop_times_trip_stop_seq ON public.gtfs_stop_times USING btree (trip_id, stop_sequence); + +-- +-- Name: gtfs_shapes; Type: TABLE; Schema: public +-- GTFS shapes (route geometry) +-- + +CREATE UNLOGGED TABLE public.gtfs_shapes ( + id SERIAL PRIMARY KEY, + shape_id VARCHAR(255) NOT NULL, + shape_pt_lat DOUBLE PRECISION 
NOT NULL, + shape_pt_lon DOUBLE PRECISION NOT NULL, + shape_pt_sequence INTEGER NOT NULL, + shape_dist_traveled DOUBLE PRECISION +); + +ALTER TABLE public.gtfs_shapes OWNER TO stationapi; + +CREATE INDEX idx_gtfs_shapes_shape_id ON public.gtfs_shapes USING btree (shape_id); +CREATE UNIQUE INDEX idx_gtfs_shapes_id_seq ON public.gtfs_shapes USING btree (shape_id, shape_pt_sequence); + +-- +-- Name: gtfs_feed_info; Type: TABLE; Schema: public +-- GTFS feed metadata +-- + +CREATE UNLOGGED TABLE public.gtfs_feed_info ( + id SERIAL PRIMARY KEY, + feed_publisher_name TEXT NOT NULL, + feed_publisher_url TEXT, + feed_lang VARCHAR(10) DEFAULT 'ja', + feed_start_date DATE, + feed_end_date DATE, + feed_version TEXT, + feed_contact_email TEXT, + feed_contact_url TEXT, + imported_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +ALTER TABLE public.gtfs_feed_info OWNER TO stationapi; + +-- ============================================================ +-- End of GTFS Bus Integration Schema +-- ============================================================ diff --git a/docs/architecture.md b/docs/architecture.md new file mode 100644 index 00000000..f8bb9e20 --- /dev/null +++ b/docs/architecture.md @@ -0,0 +1,443 @@ +# StationAPI アーキテクチャドキュメント + +> 最終更新: 2026年1月 + +## 目次 + +- [概要](#概要) +- [レイヤー構造](#レイヤー構造) +- [データベース設計](#データベース設計) +- [gRPC/スキーマ設計](#grpcスキーマ設計) +- [命名規則](#命名規則) +- [キャッシュ戦略](#キャッシュ戦略) +- [データフロー](#データフロー) +- [ディレクトリ構造](#ディレクトリ構造) + +--- + +## 概要 + +StationAPI は日本の鉄道駅情報を提供する gRPC API です。**クリーンアーキテクチャ**に基づいた4層構造を採用し、ビジネスロジックと技術的関心事を明確に分離しています。 + +### 技術スタック + +| 項目 | 技術 | +|------|------| +| 言語 | Rust (Edition 2021) | +| ランタイム | tokio | +| データベース | PostgreSQL 15+ | +| ORM | sqlx (コンパイル時クエリ検証) | +| API | gRPC (tonic) | +| シリアライズ | Protocol Buffers | + +--- + +## レイヤー構造 + +StationAPI は4つの層で構成されています。各層は依存性の方向が内側(Domain)に向かうよう設計されています。 + +```txt +┌─────────────────────────────────────────────────────────┐ +│ Presentation 層 │ +│ (gRPC Controller, エラーハンドリング) │ 
+└─────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────┐ +│ UseCase 層 │ +│ (Interactor, DTO, ビジネスロジック) │ +└─────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────┐ +│ Infrastructure 層 │ +│ (Repository実装, Row構造体, DB接続) │ +└─────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────┐ +│ Domain 層 │ +│ (Entity, Repository Interface, ビジネスルール) │ +└─────────────────────────────────────────────────────────┘ +``` + +### Domain 層 (`src/domain/`) + +**責務**: コアビジネスロジックとデータモデルの定義 + +| ディレクトリ/ファイル | 内容 | +|---------------------|------| +| `entity/` | ドメインエンティティ(Station, Line, TrainType, Company など) | +| `repository/` | リポジトリインターフェース(`async_trait` を使用) | +| `normalize.rs` | テキスト正規化(ひらがな↔カタカナ、全角↔半角変換) | +| `error.rs` | ドメインエラー型(NotFound, InfrastructureError, Unexpected) | + +**設計原則**: +- 外部依存を持たない純粋な Rust コード +- リポジトリは trait として定義し、実装を Infrastructure 層に委譲 +- 多言語対応(日本語、カタカナ、ローマ字、中国語、韓国語) + +### UseCase 層 (`src/use_case/`) + +**責務**: アプリケーションビジネスロジックとデータ変換 + +| ディレクトリ/ファイル | 内容 | +|---------------------|------| +| `interactor/query.rs` | `QueryInteractor` - 主要なユースケース実装(約950行) | +| `traits/query.rs` | `QueryUseCase` トレイト定義(20以上の非同期メソッド) | +| `dto/` | データ変換オブジェクト(Entity ↔ gRPC メッセージ) | +| `error.rs` | ユースケースエラー型 | + +**重要なメソッド**: + +```rust +// update_station_vec_with_attributes (query.rs:169-265) +// - 駅データにライン、会社、列車種別を付加 +// - N+1問題を回避するバッチクエリ設計 +async fn update_station_vec_with_attributes( + &self, + mut stations: Vec, + line_group_id: Option, +) -> Result, UseCaseError> +``` + +### Infrastructure 層 (`src/infrastructure/`) + +**責務**: データ永続化と外部システム連携 + +| ファイル | 内容 | +|---------|------| +| `station_repository.rs` | `StationRow` + `MyStationRepository` 実装 | +| `line_repository.rs` | `LineRow` + `MyLineRepository` 実装 | +| `train_type_repository.rs` | `TrainTypeRow` + 
`MyTrainTypeRepository` 実装 | +| `company_repository.rs` | `CompanyRow` + `MyCompanyRepository` 実装 | + +**設計パターン**: +- 各 Repository は `Arc>` をラップ +- `Internal*Repository` 構造体に実際の SQL 実行を委譲 +- `#[derive(sqlx::FromRow)]` による型安全な Row マッピング + +### Presentation 層 (`src/presentation/`) + +**責務**: 外部 API の公開とリクエスト/レスポンスハンドリング + +| ファイル | 内容 | +|---------|------| +| `controller/grpc.rs` | `MyApi` - 14の gRPC エンドポイント実装 | +| `error.rs` | `PresentationalError` と `tonic::Status` への変換 | + +--- + +## データベース設計 + +### テーブル構成 + +すべてのテーブルは `UNLOGGED` として作成されパフォーマンスを優先しています。 + +| テーブル | 主キー | 概要 | +|---------|-------|------| +| `companies` | company_cd | 鉄道会社情報 | +| `lines` | line_cd | 路線情報 | +| `stations` | station_cd | 駅情報 | +| `types` | id | 列車種別 | +| `station_station_types` | id | 駅と列車種別の関連 | +| `line_aliases` | id | 路線エイリアス | +| `connections` | - | 駅間接続 | +| `aliases` | - | 検索用エイリアス | + +### パフォーマンス最適化 + +```sql +-- 使用している PostgreSQL 拡張 +CREATE EXTENSION IF NOT EXISTS pg_trgm; -- トライグラム検索 +CREATE EXTENSION IF NOT EXISTS btree_gist; -- GiST インデックス + +-- 主要インデックス +CREATE INDEX idx_stations_station_g_cd ON stations(station_g_cd); +CREATE INDEX idx_stations_line_cd ON stations(line_cd); +CREATE INDEX idx_performance_station_name_trgm ON stations + USING gin(station_name gin_trgm_ops); -- あいまい検索用 +``` + +### スキーマ更新時の注意点 + +1. **マイグレーション**: `data/create_table.sql` を更新 +2. **Row 構造体**: 対応する `*Row` 構造体を Infrastructure 層で更新 +3. **Entity**: 必要に応じて Domain 層の Entity を更新 +4. **変換ロジック**: `impl From for Xxx` を更新 +5. 
**DTO**: gRPC メッセージへの変換を `use_case/dto/` で更新 + +--- + +## gRPC/スキーマ設計 + +### サービスエンドポイント + +`stationapi.proto` で14のエンドポイントを定義: + +| カテゴリ | メソッド | +|---------|---------| +| 駅検索 | `GetStationById`, `GetStationByIdList`, `GetStationsByGroupId`, `GetStationsByCoordinates`, `GetStationsByLineId`, `GetStationsByName`, `GetStationsByLineGroupId` | +| 路線検索 | `GetLineById`, `GetLineByIdList`, `GetLinesByName` | +| 経路検索 | `GetRoutes`, `GetRoutesMinimal`, `GetConnectedRoutes` | +| 列車種別 | `GetTrainTypesByStationId`, `GetRouteTypes` | + +### Proto 更新時の注意点 + +1. **後方互換性**: 新フィールドには `optional` キーワードを使用 +2. **ビルド設定**: `build.rs` で `serde` トレイトを追加 +3. **DTO 更新**: `src/use_case/dto/*.rs` のマッピングを更新 +4. **テスト更新**: 新フィールドの統合テストを追加 + +```protobuf +// 後方互換性のある追加例 +message Station { + // 既存フィールド... + optional string new_field = 25; // optional で追加 +} +``` + +--- + +## 命名規則 + +### Row 構造体 vs Entity の区別 + +| 種別 | 場所 | 目的 | 特徴 | +|------|------|------|------| +| **Row** | `infrastructure/*.rs` | DB行の直接マッピング | `#[derive(sqlx::FromRow)]`、DBカラム名と一致 | +| **Entity** | `domain/entity/*.rs` | ドメインモデル | ビジネスロジック、ネスト構造、多言語対応 | + +### Row 構造体 + +```rust +// infrastructure/station_repository.rs +#[derive(sqlx::FromRow, Clone)] +pub struct StationRow { + pub station_cd: i32, // DBカラム名と一致 + pub station_g_cd: i32, + pub station_name: String, + pub line_cd: i32, + // ... 約19フィールド +} +``` + +**特徴**: +- フィールド名は PostgreSQL カラム名と**完全一致**(snake_case) +- データベースネイティブ型を使用: `i32`, `i64`, `f64`, `Option`, `String` +- ロジックを持たない純粋なデータホルダー + +### Entity 構造体 + +```rust +// domain/entity/station.rs +pub struct Station { + pub station_cd: u32, // ビジネス型(符号なし) + pub station_g_cd: u32, + pub station_name: String, + pub line: Option>, // ネスト構造 + pub lines: Vec, // コレクション + pub train_type: Option>, + pub station_numbers: Vec, + // ... 
約66フィールド +} +``` + +**特徴**: +- ビジネスセマンティクスを反映した型(例: `StopCondition` 列挙型) +- ネスト構造を含む(`Option>`, `Vec` など) +- 多言語名をサポート: `station_name_r`(ローマ字), `station_name_zh`(中国語), `station_name_ko`(韓国語) +- `Clone`, `Debug`, `Serialize`, `Deserialize`, `PartialEq` を実装 + +### 変換フロー + +```txt +Database (PostgreSQL) + ↓ +Row (sqlx::FromRow) ← 直接マッピング: StationRow + ↓ +Entity (From) ← 型変換、None初期化: Station + ↓ +Enriched Entity ← UseCase層でネストデータ追加 + ↓ +gRPC Message ← Proto変換: proto::Station + ↓ +Network Response +``` + +--- + +## キャッシュ戦略 + +### 現在の設計: 明示的キャッシュなし + +StationAPI は現時点で明示的なインメモリキャッシュを実装していません。その代わり、以下の最適化戦略を採用しています。 + +### バッチクエリによる暗黙的キャッシュ + +`query.rs:169-265` の `update_station_vec_with_attributes` メソッドでは、N+1問題を回避するためにバッチクエリを使用しています。 + +```rust +// 1. すべての station_g_cd を抽出 +let station_group_ids = stations.iter() + .map(|s| s.station_g_cd as u32) + .collect::>(); + +// 2. 一括クエリで関連データを取得(N+1回避) +let stations_by_group_ids = self + .get_stations_by_group_id_vec(&station_group_ids).await?; +let lines = self + .get_lines_by_station_group_id_vec(&station_group_ids).await?; +let train_types = self + .get_train_types_by_station_id_vec(&station_ids, line_group_id).await?; + +// 3. メモリ上で関連付け(O(1)クエリ/エンリッチメント) +``` + +**結果**: エンリッチメント処理あたり**O(1)クエリ**(N駅に対してN回のクエリではない) + +### HashSet による重複排除 + +`query.rs:223` 付近でインメモリ重複排除を実施: + +```rust +let mut seen_line_cds = std::collections::HashSet::new(); +let lines: Vec = lines + .iter() + .filter(|&l| { + l.station_g_cd.unwrap_or(0) == station.station_g_cd + && seen_line_cds.insert(l.line_cd) // HashSetで重複防止 + }) + .cloned() + .collect(); +``` + +### キャッシュを実装しない理由 + +1. **データ規模**: 日本の鉄道データは比較的小規模(約9,000駅) +2. **更新頻度**: CSV インポートによるデータ更新が前提 +3. **ステートレス設計**: 各リクエストは独立して処理 +4. 
**PostgreSQL の最適化**: インデックスとクエリプランナーによる効率化 + +### 将来の検討事項 + +大規模化や高頻度アクセスが必要な場合: +- `moka` や `lru` クレートによる有界インメモリキャッシュ +- CSV インポート時のキャッシュ無効化 +- `station_g_cd` 単位のタグベース無効化 + +--- + +## データフロー + +### 典型的なリクエストフロー + +```txt +[Client] + │ + ▼ gRPC Request +┌──────────────────────────────────────────────┐ +│ Presentation 層 (grpc.rs) │ +│ └─ MyApi::get_stations_by_id() │ +└──────────────────────────────────────────────┘ + │ + ▼ QueryUseCase メソッド呼び出し +┌──────────────────────────────────────────────┐ +│ UseCase 層 (query.rs) │ +│ ├─ QueryInteractor::get_station_by_id() │ +│ └─ update_station_vec_with_attributes() │ +│ ├─ 駅グループ一括取得 │ +│ ├─ 路線一括取得 │ +│ ├─ 会社一括取得 │ +│ └─ 列車種別一括取得 │ +└──────────────────────────────────────────────┘ + │ + ▼ Repository メソッド呼び出し +┌──────────────────────────────────────────────┐ +│ Infrastructure 層 (station_repository.rs) │ +│ └─ MyStationRepository::find_by_id() │ +│ └─ SQL クエリ実行 (sqlx) │ +└──────────────────────────────────────────────┘ + │ + ▼ Row → Entity 変換 +┌──────────────────────────────────────────────┐ +│ Domain 層 (entity/station.rs) │ +│ └─ impl From for Station │ +└──────────────────────────────────────────────┘ + │ + ▼ Entity → gRPC Message 変換 +┌──────────────────────────────────────────────┐ +│ UseCase 層 (dto/station.rs) │ +│ └─ impl From for proto::Station │ +└──────────────────────────────────────────────┘ + │ + ▼ gRPC Response +[Client] +``` + +### エラー伝播チェーン + +```txt +DomainError (sqlx エラー等) + ↓ ?演算子 +UseCaseError (ユースケース層) + ↓ From トレイト +PresentationalError (プレゼンテーション層) + ↓ Into トレイト +tonic::Status (gRPC ワイヤーフォーマット) +``` + +--- + +## ディレクトリ構造 + +```txt +stationapi/src/ +├── domain/ # コアビジネスロジック +│ ├── entity/ # ドメインエンティティ +│ │ ├── station.rs # Station (66フィールド) +│ │ ├── line.rs # Line (40フィールド) +│ │ ├── train_type.rs # TrainType +│ │ ├── company.rs # Company +│ │ ├── line_symbol.rs # LineSymbol +│ │ └── station_number.rs # StationNumber +│ ├── repository/ # 抽象インターフェース +│ │ ├── station_repository.rs +│ │ ├── line_repository.rs +│ 
│ ├── train_type_repository.rs +│ │ └── company_repository.rs +│ ├── normalize.rs # テキスト正規化 +│ └── error.rs # DomainError +│ +├── use_case/ # アプリケーションロジック +│ ├── interactor/ +│ │ └── query.rs # QueryInteractor (約950行) +│ ├── traits/ +│ │ └── query.rs # QueryUseCase トレイト +│ ├── dto/ # データ変換 +│ │ ├── station.rs +│ │ ├── line.rs +│ │ ├── train_type.rs +│ │ └── company.rs +│ └── error.rs # UseCaseError +│ +├── infrastructure/ # データ永続化 +│ ├── station_repository.rs # StationRow + MyStationRepository +│ ├── line_repository.rs # LineRow + MyLineRepository +│ ├── train_type_repository.rs # TrainTypeRow + MyTrainTypeRepository +│ ├── company_repository.rs # CompanyRow + MyCompanyRepository +│ └── error.rs # InfrastructureError +│ +├── presentation/ # 外部API +│ ├── controller/ +│ │ └── grpc.rs # MyApi (14エンドポイント) +│ └── error.rs # PresentationalError +│ +├── lib.rs # モジュール宣言 +└── main.rs # エントリーポイント +``` + +--- + +## 関連ドキュメント + +- [技術負債分析レポート](./technical_debt.md) +- [リポジトリテストガイド](./repository_testing.md) +- [データ貢献ガイドライン](../data/README.md) diff --git a/docs/gtfs-bus-integration-research.md b/docs/gtfs-bus-integration-research.md new file mode 100644 index 00000000..543c9d1f --- /dev/null +++ b/docs/gtfs-bus-integration-research.md @@ -0,0 +1,549 @@ +# GTFS都営バスデータ導入に関する調査報告書 + +## 概要 + +本ドキュメントは、既存のStationAPI(日本の鉄道駅データを扱うgRPC API)に、GTFSフォーマットの都営バスデータを導入する際の懸念点をまとめたものである。 + +--- + +## 1. 
現在のStationAPIの構造 + +### 1.1 技術スタック + +| 項目 | 技術 | +|------|------| +| 言語 | Rust (edition 2021) | +| 非同期ランタイム | Tokio | +| API | gRPC (Tonic) + gRPC-Web | +| データベース | PostgreSQL 18 | +| ORM | SQLx | + +### 1.2 データモデル + +```text +companies (鉄道会社) + ↓ +lines (路線) + ↓ +stations (駅) + ↓ +station_station_types (駅と列車種別の関連) + ↓ +types (列車種別) +``` + +### 1.3 主要テーブル + +| テーブル | レコード数 | 説明 | +|----------|-----------|------| +| companies | 173 | 鉄道会社情報 | +| lines | 623 | 路線情報 | +| stations | 11,141 | 駅情報 | +| types | 317 | 列車種別 | +| station_station_types | 41,005 | 駅と列車種別の関連 | +| connections | 17,664 | 駅間接続情報 | + +### 1.4 主要なAPIエンドポイント + +- `get_station_by_id` - ID指定で駅取得 +- `get_stations_by_coordinates` - 座標から周辺駅を取得 +- `get_stations_by_line_id` - 路線内の駅を取得 +- `get_stations_by_name` - 駅名検索(複数言語対応) +- `get_train_types_by_station_id` - 駅の列車種別を取得 +- `get_routes` - ルート検索 + +--- + +## 2. GTFSフォーマットの構造 + +### 2.1 標準ファイル構成 + +#### 必須ファイル + +| ファイル | 説明 | +|----------|------| +| agency.txt | 交通事業者情報 | +| stops.txt | 停留所・駅情報 | +| routes.txt | 路線情報 | +| trips.txt | 便(トリップ)情報 | +| stop_times.txt | 停留所での到着・出発時刻 | + +#### 条件付き必須ファイル + +| ファイル | 説明 | +|----------|------| +| calendar.txt | サービス日(週単位のスケジュール) | +| calendar_dates.txt | サービス日の例外 | + +#### オプショナルファイル + +| ファイル | 説明 | +|----------|------| +| shapes.txt | 路線の地理的形状 | +| frequencies.txt | 便の頻度情報 | +| transfers.txt | 乗換情報 | +| translations.txt | 多言語対応 | + +### 2.2 GTFSデータモデル + +```text +agency (事業者) + ↓ +routes (路線) + ↓ +trips (便) ← calendar (サービスカレンダー) + ↓ +stop_times (時刻表) + ↓ +stops (停留所) +``` + +### 2.3 都営バスGTFSデータの特徴 + +- **提供元**: ODPT(公共交通オープンデータセンター) +- **フォーマット**: GTFS-JP(国土交通省標準) +- **多言語対応**: 日本語、英語、中国語、韓国語 +- **リアルタイムデータ**: GTFS-RT形式でバス位置情報を配信 + +--- + +## 3. 
懸念点 + +### 3.1 データモデルの根本的な違い + +#### 概念の比較 + +| 概念 | 鉄道(現在) | バス(GTFS) | 差異 | +|------|-------------|-------------|------| +| 時刻表 | なし | trips + stop_times | **新規追加が必要** | +| 便(Trip) | 存在しない | 核心概念 | **新規追加が必要** | +| サービスカレンダー | 停車条件で簡易対応 | calendar.txtで詳細管理 | **新規追加が必要** | +| 運行パターン | train_type | tripごとに定義 | 設計変更が必要 | + +#### 影響 + +- 時刻表データを扱うための新しいエンティティ(Trip, StopTime, Calendar)の追加が必要 +- 既存の `train_type` モデルではバスの運行パターンを表現しきれない + +--- + +### 3.2 ID体系の衝突リスク + +#### 現在のID体系 + +```rust +station_cd: u32 // 数値型(例: 1130101) +line_cd: u32 // 数値型 +company_cd: u32 // 数値型 +``` + +#### GTFSのID体系 + +```text +stop_id: String // 文字列型(例: "0001_01") +route_id: String // 文字列型 +agency_id: String // 文字列型 +``` + +#### 懸念点 + +- 数値型 vs 文字列型の違いによる型変換の必要性 +- 既存の `station_cd` と GTFS `stop_id` を統一するか分離するかの設計判断 +- グローバル一意性を確保するためのプレフィックス戦略の検討 + +#### 対応案 + +```rust +// 案1: 統一ID型 +enum TransportId { + Rail(u32), + Bus(String), +} + +// 案2: 文字列に統一 +station_id: String // "rail_1130101" or "bus_0001_01" +``` + +--- + +### 3.3 「駅」と「停留所」の概念の違い + +| 属性 | 鉄道駅 | バス停留所 | +|------|--------|-----------| +| 数量 | 約11,000 | 都営バスだけで約4,000以上 | +| 密度 | 比較的疎 | 非常に密集(数百m間隔) | +| グループ化 | `station_g_cd`で統合 | 統合基準が曖昧 | +| 永続性 | 比較的安定 | 頻繁に移設・廃止 | +| 命名規則 | 「○○駅」 | 「○○」「○○前」など多様 | + +#### 懸念点 + +- データ量の大幅増加(約1.5〜2倍) +- 座標検索時のパフォーマンス劣化 +- バス停同士のグループ化ロジックの新規実装 +- 鉄道駅とバス停の乗り換え判定基準 + +--- + +### 3.4 路線の概念の違い + +#### 鉄道路線の特徴 + +- 明確な起点・終点 +- 駅の並び順が固定 +- 路線シンボル(最大4個)で識別 +- `line_type`: 新幹線、在来線、地下鉄、モノレール等 + +#### バス路線の特徴 + +- 循環路線、枝分かれ路線が多い +- 同一路線番号で複数の経路パターン +- 行き先(headsign)による区別が重要 +- 系統番号による管理 + +#### 懸念点 + +- 現在の `lines` テーブルの `line_type` に「バス」を追加するだけでは不十分 +- バス特有の「系統」概念のモデル化 +- 経路パターン(shapes.txt)の保存・活用方法 + +--- + +### 3.5 列車種別 vs 運行パターン + +#### 現在の train_type モデル + +```sql +-- types テーブル +type_cd -- 列車種別コード +type_name -- 種別名(快速、急行等) +color -- 表示色 +direction -- 方向(0:双方向, 1:上り, 2:下り) +kind -- 種別(0:通常, 1:快速, 2:急行等) + +-- 停車条件(pass フィールド) +0: 全停車 +1: 停車なし(通過) +2: 一部停車 +3: 平日のみ +4: 休日のみ +5: 部分停車 +``` + +#### バスの運行パターン 
+ +- 急行・各停の概念が薄い(一部路線を除く) +- 時間帯依存(深夜バス、早朝便等) +- 曜日・祝日による運行有無 +- GTFSでは `trip` 単位 + `calendar` で管理 + +#### 懸念点 + +- 既存の `station_station_types` の設計ではバスの運行パターンを表現困難 +- カレンダーベースの運行管理モデルの新規追加が必要 + +--- + +### 3.6 APIエンドポイントへの影響 + +#### 既存エンドポイントの課題 + +| エンドポイント | 課題 | +|---------------|------| +| `get_station_by_id` | バス停も含めるか?ID体系の違いは? | +| `get_stations_by_coordinates` | バス停の大量返却によるレスポンス肥大化 | +| `get_stations_by_line_id` | バス系統IDの扱い方 | +| `get_stations_by_name` | 「○○バス停」「○○前」等の検索対応 | +| `get_train_types_by_station_id` | バスには適用不可 | +| `get_routes` | 鉄道・バス横断の乗換検索の複雑化 | + +#### 対応案 + +```protobuf +// 案1: フィルタパラメータの追加 +message GetStationsByCoordinatesRequest { + double latitude = 1; + double longitude = 2; + int32 limit = 3; + TransportType transport_type = 4; // RAIL, BUS, ALL +} + +// 案2: バス専用エンドポイントの追加 +service BusStopApi { + rpc GetBusStopById(GetBusStopByIdRequest) returns (BusStopResponse); + rpc GetBusStopsByRouteId(GetBusStopsByRouteIdRequest) returns (MultipleBusStopResponse); +} +``` + +--- + +### 3.7 データベースへの影響 + +#### スキーマ拡張案 + +```sql +-- 案1: GTFSテーブルを別途追加 +CREATE TABLE gtfs_agencies ( + agency_id VARCHAR PRIMARY KEY, + agency_name VARCHAR NOT NULL, + agency_url VARCHAR, + agency_timezone VARCHAR +); + +CREATE TABLE gtfs_stops ( + stop_id VARCHAR PRIMARY KEY, + stop_code VARCHAR, + stop_name VARCHAR NOT NULL, + stop_lat DOUBLE PRECISION, + stop_lon DOUBLE PRECISION, + location_type INT -- 0:停留所, 1:駅 +); + +CREATE TABLE gtfs_routes ( + route_id VARCHAR PRIMARY KEY, + agency_id VARCHAR REFERENCES gtfs_agencies, + route_short_name VARCHAR, + route_long_name VARCHAR, + route_type INT, -- 3:バス + route_color VARCHAR +); + +CREATE TABLE gtfs_trips ( + trip_id VARCHAR PRIMARY KEY, + route_id VARCHAR REFERENCES gtfs_routes, + service_id VARCHAR, + trip_headsign VARCHAR, + direction_id INT +); + +CREATE TABLE gtfs_stop_times ( + trip_id VARCHAR REFERENCES gtfs_trips, + stop_id VARCHAR REFERENCES gtfs_stops, + arrival_time TIME, + departure_time TIME, + stop_sequence 
INT, + PRIMARY KEY (trip_id, stop_sequence) +); + +CREATE TABLE gtfs_calendar ( + service_id VARCHAR PRIMARY KEY, + monday BOOLEAN, + tuesday BOOLEAN, + wednesday BOOLEAN, + thursday BOOLEAN, + friday BOOLEAN, + saturday BOOLEAN, + sunday BOOLEAN, + start_date DATE, + end_date DATE +); +``` + +```sql +-- 案2: 既存テーブルの拡張 +ALTER TABLE stations ADD COLUMN transport_type INT DEFAULT 0; -- 0:鉄道, 1:バス +ALTER TABLE stations ADD COLUMN gtfs_stop_id VARCHAR; +ALTER TABLE lines ADD COLUMN is_bus BOOLEAN DEFAULT FALSE; +ALTER TABLE lines ADD COLUMN gtfs_route_id VARCHAR; +``` + +#### パフォーマンス懸念 + +| 項目 | 現在 | バス追加後(推定) | +|------|------|-------------------| +| stations レコード数 | 11,141 | 15,000〜20,000 | +| インデックスサイズ | - | 1.5〜2倍 | +| stop_times レコード数 | 0 | 数百万〜数千万 | + +--- + +### 3.8 座標検索のパフォーマンス + +#### 現在の実装 + +```sql +-- idx_performance_stations_point インデックス使用 +SELECT * FROM stations +ORDER BY point(lon, lat) <-> point($1, $2) +LIMIT $3; +``` + +#### 懸念点 + +- バス停追加で検索対象が1.5〜2倍に増加 +- 都心部ではバス停が密集(半径500m内に数十箇所) +- 駅とバス停の混在表示の是非 + +#### 対応案 + +```sql +-- transport_type でフィルタリング +SELECT * FROM stations +WHERE transport_type = $4 -- または transport_type IN (...) +ORDER BY point(lon, lat) <-> point($1, $2) +LIMIT $3; + +-- パーティショニングの検討 +CREATE TABLE stations_rail PARTITION OF stations FOR VALUES IN (0); +CREATE TABLE stations_bus PARTITION OF stations FOR VALUES IN (1); +``` + +--- + +### 3.9 データ更新・同期の問題 + +| 項目 | 鉄道データ | GTFSバスデータ | +|------|-----------|---------------| +| 更新頻度 | 年数回(ダイヤ改正時) | 週次〜月次 | +| データソース | 独自収集・手動更新 | ODPT API | +| フォーマット | 独自CSV | GTFS標準(ZIP) | +| 認証 | 不要 | ODPT APIキー必要 | + +#### 必要な追加実装 + +1. **GTFSフィードのダウンロード処理** + - ODPT APIからのデータ取得 + - ZIP解凍・パース処理 + +2. **差分更新ロジック** + - 既存データとの比較 + - 追加・更新・削除の判定 + +3. **バージョン管理** + - フィードバージョンの追跡 + - ロールバック機能 + +4. 
**定期実行基盤** + - cronジョブまたはスケジューラ + - 更新通知・ログ + +--- + +### 3.10 多言語対応の差異 + +#### 現在の多言語フィールド + +```rust +station_name: String, // 日本語 +station_name_k: String, // カタカナ +station_name_r: String, // ローマ字 +station_name_zh: String, // 中国語 +station_name_ko: String, // 韓国語 +``` + +#### GTFSの多言語対応 + +- `translations.txt` でオプショナル対応 +- 都営バスGTFSに全言語が含まれる保証なし + +#### 懸念点 + +- 多言語データの欠損処理(NULLable対応) +- 既存の言語サポートレベルとの整合性 +- ローマ字の自動生成ロジック検討 + +--- + +### 3.11 「乗り換え」の複雑化 + +#### 現在の接続モデル + +```sql +-- connections テーブル +station_cd1 -- 駅コード1 +station_cd2 -- 駅コード2 +distance -- 駅間距離(メートル) +``` + +#### バス導入後の複雑性 + +| 乗り換えパターン | 現在 | バス導入後 | +|-----------------|------|-----------| +| 鉄道 ↔ 鉄道 | 対応済み | 継続 | +| 鉄道 ↔ バス | - | **新規対応必要** | +| バス ↔ バス | - | **新規対応必要** | + +#### 追加考慮事項 + +- 徒歩圏内のバス停グループ化 +- 時刻表ベースの乗り換え可否判定 +- 乗り換え時間の推定 +- GTFSの `transfers.txt` の活用 + +--- + +## 4. 対応アプローチ案 + +### 4.1 アプローチ比較 + +| アプローチ | 概要 | メリット | デメリット | +|-----------|------|---------|-----------| +| **A. 完全分離** | GTFSデータを別DBで管理し、APIも分離 | 既存影響なし、段階的開発可能 | コード重複、統合検索困難 | +| **B. 統合拡張** | 既存スキーマを拡張し、統一APIで提供 | 統一API、乗換検索容易 | 大規模リファクタ、複雑化 | +| **C. アダプタ層** | GTFS標準のまま保持し、変換層を設ける | GTFS標準準拠、外部互換性 | 変換オーバーヘッド | + +### 4.2 推奨アプローチ + +#### 段階的な統合拡張(B案のバリエーション) + +#### Phase 1: 基盤整備 + +- transport_type の導入(鉄道=0, バス=1) +- ID体系の統一検討 +- GTFSパーサーの実装 + +#### Phase 2: バス停留所の導入 + +- stations テーブルの拡張 +- 座標検索の最適化 +- バス停用インデックス追加 + +#### Phase 3: 路線・時刻表の導入 + +- GTFSテーブル群の追加 +- 時刻表検索API追加 +- 運行カレンダー対応 + +#### Phase 4: 統合検索 + +- 鉄道・バス横断の乗換検索 +- 最適経路探索 + +--- + +## 5. まとめ + +### 主要懸念点 + +1. **データモデルの拡張**: 時刻表・便・カレンダーの概念追加が必要 +2. **ID体系**: 数値 vs 文字列、名前空間の衝突回避 +3. **データ量**: バス停追加によるDB肥大化とパフォーマンス +4. **API設計**: 後方互換性 vs 新機能のバランス +5. **更新運用**: GTFSデータの定期取り込みパイプライン +6. **乗り換え検索**: 鉄道・バス横断の複雑なルート検索 + +### 次のステップ + +1. 都営バスGTFSデータの実データ取得・分析 +2. ID体系の統一方針決定 +3. スキーマ設計の詳細化 +4. 
プロトタイプ実装による検証 + +--- + +## 参考資料 + +### GTFS関連 + +- [GTFS.org - General Transit Feed Specification](https://gtfs.org/) +- [GTFS Reference](https://gtfs.org/documentation/schedule/reference/) +- [GTFS.JP - 標準的なバス情報フォーマット](https://www.gtfs.jp/) + +### 都営バス・東京交通データ + +- [公共交通オープンデータセンター (ODPT)](https://www.odpt.org/) +- [東京公共交通オープンデータチャレンジ](https://tokyochallenge.odpt.org/) + +### 国土交通省 + +- [静的バス情報フォーマット(GTFS-JP)仕様書](https://www.mlit.go.jp/sogoseisaku/transport/sosei_transport_tk_000112.html) diff --git a/docs/nearby-bus-stops.md b/docs/nearby-bus-stops.md new file mode 100644 index 00000000..e97b9d1f --- /dev/null +++ b/docs/nearby-bus-stops.md @@ -0,0 +1,104 @@ +# 近傍バス停検索機能 + +鉄道駅から半径300m以内のバス停を同一グループとして返す機能の仕様。 + +## 概要 + +各APIで`transport_type`パラメータを使用して、鉄道駅に加えて近くのバス停を含めるかどうかを制御できる。 + +## パラメータ + +### TransportType + +```protobuf +enum TransportType { + TransportTypeUnspecified = 0; // 鉄道駅 + 近くのバス停を含める + Rail = 1; // 鉄道駅のみ + Bus = 2; // バス停のみ +} +``` + +## 動作仕様 + +| transport_type | 動作 | +|----------------|------| +| **未指定 / Unspecified** | 鉄道駅を取得し、最初の鉄道駅から半径300m以内のバス停も追加して返す | +| **Rail** | 鉄道駅のみを返す | +| **Bus** | 最初の鉄道駅から半径300m以内のバス停のみを返す | + +## 対象API + +| API | 近傍バス停対応 | 備考 | +|-----|---------------|------| +| `GetStationById` | ✅ | | +| `GetStationByIdList` | ✅ | | +| `GetStationsByGroupId` | ✅ | | +| `GetStationsByLineId` | ❌ | 路線の停車駅のみ返す(`transport_type`は無視) | +| `GetStationsByLineGroupId` | ❌ | 路線の停車駅のみ返す(`transport_type`は無視) | +| `GetStationsByCoordinates` | ✅ | | +| `GetStationsByName` | ✅ | | + +**注**: 路線系API(`GetStationsByLineId`、`GetStationsByLineGroupId`)は、路線の停車駅一覧を返すため、近傍バス停を混在させることは意味がありません。これらのAPIでは`transport_type`パラメータは無視されます。 + +## 距離計算 + +- **アルゴリズム**: Haversine公式(地球の曲率を考慮) +- **半径**: 300メートル(定数 `NEARBY_BUS_STOP_RADIUS_METERS`) +- **基準点**: 取得した鉄道駅の最初の1件の座標 + +## 使用例 + +### 鉄道駅 + 近くのバス停を取得 + +```protobuf +// transport_type未指定で鉄道駅と近くのバス停を両方取得 +GetStationByGroupIdRequest { + group_id: 1130201 +} +``` + +### 鉄道駅のみを取得 + +```protobuf 
+GetStationByGroupIdRequest { + group_id: 1130201 + transport_type: Rail +} +``` + +### 近くのバス停のみを取得 + +```protobuf +GetStationByGroupIdRequest { + group_id: 1130201 + transport_type: Bus +} +``` + +## 実装詳細 + +### 関連ファイル + +- `proto/stationapi.proto`: リクエスト定義 +- `src/use_case/interactor/query.rs`: ビジネスロジック +- `src/presentation/controller/grpc.rs`: gRPCコントローラー + +### 定数 + +```rust +// src/use_case/interactor/query.rs +const NEARBY_BUS_STOP_RADIUS_METERS: f64 = 300.0; +``` + +### ヘルパーメソッド + +```rust +/// 指定座標から半径300m以内のバス停を取得 +async fn get_nearby_bus_stops(&self, ref_lat: f64, ref_lon: f64) -> Result<Vec<Station>, UseCaseError> +``` + +## 注意事項 + +- バス停検索は最大50件の候補を取得し、その中から300m以内のものをフィルタリング +- 鉄道駅が存在しない場合、`transport_type: Bus`は空の結果を返す +- 複数の鉄道駅がある場合、最初の1件の座標を基準点として使用 diff --git a/docs/technical_debt.md b/docs/technical_debt.md new file mode 100644 index 00000000..8e27147a --- /dev/null +++ b/docs/technical_debt.md @@ -0,0 +1,324 @@ +# StationAPI 技術負債分析レポート + +> 最終更新: 2025年12月 + +## 目次 + +- [概要](#概要) +- [プロジェクト情報](#プロジェクト情報) +- [高優先度の技術負債](#高優先度の技術負債) +- [中優先度の技術負債](#中優先度の技術負債) +- [低優先度の技術負債](#低優先度の技術負債) +- [良好な点](#良好な点) +- [改善提案](#改善提案) + +--- + +## 概要 + +本ドキュメントは StationAPI プロジェクトの技術負債を分析・整理したものです。技術負債は優先度別に分類され、各項目には該当ファイルと行番号が記載されています。 + +--- + +## プロジェクト情報 + +| 項目 | 内容 | +|------|------| +| 言語 | Rust (Edition 2021) | +| アーキテクチャ | クリーンアーキテクチャ (Domain/UseCase/Infrastructure/Presentation) | +| 主要依存関係 | tokio 1.28.0, sqlx 0.8.3, tonic 0.12.3 | +| コード規模 | 約 10,600 行 (Rust) | +| データ | 8つの CSV ファイル (日本の鉄道データ) | + +--- + +## 高優先度の技術負債 + +### 1. 過大な構造体設計 + +#### Station 構造体 + +- **ファイル**: `stationapi/src/domain/entity/station.rs:8-76` +- **フィールド数**: 64個 +- **問題点**: + - 駅情報、路線情報、列車種別情報が1つの構造体に混在 + - `Line`, `TrainType`, `StationNumber` などの関連データを包含 + - 責務分離が不明確 + - 線号シンボル (`symbol1-4`) と色・形状の組み合わせが手動管理 + +```rust +pub struct Station { + // 駅情報 (station_cd, station_g_cd, station_name, ...) + // 路線情報 (line_cd, line, lines, line_name, line_symbol1, ...)
+ // 列車種別情報 (train_type, type_name, ...) + // 合計64フィールド +} +``` + +#### Line 構造体 + +- **ファイル**: `stationapi/src/domain/entity/line.rs:6-41` +- **フィールド数**: 33個 +- **問題点**: + - `Station` の埋め込み参照を含む (循環参照の可能性) + - `TrainType` の埋め込み参照を含む + - 線号シンボルが4つまで (`line_symbol1-4`) に制限 → スケーラビリティ問題 + +#### StationRow 構造体 + +- **ファイル**: `stationapi/src/infrastructure/station_repository.rs:19-79` +- **フィールド数**: 79個 +- **問題点**: + - 複数テーブルから大量のカラムを JOIN で取得 + - Row 構造体と Entity の変換が複雑 + +#### Clippy 警告の抑制 + +以下の箇所で `#![allow(clippy::too_many_arguments)]` が impl ブロック内で使用されています: + +| ファイル | 構造体 | +|----------|--------| +| `src/domain/entity/station.rs:79` | Station | +| `src/domain/entity/line.rs:43` | Line | +| `src/domain/entity/train_type.rs:25` | TrainType | +| `src/domain/entity/company.rs:20` | Company | + +--- + +### 2. SQL クエリの未最適化 (TODO 対応必須) + +アプリケーション層でデータベースから全データを取得後、メモリ上でフィルタリングを行っている箇所があります。 + +| ファイル | 行番号 | 内容 | +|----------|--------|------| +| `stationapi/src/use_case/interactor/query.rs` | 604 | `// TODO: SQLで同等の処理を行う` - 経路検証がアプリケーション側で実行 | +| `stationapi/src/use_case/interactor/query.rs` | 702 | `// TODO: SQLで同等の処理を行う` - 経路フィルタリングがアプリケーション層で処理 | +| `stationapi/src/use_case/interactor/query.rs` | 843 | `// TODO: 未実装` - `get_connected_stations()` が空配列を返却 | + +```rust +// query.rs:604-610 +// TODO: SQLで同等の処理を行う +let includes_requested_station = stops + .iter() + .any(|stop| stop.group_id == from_station_id || stop.group_id == to_station_id); +``` + +**影響**: パフォーマンス低下の可能性 + +--- + +### 3. 過度な clone() の使用 + +- **ファイル**: `stationapi/src/use_case/interactor/query.rs` +- **clone() 呼び出し回数**: 94回 + +主な箇所: + +| 行番号 | 内容 | +|--------|------| +| 508, 578 | `line.clone()` | +| 209, 244 | `station.clone()`, `station_ref.clone()` | +| 230-231 | ベクタフィルタリング時の clone | + +```rust +// 行230-231: ベクタ操作時のクローン +let mut lines: Vec = lines + .iter() + .filter(|&l| { ... 
}) + .cloned() // <-- 64フィールドの構造体を全てクローン + .collect(); +``` + +**影響**: Station が 64 フィールド × clone → メモリ使用量増加、不要なアロケーション + +--- + +## 中優先度の技術負債 + +### 4. メソッド命名の問題 + +| ファイル | 行番号 | 問題 | +|----------|--------|------| +| `stationapi/src/domain/repository/line_repository.rs` | 23 | `// FIXME: もっとマシな命名` - `get_by_line_group_id_vec_for_routes()` | + +命名規則が不明確で、メソッドの意図が分かりにくい。 + +--- + +### 5. 複雑な SQL クエリ + +- **ファイル**: `stationapi/src/infrastructure/station_repository.rs:950-1088` +- **クエリ長**: 140行以上のマルチレベル CTE (Common Table Expression) + +**問題点**: +- 駅名検索で複数の言語フィールド (`LIKE $2-$6`) をサポート +- 同等の処理が複数メソッドで繰り返される +- クエリの設計意図がドキュメント化されていない + +**繰り返されるクエリパターン**: +- `find_by_id()`: 基本的な単一駅取得 +- `get_by_line_id()`: 路線別駅取得 +- `get_by_station_group_id()`: グループ別駅取得 +- `get_route_stops()`: 経路駅停止条件処理 + +--- + +### 6. 死んだコード (Dead Code) + +```rust +// stationapi/src/infrastructure/station_repository.rs:25 +#[allow(dead_code)] +pub station_name_rn: Option, +``` + +--- + +### 7. ハードコードされた値 + +| ファイル | 行番号 | 値 | 用途 | +|----------|--------|-----|------| +| `stationapi/src/infrastructure/station_repository.rs` | 1494 | `"99991231"` | 閉鎖駅の終了日付 | +| `stationapi/src/domain/normalize.rs` | 8 | `0x60` | Unicode 正規化 | +| `stationapi/src/domain/normalize.rs` | 11, 14 | `0xFEE0` | Unicode 正規化 | + +これらの値は定数として定義し、意味を明確にすべきです。 + +--- + +### 8. マッピング処理の複雑性 + +- **ファイル**: `stationapi/src/use_case/interactor/query.rs:292-349` + +```rust +// 線号シンボル(1-4)を手動で配列に変換 +let line_symbols_raw = [ + &station.line_symbol1, + &station.line_symbol2, + &station.line_symbol3, + &station.line_symbol4, +]; +let station_numbers_raw = [ + station.station_number1.as_deref().unwrap_or_default(), + // ... (4つすべて手動で列挙) +]; +``` + +--- + +## 低優先度の技術負債 + +### 9. 
アーキテクチャドキュメント不足 + +> **ステータス**: ✅ **対応済み** (2026年1月) +> +> [docs/architecture.md](./architecture.md) にて以下を文書化しました。 + +#### 対応済みの領域 + +| 領域 | 対応状況 | +|------|----------| +| アーキテクチャドキュメント | ✅ 4層構造 (Domain/UseCase/Infrastructure/Presentation) の設計思想を文書化 | +| 命名規則 | ✅ Row 構造体と Entity の区別を明確化 | +| キャッシュ戦略 | ✅ バッチクエリによる暗黙的キャッシュと設計判断を文書化 (query.rs:169-265) | +| データフロー | ✅ リクエストフローとエラー伝播チェーンを図示 | + +#### 残存する課題 + +| 領域 | 内容 | +|------|------| +| SQL 設計ドキュメント | 複雑なクエリの使用意図がインラインコメントに留まる | + +--- + +### 10. テスト関連 + +#### 現状 + +- **テスト関数数**: 200個 +- **テスト範囲**: Repository 層中心 +- **テストドキュメント**: `docs/repository_testing.md` + +#### 不足している領域 + +| 領域 | 状態 | +|------|------| +| gRPC コントローラーテスト | `src/presentation/controller/grpc.rs` (353行) がテスト対象外 | +| End-to-End テスト | なし | +| パフォーマンステスト | なし | + +--- + +## 良好な点 + +### セキュリティ + +- **Unsafe コード**: なし +- **SQL インジェクション対策**: sqlx! マクロで型安全 +- **認証・認可**: gRPC レベルで実装あり + +### CI/CD パイプライン + +- **ファイル**: `.github/workflows/ci.yml` +- **実行内容**: + - `cargo check` - コンパイルチェック + - `cargo test` - テスト実行 + - `cargo fmt --check` - コードフォーマット検証 + - `cargo clippy -- -D warnings` - Lint チェック (警告は ERROR) + +### 依存関係 + +| パッケージ | バージョン | 状態 | +|-----------|----------|------| +| tokio | 1.28.0 | 問題なし | +| sqlx | 0.8.3 | 最新近い | +| tonic | 0.12.3 | 最新近い | +| serde | 1.0.189 | 最新 | + +### エラーハンドリング + +- 17個のエラーハンドリングテストが実装済み + +--- + +## 改善提案 + +### 短期改善 + +1. **SQL 最適化**: `get_route_stops` でのフィルタリングを SQL 側に移動 +2. **Clone 削減**: 参照ベースの処理を検討 +3. **命名改善**: `get_by_line_group_id_vec_for_routes()` をより明確な名前に変更 +4. **定数化**: ハードコードされた値を定数として定義 + +### 中期改善 + +1. **Station 構造体リファクタリング** + - `StationCore` (基本情報) と `StationDetails` (関連データ) に分割 +2. **DTO レイヤーの標準化** + - 自動コード生成ツール導入 + - Row → Entity → Protobuf の一貫性確保 +3. **プレゼンテーション層テスト** + - gRPC controller テスト追加 + +### 長期改善 + +1. **パフォーマンス最適化** + - クエリ計画の再検討 + - キャッシング戦略の導入 +2. 
**エラーハンドリング統一** + - domain, use_case, presentation 層での戦略統一 + +--- + +## 優先度別サマリー + +| 優先度 | 項目 | ファイル | 影響 | +|--------|------|---------|------| +| **高** | Station 構造体の設計見直し | `src/domain/entity/station.rs` | 保守性、パフォーマンス | +| **高** | SQL クエリの最適化 (TODO対応) | `src/use_case/interactor/query.rs:604,702` | パフォーマンス | +| **高** | Clone の過度な使用削減 | `src/use_case/interactor/query.rs` | メモリ効率 | +| ~~高~~ | ~~アーキテクチャドキュメント作成~~ | ✅ 対応済み ([docs/architecture.md](./architecture.md)) | オンボーディング、保守性 | +| **中** | Row 構造体のコード生成検討 | `src/infrastructure/*.rs` | メンテナンス性 | +| **中** | メソッド命名の改善 | `src/domain/repository/line_repository.rs:23` | 可読性 | +| **中** | ハードコード値の定数化 | 複数ファイル | 保守性 | +| **低** | get_connected_stations の実装 | `src/use_case/interactor/query.rs:843` | 機能完成度 | +| **低** | UI レイヤーのテスト追加 | `src/presentation/` | テストカバレッジ | diff --git a/stationapi/Cargo.toml b/stationapi/Cargo.toml index 26227987..29a1b36a 100644 --- a/stationapi/Cargo.toml +++ b/stationapi/Cargo.toml @@ -13,6 +13,7 @@ sqlx = { version = "0.8.3", features = [ "postgres", "macros", "derive", + "chrono", ] } tokio = { version = "1.28.0", features = ["full"] } tonic = "0.12.3" @@ -26,6 +27,9 @@ serde_json = "1.0.107" tonic-health = "0.12.3" tonic-reflection = "0.12.3" csv = "1.3.1" +chrono = ">=0.4.20" +reqwest = { version = "0.12.12", default-features = false, features = ["blocking", "rustls-tls"] } +zip = ">=2.3.0" [build-dependencies] tonic-build = "0.12.3" diff --git a/stationapi/proto b/stationapi/proto index ca2cd391..0e77c9f2 160000 --- a/stationapi/proto +++ b/stationapi/proto @@ -1 +1 @@ -Subproject commit ca2cd3914898628f01f714fc2a222390126cdf72 +Subproject commit 0e77c9f216605475d3b8ed7e526df8d8c8d223c6 diff --git a/stationapi/src/config.rs b/stationapi/src/config.rs new file mode 100644 index 00000000..62957a38 --- /dev/null +++ b/stationapi/src/config.rs @@ -0,0 +1,11 @@ +use std::env; + +/// Fetch the DATABASE_URL environment variable. +/// Panics if the variable is not set or is not valid Unicode. 
+pub fn fetch_database_url() -> String { + match env::var("DATABASE_URL") { + Ok(s) => s, + Err(env::VarError::NotPresent) => panic!("$DATABASE_URL is not set."), + Err(env::VarError::NotUnicode(_)) => panic!("$DATABASE_URL should be written in Unicode."), + } +} diff --git a/stationapi/src/domain/entity.rs b/stationapi/src/domain/entity.rs index 64eb6f5c..d981be76 100644 --- a/stationapi/src/domain/entity.rs +++ b/stationapi/src/domain/entity.rs @@ -1,5 +1,6 @@ pub mod company; pub mod connection; +pub mod gtfs; pub mod line; pub mod line_symbol; pub mod station; diff --git a/stationapi/src/domain/entity/gtfs.rs b/stationapi/src/domain/entity/gtfs.rs new file mode 100644 index 00000000..71b657c1 --- /dev/null +++ b/stationapi/src/domain/entity/gtfs.rs @@ -0,0 +1,584 @@ +use serde::{Deserialize, Serialize}; + +/// GTFS Agency (Bus operator) +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub struct GtfsAgency { + pub agency_id: String, + pub agency_name: String, + pub agency_name_k: Option, + pub agency_name_r: Option, + pub agency_name_zh: Option, + pub agency_name_ko: Option, + pub agency_url: Option, + pub agency_timezone: String, + pub agency_lang: Option, + pub agency_phone: Option, + pub agency_fare_url: Option, + pub company_cd: Option, +} + +impl GtfsAgency { + #[allow(clippy::too_many_arguments)] + pub fn new( + agency_id: String, + agency_name: String, + agency_name_k: Option, + agency_name_r: Option, + agency_name_zh: Option, + agency_name_ko: Option, + agency_url: Option, + agency_timezone: String, + agency_lang: Option, + agency_phone: Option, + agency_fare_url: Option, + company_cd: Option, + ) -> Self { + Self { + agency_id, + agency_name, + agency_name_k, + agency_name_r, + agency_name_zh, + agency_name_ko, + agency_url, + agency_timezone, + agency_lang, + agency_phone, + agency_fare_url, + company_cd, + } + } +} + +/// GTFS Route (Bus line) +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub struct GtfsRoute { + pub 
route_id: String, + pub agency_id: Option, + pub route_short_name: Option, + pub route_long_name: Option, + pub route_long_name_k: Option, + pub route_long_name_r: Option, + pub route_long_name_zh: Option, + pub route_long_name_ko: Option, + pub route_desc: Option, + pub route_type: i32, + pub route_url: Option, + pub route_color: Option, + pub route_text_color: Option, + pub route_sort_order: Option, + pub line_cd: Option, +} + +impl GtfsRoute { + #[allow(clippy::too_many_arguments)] + pub fn new( + route_id: String, + agency_id: Option, + route_short_name: Option, + route_long_name: Option, + route_long_name_k: Option, + route_long_name_r: Option, + route_long_name_zh: Option, + route_long_name_ko: Option, + route_desc: Option, + route_type: i32, + route_url: Option, + route_color: Option, + route_text_color: Option, + route_sort_order: Option, + line_cd: Option, + ) -> Self { + Self { + route_id, + agency_id, + route_short_name, + route_long_name, + route_long_name_k, + route_long_name_r, + route_long_name_zh, + route_long_name_ko, + route_desc, + route_type, + route_url, + route_color, + route_text_color, + route_sort_order, + line_cd, + } + } +} + +/// GTFS Stop (Bus stop) +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub struct GtfsStop { + pub stop_id: String, + pub stop_code: Option, + pub stop_name: String, + pub stop_name_k: Option, + pub stop_name_r: Option, + pub stop_name_zh: Option, + pub stop_name_ko: Option, + pub stop_desc: Option, + pub stop_lat: f64, + pub stop_lon: f64, + pub zone_id: Option, + pub stop_url: Option, + pub location_type: Option, + pub parent_station: Option, + pub stop_timezone: Option, + pub wheelchair_boarding: Option, + pub platform_code: Option, + pub station_cd: Option, +} + +impl GtfsStop { + #[allow(clippy::too_many_arguments)] + pub fn new( + stop_id: String, + stop_code: Option, + stop_name: String, + stop_name_k: Option, + stop_name_r: Option, + stop_name_zh: Option, + stop_name_ko: Option, + stop_desc: 
Option, + stop_lat: f64, + stop_lon: f64, + zone_id: Option, + stop_url: Option, + location_type: Option, + parent_station: Option, + stop_timezone: Option, + wheelchair_boarding: Option, + platform_code: Option, + station_cd: Option, + ) -> Self { + Self { + stop_id, + stop_code, + stop_name, + stop_name_k, + stop_name_r, + stop_name_zh, + stop_name_ko, + stop_desc, + stop_lat, + stop_lon, + zone_id, + stop_url, + location_type, + parent_station, + stop_timezone, + wheelchair_boarding, + platform_code, + station_cd, + } + } +} + +/// GTFS Calendar (Service schedule) +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub struct GtfsCalendar { + pub service_id: String, + pub monday: bool, + pub tuesday: bool, + pub wednesday: bool, + pub thursday: bool, + pub friday: bool, + pub saturday: bool, + pub sunday: bool, + pub start_date: String, + pub end_date: String, +} + +impl GtfsCalendar { + #[allow(clippy::too_many_arguments)] + pub fn new( + service_id: String, + monday: bool, + tuesday: bool, + wednesday: bool, + thursday: bool, + friday: bool, + saturday: bool, + sunday: bool, + start_date: String, + end_date: String, + ) -> Self { + Self { + service_id, + monday, + tuesday, + wednesday, + thursday, + friday, + saturday, + sunday, + start_date, + end_date, + } + } + + /// Check if the service runs on a given weekday (0 = Monday, 6 = Sunday) + pub fn runs_on_weekday(&self, weekday: u32) -> bool { + match weekday { + 0 => self.monday, + 1 => self.tuesday, + 2 => self.wednesday, + 3 => self.thursday, + 4 => self.friday, + 5 => self.saturday, + 6 => self.sunday, + _ => false, + } + } +} + +/// GTFS Calendar Date (Service exception) +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub struct GtfsCalendarDate { + pub id: i32, + pub service_id: String, + pub date: String, + pub exception_type: i32, // 1: added, 2: removed +} + +impl GtfsCalendarDate { + pub fn new(id: i32, service_id: String, date: String, exception_type: i32) -> Self { + Self { + 
id, + service_id, + date, + exception_type, + } + } + + /// Check if this exception adds the service on this date + pub fn is_added(&self) -> bool { + self.exception_type == 1 + } + + /// Check if this exception removes the service on this date + pub fn is_removed(&self) -> bool { + self.exception_type == 2 + } +} + +/// GTFS Trip (Single bus trip/journey) +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub struct GtfsTrip { + pub trip_id: String, + pub route_id: String, + pub service_id: String, + pub trip_headsign: Option, + pub trip_headsign_k: Option, + pub trip_headsign_r: Option, + pub trip_short_name: Option, + pub direction_id: Option, + pub block_id: Option, + pub shape_id: Option, + pub wheelchair_accessible: Option, + pub bikes_allowed: Option, +} + +impl GtfsTrip { + #[allow(clippy::too_many_arguments)] + pub fn new( + trip_id: String, + route_id: String, + service_id: String, + trip_headsign: Option, + trip_headsign_k: Option, + trip_headsign_r: Option, + trip_short_name: Option, + direction_id: Option, + block_id: Option, + shape_id: Option, + wheelchair_accessible: Option, + bikes_allowed: Option, + ) -> Self { + Self { + trip_id, + route_id, + service_id, + trip_headsign, + trip_headsign_k, + trip_headsign_r, + trip_short_name, + direction_id, + block_id, + shape_id, + wheelchair_accessible, + bikes_allowed, + } + } +} + +/// GTFS Stop Time (Timetable entry) +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub struct GtfsStopTime { + pub id: i32, + pub trip_id: String, + pub arrival_time: Option, + pub departure_time: Option, + pub stop_id: String, + pub stop_sequence: i32, + pub stop_headsign: Option, + pub pickup_type: Option, + pub drop_off_type: Option, + pub shape_dist_traveled: Option, + pub timepoint: Option, +} + +impl GtfsStopTime { + #[allow(clippy::too_many_arguments)] + pub fn new( + id: i32, + trip_id: String, + arrival_time: Option, + departure_time: Option, + stop_id: String, + stop_sequence: i32, + 
stop_headsign: Option, + pickup_type: Option, + drop_off_type: Option, + shape_dist_traveled: Option, + timepoint: Option, + ) -> Self { + Self { + id, + trip_id, + arrival_time, + departure_time, + stop_id, + stop_sequence, + stop_headsign, + pickup_type, + drop_off_type, + shape_dist_traveled, + timepoint, + } + } +} + +/// GTFS Shape Point (Route geometry point) +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub struct GtfsShapePoint { + pub id: i32, + pub shape_id: String, + pub shape_pt_lat: f64, + pub shape_pt_lon: f64, + pub shape_pt_sequence: i32, + pub shape_dist_traveled: Option, +} + +impl GtfsShapePoint { + pub fn new( + id: i32, + shape_id: String, + shape_pt_lat: f64, + shape_pt_lon: f64, + shape_pt_sequence: i32, + shape_dist_traveled: Option, + ) -> Self { + Self { + id, + shape_id, + shape_pt_lat, + shape_pt_lon, + shape_pt_sequence, + shape_dist_traveled, + } + } +} + +/// GTFS Feed Info (Feed metadata) +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub struct GtfsFeedInfo { + pub id: i32, + pub feed_publisher_name: String, + pub feed_publisher_url: Option, + pub feed_lang: Option, + pub feed_start_date: Option, + pub feed_end_date: Option, + pub feed_version: Option, + pub feed_contact_email: Option, + pub feed_contact_url: Option, + pub imported_at: Option, +} + +impl GtfsFeedInfo { + #[allow(clippy::too_many_arguments)] + pub fn new( + id: i32, + feed_publisher_name: String, + feed_publisher_url: Option, + feed_lang: Option, + feed_start_date: Option, + feed_end_date: Option, + feed_version: Option, + feed_contact_email: Option, + feed_contact_url: Option, + imported_at: Option, + ) -> Self { + Self { + id, + feed_publisher_name, + feed_publisher_url, + feed_lang, + feed_start_date, + feed_end_date, + feed_version, + feed_contact_email, + feed_contact_url, + imported_at, + } + } +} + +/// Transport type enum for distinguishing rail and bus +#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, Default)] 
+#[repr(i32)] +pub enum TransportType { + #[default] + Rail = 0, + Bus = 1, +} + +impl From for TransportType { + fn from(value: i32) -> Self { + match value { + 1 => TransportType::Bus, + _ => TransportType::Rail, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_gtfs_agency_new() { + let agency = GtfsAgency::new( + "toei".to_string(), + "東京都交通局".to_string(), + Some("トウキョウトコウツウキョク".to_string()), + Some("Tokyo Metropolitan Bureau of Transportation".to_string()), + Some("东京都交通局".to_string()), + Some("도쿄도 교통국".to_string()), + Some("https://www.kotsu.metro.tokyo.jp/".to_string()), + "Asia/Tokyo".to_string(), + Some("ja".to_string()), + Some("03-3816-5700".to_string()), + None, + Some(1001), + ); + + assert_eq!(agency.agency_id, "toei"); + assert_eq!(agency.agency_name, "東京都交通局"); + assert_eq!(agency.company_cd, Some(1001)); + } + + #[test] + fn test_gtfs_route_new() { + let route = GtfsRoute::new( + "toei_bus_01".to_string(), + Some("toei".to_string()), + Some("都01".to_string()), + Some("渋谷駅~新橋駅".to_string()), + None, + None, + None, + None, + None, + 3, + None, + Some("FF0000".to_string()), + Some("FFFFFF".to_string()), + Some(1), + None, + ); + + assert_eq!(route.route_id, "toei_bus_01"); + assert_eq!(route.route_short_name, Some("都01".to_string())); + assert_eq!(route.route_type, 3); + } + + #[test] + fn test_gtfs_stop_new() { + let stop = GtfsStop::new( + "stop_001".to_string(), + Some("001".to_string()), + "渋谷駅前".to_string(), + Some("シブヤエキマエ".to_string()), + Some("Shibuya Station".to_string()), + None, + None, + None, + 35.658034, + 139.701636, + None, + None, + Some(0), + None, + None, + Some(1), + None, + None, + ); + + assert_eq!(stop.stop_id, "stop_001"); + assert_eq!(stop.stop_name, "渋谷駅前"); + assert!((stop.stop_lat - 35.658034).abs() < 0.0001); + } + + #[test] + fn test_gtfs_calendar_runs_on_weekday() { + let calendar = GtfsCalendar::new( + "weekday".to_string(), + true, + true, + true, + true, + true, + false, + false, + 
"20240101".to_string(), + "20241231".to_string(), + ); + + assert!(calendar.runs_on_weekday(0)); // Monday + assert!(calendar.runs_on_weekday(4)); // Friday + assert!(!calendar.runs_on_weekday(5)); // Saturday + assert!(!calendar.runs_on_weekday(6)); // Sunday + } + + #[test] + fn test_gtfs_calendar_date_exception_type() { + let added = GtfsCalendarDate::new(1, "service1".to_string(), "20240101".to_string(), 1); + let removed = GtfsCalendarDate::new(2, "service1".to_string(), "20240102".to_string(), 2); + + assert!(added.is_added()); + assert!(!added.is_removed()); + assert!(!removed.is_added()); + assert!(removed.is_removed()); + } + + #[test] + fn test_transport_type_conversion() { + assert_eq!(TransportType::from(0), TransportType::Rail); + assert_eq!(TransportType::from(1), TransportType::Bus); + assert_eq!(TransportType::from(99), TransportType::Rail); // Default to Rail + + assert_eq!(TransportType::Rail as i32, 0); + assert_eq!(TransportType::Bus as i32, 1); + } + + #[test] + fn test_transport_type_default() { + let default_type: TransportType = Default::default(); + assert_eq!(default_type, TransportType::Rail); + } +} diff --git a/stationapi/src/domain/entity/line.rs b/stationapi/src/domain/entity/line.rs index e1b1bcdd..85a2686f 100644 --- a/stationapi/src/domain/entity/line.rs +++ b/stationapi/src/domain/entity/line.rs @@ -1,6 +1,9 @@ use serde::{Deserialize, Serialize}; -use super::{company::Company, line_symbol::LineSymbol, station::Station, train_type::TrainType}; +use super::{ + company::Company, gtfs::TransportType, line_symbol::LineSymbol, station::Station, + train_type::TrainType, +}; #[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] pub struct Line { @@ -37,6 +40,7 @@ pub struct Line { pub station_cd: Option, pub station_g_cd: Option, pub type_cd: Option, + pub transport_type: TransportType, } impl Line { @@ -75,6 +79,7 @@ impl Line { station_g_cd: Option, average_distance: Option, type_cd: Option, + transport_type: TransportType, ) -> 
Self { Self { line_cd, @@ -110,6 +115,7 @@ impl Line { station_g_cd, average_distance, type_cd, + transport_type, } } } @@ -178,6 +184,7 @@ mod tests { Some(1130201), // station_g_cd Some(0.97), // average_distance Some(1), // type_cd + TransportType::Rail, // transport_type ) } @@ -216,6 +223,7 @@ mod tests { None, // station_g_cd None, // average_distance None, // type_cd + TransportType::Rail, // transport_type ) } @@ -403,6 +411,7 @@ mod tests { None, Some(0.97), Some(2), + TransportType::Rail, ); assert_eq!(line.line_symbols.len(), 2); @@ -491,6 +500,7 @@ mod tests { None, Some(515.4), Some(7), + TransportType::Rail, ); assert_eq!(line.line_name, "東海道新幹線"); @@ -538,6 +548,7 @@ mod tests { Some(0), Some(0.0), Some(0), + TransportType::Rail, ); assert_eq!(line.line_name, ""); @@ -586,6 +597,7 @@ mod tests { Some(-1), Some(-1.0), Some(-1), + TransportType::Rail, ); assert_eq!(line.line_cd, -1); @@ -647,6 +659,7 @@ mod tests { None, Some(1.0), type_cd, + TransportType::Rail, ); assert_eq!(line.type_cd, type_cd); @@ -690,6 +703,7 @@ mod tests { Some(7777), Some(123.45), Some(99), + TransportType::Rail, ); // すべてのOptionalフィールドがSomeであることを確認 @@ -760,6 +774,7 @@ mod tests { None, Some(1.0), Some(42), + TransportType::Rail, ); let json = serde_json::to_string(&line_with_type).expect("シリアライゼーションに失敗"); @@ -768,4 +783,52 @@ mod tests { let deserialized: Line = serde_json::from_str(&json).expect("デシリアライゼーションに失敗"); assert_eq!(deserialized.type_cd, Some(42)); } + + #[test] + fn test_line_with_bus_transport_type() { + let bus_line = Line::new( + 99001, // line_cd + 2001, // company_cd + None, // company + "都01系統".to_string(), // line_name + "ト01ケイトウ".to_string(), // line_name_k + "と01けいとう".to_string(), // line_name_h + Some("Toei 01".to_string()), // line_name_r + None, // line_name_zh + None, // line_name_ko + Some("#00A0E9".to_string()), // line_color_c + None, // line_type + vec![], // line_symbols + None, // line_symbol1 + None, // line_symbol2 + None, // line_symbol3 + 
None, // line_symbol4 + None, // line_symbol1_color + None, // line_symbol2_color + None, // line_symbol3_color + None, // line_symbol4_color + None, // line_symbol1_shape + None, // line_symbol2_shape + None, // line_symbol3_shape + None, // line_symbol4_shape + 0, // e_status + 1, // e_sort + None, // station + None, // train_type + None, // line_group_cd + None, // station_cd + None, // station_g_cd + None, // average_distance + None, // type_cd + TransportType::Bus, // transport_type + ); + + assert_eq!(bus_line.line_cd, 99001); + assert_eq!(bus_line.line_name, "都01系統"); + assert_eq!(bus_line.transport_type, TransportType::Bus); + + // JSONシリアライゼーションでtransport_typeが正しく出力されることを確認 + let json = serde_json::to_string(&bus_line).expect("シリアライゼーションに失敗"); + assert!(json.contains("\"transport_type\":\"Bus\"")); // Bus enum variant + } } diff --git a/stationapi/src/domain/entity/station.rs b/stationapi/src/domain/entity/station.rs index e538ef98..c72e0587 100644 --- a/stationapi/src/domain/entity/station.rs +++ b/stationapi/src/domain/entity/station.rs @@ -2,7 +2,10 @@ use serde::{Deserialize, Serialize}; use crate::proto::StopCondition; -use super::{line::Line, station_number::StationNumber, train_type::TrainType as TrainTypeEntity}; +use super::{ + gtfs::TransportType, line::Line, station_number::StationNumber, + train_type::TrainType as TrainTypeEntity, +}; #[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] pub struct Station { @@ -73,6 +76,7 @@ pub struct Station { pub color: Option, pub direction: Option, pub kind: Option, + pub transport_type: TransportType, } impl Station { @@ -142,6 +146,7 @@ impl Station { color: Option, direction: Option, kind: Option, + transport_type: TransportType, ) -> Self { Self { station_cd, @@ -208,6 +213,7 @@ impl Station { color, direction, kind, + transport_type, } } } @@ -261,6 +267,7 @@ mod tests { Some(1130201), // station_g_cd Some(0.97), // average_distance None, // type_cd + TransportType::Rail, // transport_type ) } 
@@ -348,6 +355,7 @@ mod tests { Some("#0066CC".to_string()), // color Some(0), // direction Some(1), // kind + TransportType::Rail, // transport_type ) } @@ -417,6 +425,7 @@ mod tests { None, // color None, // direction None, // kind + TransportType::Rail, // transport_type ) } @@ -734,4 +743,83 @@ mod tests { assert_eq!(station.lines[0].line_cd, 11302); assert_eq!(station.lines[0].line_name, "山手線"); } + + #[test] + fn test_station_with_bus_transport_type() { + let bus_stop = Station::new( + 9000001, // station_cd + 9000001, // station_g_cd + "新宿駅前".to_string(), // station_name + "シンジュクエキマエ".to_string(), // station_name_k + Some("Shinjuku Sta.".to_string()), // station_name_r + None, // station_name_zh + None, // station_name_ko + vec![], // station_numbers + None, // station_number1 + None, // station_number2 + None, // station_number3 + None, // station_number4 + None, // three_letter_code + 99001, // line_cd + None, // line + vec![], // lines + 13, // pref_cd (東京都) + "".to_string(), // post + "".to_string(), // address + 139.700464, // lon + 35.689738, // lat + "".to_string(), // open_ymd + "".to_string(), // close_ymd + 0, // e_status + 1, // e_sort + StopCondition::All, // stop_condition + None, // distance + false, // has_train_types + None, // train_type + None, // company_cd + None, // line_name + None, // line_name_k + None, // line_name_h + None, // line_name_r + None, // line_name_zh + None, // line_name_ko + None, // line_color_c + None, // line_type + None, // line_symbol1 + None, // line_symbol2 + None, // line_symbol3 + None, // line_symbol4 + None, // line_symbol1_color + None, // line_symbol2_color + None, // line_symbol3_color + None, // line_symbol4_color + None, // line_symbol1_shape + None, // line_symbol2_shape + None, // line_symbol3_shape + None, // line_symbol4_shape + None, // line_group_cd + None, // average_distance + None, // pass + None, // type_id + None, // sst_id + None, // type_cd + None, // type_name + None, // type_name_k + None, 
// type_name_r + None, // type_name_zh + None, // type_name_ko + None, // color + None, // direction + None, // kind + TransportType::Bus, // transport_type + ); + + assert_eq!(bus_stop.station_cd, 9000001); + assert_eq!(bus_stop.station_name, "新宿駅前"); + assert_eq!(bus_stop.transport_type, TransportType::Bus); + + // JSONシリアライゼーションでtransport_typeが正しく出力されることを確認 + let json = serde_json::to_string(&bus_stop).expect("シリアライゼーションに失敗"); + assert!(json.contains("\"transport_type\":\"Bus\"")); // Bus enum variant + } } diff --git a/stationapi/src/domain/entity/train_type.rs b/stationapi/src/domain/entity/train_type.rs index f2e83b86..c30e6a68 100644 --- a/stationapi/src/domain/entity/train_type.rs +++ b/stationapi/src/domain/entity/train_type.rs @@ -61,6 +61,7 @@ impl TrainType { #[cfg(test)] mod tests { use super::*; + use crate::domain::entity::gtfs::TransportType; fn create_test_line() -> Line { Line::new( @@ -97,6 +98,7 @@ mod tests { Some(1130201), // station_g_cd Some(0.97), // average_distance None, // type_cd + TransportType::Rail, // transport_type ) } diff --git a/stationapi/src/domain/repository.rs b/stationapi/src/domain/repository.rs index 075b4dfe..c8aefab1 100644 --- a/stationapi/src/domain/repository.rs +++ b/stationapi/src/domain/repository.rs @@ -1,4 +1,5 @@ pub mod company_repository; +pub mod gtfs_repository; pub mod line_repository; pub mod station_repository; pub mod train_type_repository; diff --git a/stationapi/src/domain/repository/gtfs_repository.rs b/stationapi/src/domain/repository/gtfs_repository.rs new file mode 100644 index 00000000..2db10f59 --- /dev/null +++ b/stationapi/src/domain/repository/gtfs_repository.rs @@ -0,0 +1,100 @@ +use async_trait::async_trait; + +use crate::domain::{ + entity::gtfs::{ + GtfsAgency, GtfsCalendar, GtfsCalendarDate, GtfsFeedInfo, GtfsRoute, GtfsShapePoint, + GtfsStop, GtfsStopTime, GtfsTrip, + }, + error::DomainError, +}; + +/// Repository trait for GTFS Agency operations +#[async_trait] +pub trait 
GtfsAgencyRepository: Send + Sync { + async fn find_by_id(&self, agency_id: &str) -> Result, DomainError>; + async fn get_all(&self) -> Result, DomainError>; + async fn get_by_company_cd(&self, company_cd: i32) -> Result, DomainError>; +} + +/// Repository trait for GTFS Route operations +#[async_trait] +pub trait GtfsRouteRepository: Send + Sync { + async fn find_by_id(&self, route_id: &str) -> Result, DomainError>; + async fn get_by_agency_id(&self, agency_id: &str) -> Result, DomainError>; + async fn get_by_line_cd(&self, line_cd: i32) -> Result, DomainError>; + async fn search_by_name( + &self, + name: &str, + limit: Option, + ) -> Result, DomainError>; +} + +/// Repository trait for GTFS Stop operations +#[async_trait] +pub trait GtfsStopRepository: Send + Sync { + async fn find_by_id(&self, stop_id: &str) -> Result, DomainError>; + async fn get_by_station_cd(&self, station_cd: i32) -> Result, DomainError>; + async fn get_by_coordinates( + &self, + latitude: f64, + longitude: f64, + limit: Option, + ) -> Result, DomainError>; + async fn search_by_name( + &self, + name: &str, + limit: Option, + ) -> Result, DomainError>; + async fn get_by_route_id(&self, route_id: &str) -> Result, DomainError>; +} + +/// Repository trait for GTFS Calendar operations +#[async_trait] +pub trait GtfsCalendarRepository: Send + Sync { + async fn find_by_id(&self, service_id: &str) -> Result, DomainError>; + async fn get_active_on_date(&self, date: &str) -> Result, DomainError>; +} + +/// Repository trait for GTFS Calendar Date operations +#[async_trait] +pub trait GtfsCalendarDateRepository: Send + Sync { + async fn get_by_service_id( + &self, + service_id: &str, + ) -> Result, DomainError>; + async fn get_by_date(&self, date: &str) -> Result, DomainError>; +} + +/// Repository trait for GTFS Trip operations +#[async_trait] +pub trait GtfsTripRepository: Send + Sync { + async fn find_by_id(&self, trip_id: &str) -> Result, DomainError>; + async fn get_by_route_id(&self, route_id: 
&str) -> Result, DomainError>; + async fn get_by_service_id(&self, service_id: &str) -> Result, DomainError>; +} + +/// Repository trait for GTFS Stop Time operations +#[async_trait] +pub trait GtfsStopTimeRepository: Send + Sync { + async fn get_by_trip_id(&self, trip_id: &str) -> Result, DomainError>; + async fn get_by_stop_id(&self, stop_id: &str) -> Result, DomainError>; + async fn get_departures_at_stop( + &self, + stop_id: &str, + from_time: &str, + limit: Option, + ) -> Result, DomainError>; +} + +/// Repository trait for GTFS Shape operations +#[async_trait] +pub trait GtfsShapeRepository: Send + Sync { + async fn get_by_shape_id(&self, shape_id: &str) -> Result, DomainError>; +} + +/// Repository trait for GTFS Feed Info operations +#[async_trait] +pub trait GtfsFeedInfoRepository: Send + Sync { + async fn get_latest(&self) -> Result, DomainError>; + async fn get_all(&self) -> Result, DomainError>; +} diff --git a/stationapi/src/domain/repository/line_repository.rs b/stationapi/src/domain/repository/line_repository.rs index 4f1702e6..15c7f16d 100644 --- a/stationapi/src/domain/repository/line_repository.rs +++ b/stationapi/src/domain/repository/line_repository.rs @@ -35,7 +35,7 @@ pub trait LineRepository: Send + Sync + 'static { #[cfg(test)] mod tests { use super::*; - use crate::domain::entity::{company::Company, line_symbol::LineSymbol}; + use crate::domain::entity::{company::Company, gtfs::TransportType, line_symbol::LineSymbol}; use std::collections::HashMap; // テスト用のモック実装 @@ -118,6 +118,7 @@ mod tests { Some(1), Some(1075.968412), Some(0), + TransportType::Rail, ); // 京浜東北線 @@ -155,6 +156,7 @@ mod tests { Some(2), Some(1234.567890), Some(1), + TransportType::Rail, ); // データを格納 diff --git a/stationapi/src/domain/repository/station_repository.rs b/stationapi/src/domain/repository/station_repository.rs index fdd2b346..7e28cc0f 100644 --- a/stationapi/src/domain/repository/station_repository.rs +++ b/stationapi/src/domain/repository/station_repository.rs 
@@ -1,6 +1,9 @@ use async_trait::async_trait; -use crate::domain::{entity::station::Station, error::DomainError}; +use crate::domain::{ + entity::{gtfs::TransportType, station::Station}, + error::DomainError, +}; #[async_trait] pub trait StationRepository: Send + Sync + 'static { @@ -10,6 +13,7 @@ pub trait StationRepository: Send + Sync + 'static { &self, line_id: u32, station_id: Option, + direction_id: Option, ) -> Result, DomainError>; async fn get_by_station_group_id( &self, @@ -24,12 +28,14 @@ pub trait StationRepository: Send + Sync + 'static { latitude: f64, longitude: f64, limit: Option, + transport_type: Option, ) -> Result, DomainError>; async fn get_by_name( &self, station_name: String, limit: Option, from_station_group_id: Option, + transport_type: Option, ) -> Result, DomainError>; async fn get_by_line_group_id(&self, line_group_id: u32) -> Result, DomainError>; async fn get_route_stops( @@ -90,6 +96,7 @@ mod tests { &self, line_id: u32, _station_id: Option, + _direction_id: Option, ) -> Result, DomainError> { let result: Vec = self .stations @@ -131,10 +138,16 @@ mod tests { latitude: f64, longitude: f64, limit: Option, + transport_type: Option, ) -> Result, DomainError> { let mut result: Vec = self .stations .values() + .filter(|station| { + transport_type + .as_ref() + .map_or(true, |tt| station.transport_type == *tt) + }) .map(|station| { let mut s = station.clone(); let distance = ((station.lat - latitude).powi(2) @@ -161,11 +174,17 @@ mod tests { station_name: String, limit: Option, _from_station_group_id: Option, + transport_type: Option, ) -> Result, DomainError> { let mut result: Vec = self .stations .values() - .filter(|station| station.station_name.contains(&station_name)) + .filter(|station| { + station.station_name.contains(&station_name) + && transport_type + .as_ref() + .map_or(true, |tt| station.transport_type == *tt) + }) .cloned() .collect(); @@ -297,6 +316,7 @@ mod tests { Some("#000000".to_string()), Some(0), Some(1), + 
TransportType::Rail, ) } @@ -330,7 +350,7 @@ mod tests { #[tokio::test] async fn test_get_by_line_id() { let repo = MockStationRepository::new(); - let result = repo.get_by_line_id(1001, None).await.unwrap(); + let result = repo.get_by_line_id(1001, None, None).await.unwrap(); assert_eq!(result.len(), 2); // 東京駅と品川駅 assert!(result.iter().all(|s| s.line_cd == 1001)); } @@ -356,7 +376,7 @@ mod tests { let repo = MockStationRepository::new(); // 東京駅付近の座標 let result = repo - .get_by_coordinates(35.681236, 139.767125, Some(2)) + .get_by_coordinates(35.681236, 139.767125, Some(2), None) .await .unwrap(); assert!(result.len() <= 2); @@ -367,7 +387,7 @@ mod tests { async fn test_get_by_name() { let repo = MockStationRepository::new(); let result = repo - .get_by_name("東京".to_string(), None, None) + .get_by_name("東京".to_string(), None, None, None) .await .unwrap(); assert_eq!(result.len(), 1); @@ -378,7 +398,7 @@ mod tests { async fn test_get_by_name_with_limit() { let repo = MockStationRepository::new(); let result = repo - .get_by_name("駅".to_string(), Some(2), None) + .get_by_name("駅".to_string(), Some(2), None, None) .await .unwrap(); assert!(result.len() <= 2); diff --git a/stationapi/src/import.rs b/stationapi/src/import.rs new file mode 100644 index 00000000..23b9a2ed --- /dev/null +++ b/stationapi/src/import.rs @@ -0,0 +1,1790 @@ +//! 
Data import module for CSV and GTFS data + +use csv::{ReaderBuilder, StringRecord}; +use sqlx::{Connection, PgConnection}; +use stationapi::config::fetch_database_url; +use std::collections::HashMap; +use std::io::{Cursor, Read as _}; +use std::path::Path; +use std::{env, fs}; +use tracing::{info, warn}; +use zip::ZipArchive; + +/// Type alias for GTFS trips batch row +type TripBatchRow = ( + String, + String, + String, + Option, + Option, + Option, + Option, + Option, + Option, + Option, +); + +/// Type alias for GTFS stop_times batch row +type StopTimeBatchRow = ( + String, + Option, + Option, + String, + i32, + Option, + Option, + Option, + Option, + Option, +); + +/// Import CSV data from the data directory +pub async fn import_csv() -> Result<(), Box> { + let db_url = fetch_database_url(); + let mut conn = PgConnection::connect(&db_url).await?; + let data_path = Path::new("data"); + + // Ensure required extensions exist before running schema import + sqlx::query("CREATE EXTENSION IF NOT EXISTS pg_trgm") + .execute(&mut conn) + .await?; + + sqlx::query("CREATE EXTENSION IF NOT EXISTS btree_gist") + .execute(&mut conn) + .await?; + + let create_sql_path = data_path.join("create_table.sql"); + let create_sql_content = fs::read(&create_sql_path).map_err(|e| { + tracing::error!("Failed to read create_table.sql: {}", e); + Box::new(e) as Box + })?; + let create_sql: String = String::from_utf8_lossy(&create_sql_content).parse()?; + sqlx::raw_sql(&create_sql).execute(&mut conn).await?; + let entries = fs::read_dir(data_path).map_err(|e| { + tracing::error!("Failed to read data directory: {}", e); + Box::new(e) as Box + })?; + + let mut file_list: Vec<_> = entries + .filter_map(|entry| { + let path = entry.ok()?.path(); + if path.is_file() && path.extension()? 
== "csv" && path.to_string_lossy().contains('!') + { + Some(path.file_name()?.to_string_lossy().into_owned()) + } else { + None + } + }) + .collect(); + file_list.sort(); + + for file_name in &file_list { + let mut rdr = ReaderBuilder::new().from_path(data_path.join(file_name))?; + + let headers_record = rdr.headers()?; + let headers: Vec = headers_record + .into_iter() + .map(|row| row.to_string()) + .collect(); + + let mut csv_data: Vec = Vec::new(); + let records: Vec = rdr.records().filter_map(|row| row.ok()).collect(); + csv_data.extend(records); + + let table_name = match file_name.split('!').nth(1) { + Some(part) => match part.split('.').next() { + Some(name) if !name.is_empty() => name, + _ => { + tracing::warn!("Invalid file name format: {}", file_name); + continue; + } + }, + None => { + tracing::warn!("Invalid file name format: {}", file_name); + continue; + } + }; + + // Skip empty CSV files to avoid generating invalid INSERT statements + if csv_data.is_empty() { + tracing::warn!("Skipping empty CSV file: {}", file_name); + continue; + } + + let mut sql_lines_inner = Vec::new(); + sql_lines_inner.push(format!("INSERT INTO public.{table_name} VALUES ")); + + for (idx, data) in csv_data.iter().enumerate() { + let cols: Vec<_> = data + .iter() + .enumerate() + .filter_map(|(col_idx, col)| { + if headers + .get(col_idx) + .unwrap_or(&String::new()) + .starts_with('#') + { + return None; + } + + if col.is_empty() { + Some("NULL".to_string()) + } else if col == "DEFAULT" { + Some("DEFAULT".to_string()) + } else { + Some(format!( + "'{}'", + col.replace('\'', "''").replace('\\', "\\\\") + )) + } + }) + .collect(); + + let values_part = cols.join(","); + let separator = if idx == csv_data.len() - 1 { + ");" + } else { + ")," + }; + sql_lines_inner.push(format!("({values_part}{separator}")); + } + + sqlx::query(&sql_lines_inner.concat()) + .execute(&mut conn) + .await?; + } + + sqlx::query("ANALYZE;").execute(&mut conn).await?; + + info!("CSV import completed 
successfully."); + + Ok(()) +} + +/// Represents a translation entry from translations.txt +#[derive(Debug, Clone, Default)] +struct Translation { + ja: Option, // Japanese (default) + ja_hrkt: Option, // Hiragana/Katakana + en: Option, // English (used for romanized name) + zh: Option, // Chinese + ko: Option, // Korean +} + +/// GTFS download URL for Toei Bus +const TOEI_BUS_GTFS_URL: &str = + "https://api-public.odpt.org/api/v4/files/Toei/data/ToeiBus-GTFS.zip"; + +/// Download and extract GTFS data from ODPT API +fn download_gtfs() -> Result<(), Box> { + let gtfs_path = Path::new("data/ToeiBus-GTFS"); + + // Skip if directory already exists + if gtfs_path.exists() { + info!("GTFS directory already exists, skipping download."); + return Ok(()); + } + + info!("Downloading GTFS data from ODPT API..."); + + // Download the ZIP file + let response = reqwest::blocking::get(TOEI_BUS_GTFS_URL)?; + + if !response.status().is_success() { + return Err(format!("Failed to download GTFS: HTTP {}", response.status()).into()); + } + + let bytes = response.bytes()?; + info!("Downloaded {} bytes, extracting...", bytes.len()); + + // Create the target directory + fs::create_dir_all(gtfs_path)?; + + // Extract the ZIP file + let cursor = Cursor::new(bytes); + let mut archive = ZipArchive::new(cursor)?; + + for i in 0..archive.len() { + let mut file = archive.by_index(i)?; + let file_name = match file.enclosed_name() { + Some(name) => name.to_owned(), + None => continue, + }; + + // Skip directories and hidden files + if file.is_dir() || file_name.to_string_lossy().starts_with('.') { + continue; + } + + // Get just the file name (strip any directory prefix from ZIP) + let output_name = file_name + .file_name() + .map(|n| n.to_string_lossy().to_string()) + .unwrap_or_else(|| file_name.to_string_lossy().to_string()); + + let output_path = gtfs_path.join(&output_name); + + let mut contents = Vec::new(); + file.read_to_end(&mut contents)?; + fs::write(&output_path, &contents)?; + + 
info!("Extracted: {}", output_name); + } + + info!("GTFS extraction completed."); + Ok(()) +} + +/// Import GTFS data from ToeiBus-GTFS directory +pub async fn import_gtfs() -> Result<(), Box> { + // Check if bus feature is disabled + if is_bus_feature_disabled() { + info!("Bus feature is disabled, skipping GTFS import."); + return Ok(()); + } + + // Download GTFS data if not present (use spawn_blocking to avoid blocking async runtime) + tokio::task::spawn_blocking(download_gtfs) + .await + .map_err(|e| format!("Failed to spawn blocking task: {}", e))? + .map_err(|e| -> Box { e })?; + + let gtfs_path = Path::new("data/ToeiBus-GTFS"); + + if !gtfs_path.exists() { + info!("GTFS directory not found, skipping GTFS import."); + return Ok(()); + } + + let db_url = fetch_database_url(); + let mut conn = PgConnection::connect(&db_url).await?; + + info!("Starting GTFS import from {:?}...", gtfs_path); + + // First, clear existing GTFS data (in reverse order of dependencies) + sqlx::query("DELETE FROM gtfs_stop_times") + .execute(&mut conn) + .await?; + sqlx::query("DELETE FROM gtfs_trips") + .execute(&mut conn) + .await?; + sqlx::query("DELETE FROM gtfs_shapes") + .execute(&mut conn) + .await?; + sqlx::query("DELETE FROM gtfs_calendar_dates") + .execute(&mut conn) + .await?; + sqlx::query("DELETE FROM gtfs_calendar") + .execute(&mut conn) + .await?; + sqlx::query("DELETE FROM gtfs_stops") + .execute(&mut conn) + .await?; + sqlx::query("DELETE FROM gtfs_routes") + .execute(&mut conn) + .await?; + sqlx::query("DELETE FROM gtfs_agencies") + .execute(&mut conn) + .await?; + sqlx::query("DELETE FROM gtfs_feed_info") + .execute(&mut conn) + .await?; + + // Load translations for multi-language support + let translations = load_gtfs_translations(gtfs_path)?; + + // Import agencies + import_gtfs_agencies(&mut conn, gtfs_path).await?; + + // Import routes + import_gtfs_routes(&mut conn, gtfs_path).await?; + + // Import stops with translations + import_gtfs_stops(&mut conn, gtfs_path, 
&translations).await?; + + // Import calendar + import_gtfs_calendar(&mut conn, gtfs_path).await?; + + // Import calendar_dates + import_gtfs_calendar_dates(&mut conn, gtfs_path).await?; + + // Import shapes + import_gtfs_shapes(&mut conn, gtfs_path).await?; + + // Import trips + import_gtfs_trips(&mut conn, gtfs_path).await?; + + // Import stop_times (largest file, needs batch processing) + import_gtfs_stop_times(&mut conn, gtfs_path).await?; + + // Import feed_info + import_gtfs_feed_info(&mut conn, gtfs_path).await?; + + sqlx::query("ANALYZE;").execute(&mut conn).await?; + + info!("GTFS import completed successfully."); + + Ok(()) +} + +/// Load translations from translations.txt +fn load_gtfs_translations( + gtfs_path: &Path, +) -> Result, Box> { + let translations_path = gtfs_path.join("translations.txt"); + let mut translations: HashMap<(String, String), Translation> = HashMap::new(); + + if !translations_path.exists() { + return Ok(translations); + } + + let mut rdr = ReaderBuilder::new().from_path(&translations_path)?; + + for result in rdr.records() { + let record = result?; + // table_name,field_name,language,translation,record_id,record_sub_id,field_value + let table_name = record.get(0).unwrap_or(""); + let field_name = record.get(1).unwrap_or(""); + let language = record.get(2).unwrap_or(""); + let translation_text = record.get(3).unwrap_or(""); + let record_id = record.get(4).unwrap_or(""); + + // Only process stop_name translations for now + if table_name == "stops" && field_name == "stop_name" { + // Store translation for the exact record_id (e.g., "0001-01") + let key = ("stops".to_string(), record_id.to_string()); + let entry = translations.entry(key).or_default(); + + match language { + "ja" => entry.ja = Some(translation_text.to_string()), + "ja-Hrkt" => entry.ja_hrkt = Some(translation_text.to_string()), + "en" => entry.en = Some(translation_text.to_string()), + "zh-Hans" | "zh-Hant" | "zh" => entry.zh = Some(translation_text.to_string()), + 
"ko" => entry.ko = Some(translation_text.to_string()), + _ => {} + } + + // Also store translation for parent stop_id (without suffix like "-01", "-02") + // This allows parent stops (location_type=1) to find translations + if let Some(parent_id) = record_id.rfind('-').map(|pos| &record_id[..pos]) { + let parent_key = ("stops".to_string(), parent_id.to_string()); + // Only insert if not already present (first child's translation wins) + translations + .entry(parent_key) + .or_insert_with(|| Translation { + ja: None, + ja_hrkt: None, + en: None, + zh: None, + ko: None, + }); + let parent_entry = translations + .get_mut(&("stops".to_string(), parent_id.to_string())) + .unwrap(); + match language { + "ja" if parent_entry.ja.is_none() => { + parent_entry.ja = Some(translation_text.to_string()) + } + "ja-Hrkt" if parent_entry.ja_hrkt.is_none() => { + parent_entry.ja_hrkt = Some(translation_text.to_string()) + } + "en" if parent_entry.en.is_none() => { + parent_entry.en = Some(translation_text.to_string()) + } + "zh-Hans" | "zh-Hant" | "zh" if parent_entry.zh.is_none() => { + parent_entry.zh = Some(translation_text.to_string()) + } + "ko" if parent_entry.ko.is_none() => { + parent_entry.ko = Some(translation_text.to_string()) + } + _ => {} + } + } + } + } + + Ok(translations) +} + +/// Import agencies from agency.txt +async fn import_gtfs_agencies( + conn: &mut PgConnection, + gtfs_path: &Path, +) -> Result<(), Box> { + let agency_path = gtfs_path.join("agency.txt"); + if !agency_path.exists() { + warn!("agency.txt not found, skipping agency import."); + return Ok(()); + } + + let mut rdr = ReaderBuilder::new().from_path(&agency_path)?; + + for result in rdr.records() { + let record = result?; + // agency_id,agency_name,agency_url,agency_timezone,agency_lang,agency_phone,agency_fare_url,agency_email + let agency_id = record.get(0).unwrap_or(""); + let agency_name = record.get(1).unwrap_or(""); + let agency_url = record.get(2).filter(|s| !s.is_empty()); + let 
agency_timezone = record.get(3).unwrap_or("Asia/Tokyo"); + let agency_lang = record.get(4).filter(|s| !s.is_empty()); + let agency_phone = record.get(5).filter(|s| !s.is_empty()); + let agency_fare_url = record.get(6).filter(|s| !s.is_empty()); + + sqlx::query( + r#"INSERT INTO gtfs_agencies + (agency_id, agency_name, agency_url, agency_timezone, agency_lang, agency_phone, agency_fare_url) + VALUES ($1, $2, $3, $4, $5, $6, $7) + ON CONFLICT (agency_id) DO NOTHING"#, + ) + .bind(agency_id) + .bind(agency_name) + .bind(agency_url) + .bind(agency_timezone) + .bind(agency_lang) + .bind(agency_phone) + .bind(agency_fare_url) + .execute(&mut *conn) + .await?; + } + + info!("Imported agencies."); + Ok(()) +} + +/// Import routes from routes.txt +async fn import_gtfs_routes( + conn: &mut PgConnection, + gtfs_path: &Path, +) -> Result<(), Box> { + let routes_path = gtfs_path.join("routes.txt"); + if !routes_path.exists() { + warn!("routes.txt not found, skipping routes import."); + return Ok(()); + } + + let mut rdr = ReaderBuilder::new().from_path(&routes_path)?; + + for result in rdr.records() { + let record = result?; + // route_id,agency_id,route_short_name,route_long_name,route_desc,route_type,route_url,route_color,route_text_color,jp_parent_route_id + let route_id = record.get(0).unwrap_or(""); + let agency_id = record.get(1).filter(|s| !s.is_empty()); + let route_short_name = record.get(2).filter(|s| !s.is_empty()); + let route_long_name = record.get(3).filter(|s| !s.is_empty()); + let route_desc = record.get(4).filter(|s| !s.is_empty()); + let route_type: i32 = record.get(5).unwrap_or("3").parse().unwrap_or(3); + let route_url = record.get(6).filter(|s| !s.is_empty()); + let route_color = record.get(7).filter(|s| !s.is_empty()); + let route_text_color = record.get(8).filter(|s| !s.is_empty()); + + sqlx::query( + r#"INSERT INTO gtfs_routes + (route_id, agency_id, route_short_name, route_long_name, route_desc, route_type, route_url, route_color, route_text_color) + 
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) + ON CONFLICT (route_id) DO NOTHING"#, + ) + .bind(route_id) + .bind(agency_id) + .bind(route_short_name) + .bind(route_long_name) + .bind(route_desc) + .bind(route_type) + .bind(route_url) + .bind(route_color) + .bind(route_text_color) + .execute(&mut *conn) + .await?; + } + + info!("Imported routes."); + Ok(()) +} + +/// Import stops from stops.txt with translations +async fn import_gtfs_stops( + conn: &mut PgConnection, + gtfs_path: &Path, + translations: &HashMap<(String, String), Translation>, +) -> Result<(), Box> { + let stops_path = gtfs_path.join("stops.txt"); + if !stops_path.exists() { + warn!("stops.txt not found, skipping stops import."); + return Ok(()); + } + + let mut rdr = ReaderBuilder::new().from_path(&stops_path)?; + let mut count = 0; + + for result in rdr.records() { + let record = result?; + // stop_id,stop_code,stop_name,stop_desc,stop_lat,stop_lon,zone_id,stop_url,location_type,parent_station,stop_timezone,wheelchair_boarding,platform_code,stop_access + let stop_id = record.get(0).unwrap_or(""); + let stop_code = record.get(1).filter(|s| !s.is_empty()); + let stop_name = record.get(2).unwrap_or(""); + let stop_desc = record.get(3).filter(|s| !s.is_empty()); + let stop_lat: f64 = record.get(4).unwrap_or("0").parse().unwrap_or(0.0); + let stop_lon: f64 = record.get(5).unwrap_or("0").parse().unwrap_or(0.0); + let zone_id = record.get(6).filter(|s| !s.is_empty()); + let stop_url = record.get(7).filter(|s| !s.is_empty()); + let location_type: i32 = record.get(8).unwrap_or("0").parse().unwrap_or(0); + let parent_station = record.get(9).filter(|s| !s.is_empty()); + let stop_timezone = record.get(10).filter(|s| !s.is_empty()); + let wheelchair_boarding: Option = record + .get(11) + .filter(|s| !s.is_empty()) + .and_then(|s| s.parse().ok()); + let platform_code = record.get(12).filter(|s| !s.is_empty()); + + // Get translations (keyed by both child stop ID and parent stop ID) + let key = 
("stops".to_string(), stop_id.to_string()); + let translation = translations.get(&key); + + let stop_name_k = translation.and_then(|t| t.ja_hrkt.clone()); + let stop_name_r = translation.and_then(|t| t.en.clone()); + let stop_name_zh = translation.and_then(|t| t.zh.clone()); + let stop_name_ko = translation.and_then(|t| t.ko.clone()); + + sqlx::query( + r#"INSERT INTO gtfs_stops + (stop_id, stop_code, stop_name, stop_name_k, stop_name_r, stop_name_zh, stop_name_ko, + stop_desc, stop_lat, stop_lon, zone_id, stop_url, location_type, parent_station, + stop_timezone, wheelchair_boarding, platform_code) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17) + ON CONFLICT (stop_id) DO NOTHING"#, + ) + .bind(stop_id) + .bind(stop_code) + .bind(stop_name) + .bind(stop_name_k) + .bind(stop_name_r) + .bind(stop_name_zh) + .bind(stop_name_ko) + .bind(stop_desc) + .bind(stop_lat) + .bind(stop_lon) + .bind(zone_id) + .bind(stop_url) + .bind(location_type) + .bind(parent_station) + .bind(stop_timezone) + .bind(wheelchair_boarding) + .bind(platform_code) + .execute(&mut *conn) + .await?; + + count += 1; + } + + info!("Imported {} stops.", count); + Ok(()) +} + +/// Import calendar from calendar.txt +async fn import_gtfs_calendar( + conn: &mut PgConnection, + gtfs_path: &Path, +) -> Result<(), Box> { + let calendar_path = gtfs_path.join("calendar.txt"); + if !calendar_path.exists() { + warn!("calendar.txt not found, skipping calendar import."); + return Ok(()); + } + + let mut rdr = ReaderBuilder::new().from_path(&calendar_path)?; + + for result in rdr.records() { + let record = result?; + // service_id,monday,tuesday,wednesday,thursday,friday,saturday,sunday,start_date,end_date + let service_id = record.get(0).unwrap_or(""); + let monday: bool = record.get(1).unwrap_or("0") == "1"; + let tuesday: bool = record.get(2).unwrap_or("0") == "1"; + let wednesday: bool = record.get(3).unwrap_or("0") == "1"; + let thursday: bool = 
record.get(4).unwrap_or("0") == "1"; + let friday: bool = record.get(5).unwrap_or("0") == "1"; + let saturday: bool = record.get(6).unwrap_or("0") == "1"; + let sunday: bool = record.get(7).unwrap_or("0") == "1"; + let start_date = record.get(8).unwrap_or(""); + let end_date = record.get(9).unwrap_or(""); + + // Parse dates (format: YYYYMMDD) + let start_date = chrono::NaiveDate::parse_from_str(start_date, "%Y%m%d")?; + let end_date = chrono::NaiveDate::parse_from_str(end_date, "%Y%m%d")?; + + sqlx::query( + r#"INSERT INTO gtfs_calendar + (service_id, monday, tuesday, wednesday, thursday, friday, saturday, sunday, start_date, end_date) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) + ON CONFLICT (service_id) DO NOTHING"#, + ) + .bind(service_id) + .bind(monday) + .bind(tuesday) + .bind(wednesday) + .bind(thursday) + .bind(friday) + .bind(saturday) + .bind(sunday) + .bind(start_date) + .bind(end_date) + .execute(&mut *conn) + .await?; + } + + info!("Imported calendar."); + Ok(()) +} + +/// Import calendar_dates from calendar_dates.txt +async fn import_gtfs_calendar_dates( + conn: &mut PgConnection, + gtfs_path: &Path, +) -> Result<(), Box> { + let calendar_dates_path = gtfs_path.join("calendar_dates.txt"); + if !calendar_dates_path.exists() { + warn!("calendar_dates.txt not found, skipping calendar_dates import."); + return Ok(()); + } + + let mut rdr = ReaderBuilder::new().from_path(&calendar_dates_path)?; + let mut count = 0; + + for result in rdr.records() { + let record = result?; + // service_id,date,exception_type + let service_id = record.get(0).unwrap_or(""); + let date = record.get(1).unwrap_or(""); + let exception_type: i32 = record.get(2).unwrap_or("1").parse().unwrap_or(1); + + let date = chrono::NaiveDate::parse_from_str(date, "%Y%m%d")?; + + sqlx::query( + r#"INSERT INTO gtfs_calendar_dates (service_id, date, exception_type) + VALUES ($1, $2, $3)"#, + ) + .bind(service_id) + .bind(date) + .bind(exception_type) + .execute(&mut *conn) + .await?; + + 
count += 1; + } + + info!("Imported {} calendar_dates.", count); + Ok(()) +} + +/// Import shapes from shapes.txt +async fn import_gtfs_shapes( + conn: &mut PgConnection, + gtfs_path: &Path, +) -> Result<(), Box> { + let shapes_path = gtfs_path.join("shapes.txt"); + if !shapes_path.exists() { + warn!("shapes.txt not found, skipping shapes import."); + return Ok(()); + } + + let mut rdr = ReaderBuilder::new().from_path(&shapes_path)?; + let mut batch: Vec<(String, f64, f64, i32, Option)> = Vec::new(); + let batch_size = 5000; + + for result in rdr.records() { + let record = result?; + // shape_id,shape_pt_lat,shape_pt_lon,shape_pt_sequence,shape_dist_traveled + let shape_id = record.get(0).unwrap_or("").to_string(); + let shape_pt_lat: f64 = record.get(1).unwrap_or("0").parse().unwrap_or(0.0); + let shape_pt_lon: f64 = record.get(2).unwrap_or("0").parse().unwrap_or(0.0); + let shape_pt_sequence: i32 = record.get(3).unwrap_or("0").parse().unwrap_or(0); + let shape_dist_traveled: Option = record + .get(4) + .filter(|s| !s.is_empty()) + .and_then(|s| s.parse().ok()); + + batch.push(( + shape_id, + shape_pt_lat, + shape_pt_lon, + shape_pt_sequence, + shape_dist_traveled, + )); + + if batch.len() >= batch_size { + insert_shapes_batch(&mut *conn, &batch).await?; + batch.clear(); + } + } + + // Insert remaining + if !batch.is_empty() { + insert_shapes_batch(&mut *conn, &batch).await?; + } + + info!("Imported shapes."); + Ok(()) +} + +async fn insert_shapes_batch( + conn: &mut PgConnection, + batch: &[(String, f64, f64, i32, Option)], +) -> Result<(), Box> { + if batch.is_empty() { + return Ok(()); + } + + let mut sql = String::from( + "INSERT INTO gtfs_shapes (shape_id, shape_pt_lat, shape_pt_lon, shape_pt_sequence, shape_dist_traveled) VALUES ", + ); + let mut values: Vec = Vec::new(); + + for (i, (shape_id, lat, lon, seq, dist)) in batch.iter().enumerate() { + let dist_str = dist.map_or("NULL".to_string(), |d| d.to_string()); + values.push(format!( + "('{}', {}, {}, {}, 
{})", + shape_id.replace('\'', "''"), + lat, + lon, + seq, + dist_str + )); + + if (i + 1) % 1000 == 0 || i == batch.len() - 1 { + sql.push_str(&values.join(",")); + sql.push_str(" ON CONFLICT DO NOTHING"); + sqlx::query(&sql).execute(&mut *conn).await?; + sql = String::from( + "INSERT INTO gtfs_shapes (shape_id, shape_pt_lat, shape_pt_lon, shape_pt_sequence, shape_dist_traveled) VALUES ", + ); + values.clear(); + } + } + + Ok(()) +} + +/// Import trips from trips.txt +async fn import_gtfs_trips( + conn: &mut PgConnection, + gtfs_path: &Path, +) -> Result<(), Box> { + let trips_path = gtfs_path.join("trips.txt"); + if !trips_path.exists() { + warn!("trips.txt not found, skipping trips import."); + return Ok(()); + } + + let mut rdr = ReaderBuilder::new().from_path(&trips_path)?; + let mut count = 0; + let mut batch: Vec = Vec::new(); + let batch_size = 5000; + + for result in rdr.records() { + let record = result?; + let route_id = record.get(0).unwrap_or("").to_string(); + let service_id = record.get(1).unwrap_or("").to_string(); + let trip_id = record.get(2).unwrap_or("").to_string(); + let trip_headsign = record + .get(3) + .filter(|s| !s.is_empty()) + .map(|s| s.to_string()); + let trip_short_name = record + .get(4) + .filter(|s| !s.is_empty()) + .map(|s| s.to_string()); + let direction_id: Option = record + .get(5) + .filter(|s| !s.is_empty()) + .and_then(|s| s.parse().ok()); + let block_id = record + .get(6) + .filter(|s| !s.is_empty()) + .map(|s| s.to_string()); + let shape_id = record + .get(7) + .filter(|s| !s.is_empty()) + .map(|s| s.to_string()); + let wheelchair_accessible: Option = record + .get(8) + .filter(|s| !s.is_empty()) + .and_then(|s| s.parse().ok()); + let bikes_allowed: Option = record + .get(9) + .filter(|s| !s.is_empty()) + .and_then(|s| s.parse().ok()); + + batch.push(( + trip_id, + route_id, + service_id, + trip_headsign, + trip_short_name, + direction_id, + block_id, + shape_id, + wheelchair_accessible, + bikes_allowed, + )); + + if 
batch.len() >= batch_size { + insert_trips_batch(&mut *conn, &batch).await?; + count += batch.len(); + batch.clear(); + + if count % 50000 == 0 { + info!("Imported {} trips...", count); + } + } + } + + // Insert remaining + if !batch.is_empty() { + insert_trips_batch(&mut *conn, &batch).await?; + count += batch.len(); + } + + info!("Imported {} trips.", count); + Ok(()) +} + +async fn insert_trips_batch( + conn: &mut PgConnection, + batch: &[TripBatchRow], +) -> Result<(), Box> { + if batch.is_empty() { + return Ok(()); + } + + let mut sql = String::from( + "INSERT INTO gtfs_trips (trip_id, route_id, service_id, trip_headsign, trip_short_name, direction_id, block_id, shape_id, wheelchair_accessible, bikes_allowed) VALUES ", + ); + let mut values: Vec = Vec::with_capacity(batch.len()); + + for ( + trip_id, + route_id, + service_id, + trip_headsign, + trip_short_name, + direction_id, + block_id, + shape_id, + wheelchair_accessible, + bikes_allowed, + ) in batch + { + let headsign_str = trip_headsign + .as_ref() + .map(|s| format!("'{}'", s.replace('\'', "''"))) + .unwrap_or_else(|| "NULL".to_string()); + let short_name_str = trip_short_name + .as_ref() + .map(|s| format!("'{}'", s.replace('\'', "''"))) + .unwrap_or_else(|| "NULL".to_string()); + let direction_str = direction_id + .map(|v| v.to_string()) + .unwrap_or_else(|| "NULL".to_string()); + let block_str = block_id + .as_ref() + .map(|s| format!("'{}'", s.replace('\'', "''"))) + .unwrap_or_else(|| "NULL".to_string()); + let shape_str = shape_id + .as_ref() + .map(|s| format!("'{}'", s.replace('\'', "''"))) + .unwrap_or_else(|| "NULL".to_string()); + let wheelchair_str = wheelchair_accessible + .map(|v| v.to_string()) + .unwrap_or_else(|| "NULL".to_string()); + let bikes_str = bikes_allowed + .map(|v| v.to_string()) + .unwrap_or_else(|| "NULL".to_string()); + + values.push(format!( + "('{}', '{}', '{}', {}, {}, {}, {}, {}, {}, {})", + trip_id.replace('\'', "''"), + route_id.replace('\'', "''"), + 
service_id.replace('\'', "''"), + headsign_str, + short_name_str, + direction_str, + block_str, + shape_str, + wheelchair_str, + bikes_str + )); + } + + sql.push_str(&values.join(",")); + sql.push_str(" ON CONFLICT (trip_id) DO NOTHING"); + + sqlx::query(&sql).execute(&mut *conn).await?; + + Ok(()) +} + +/// Import stop_times from stop_times.txt (largest file, uses batch processing) +async fn import_gtfs_stop_times( + conn: &mut PgConnection, + gtfs_path: &Path, +) -> Result<(), Box> { + let stop_times_path = gtfs_path.join("stop_times.txt"); + if !stop_times_path.exists() { + warn!("stop_times.txt not found, skipping stop_times import."); + return Ok(()); + } + + info!("Importing stop_times (this may take a while)..."); + + let mut rdr = ReaderBuilder::new().from_path(&stop_times_path)?; + let mut count = 0; + let mut batch: Vec = Vec::new(); + let batch_size = 5000; + + for result in rdr.records() { + let record = result?; + // trip_id,arrival_time,departure_time,stop_id,stop_sequence,stop_headsign,pickup_type,drop_off_type,shape_dist_traveled,timepoint + let trip_id = record.get(0).unwrap_or("").to_string(); + let arrival_time = parse_gtfs_time(record.get(1).unwrap_or("")); + let departure_time = parse_gtfs_time(record.get(2).unwrap_or("")); + let stop_id = record.get(3).unwrap_or("").to_string(); + let stop_sequence: i32 = record.get(4).unwrap_or("0").parse().unwrap_or(0); + let stop_headsign = record + .get(5) + .filter(|s| !s.is_empty()) + .map(|s| s.to_string()); + let pickup_type: Option = record + .get(6) + .filter(|s| !s.is_empty()) + .and_then(|s| s.parse().ok()); + let drop_off_type: Option = record + .get(7) + .filter(|s| !s.is_empty()) + .and_then(|s| s.parse().ok()); + let shape_dist_traveled: Option = record + .get(8) + .filter(|s| !s.is_empty()) + .and_then(|s| s.parse().ok()); + let timepoint: Option = record + .get(9) + .filter(|s| !s.is_empty()) + .and_then(|s| s.parse().ok()); + + batch.push(( + trip_id, + arrival_time, + departure_time, + 
stop_id, + stop_sequence, + stop_headsign, + pickup_type, + drop_off_type, + shape_dist_traveled, + timepoint, + )); + + if batch.len() >= batch_size { + insert_stop_times_batch(&mut *conn, &batch).await?; + count += batch.len(); + batch.clear(); + + if count % 50000 == 0 { + info!("Imported {} stop_times...", count); + } + } + } + + // Insert remaining + if !batch.is_empty() { + insert_stop_times_batch(&mut *conn, &batch).await?; + count += batch.len(); + } + + info!("Imported {} stop_times.", count); + Ok(()) +} + +/// Parse GTFS time format (HH:MM:SS, can be > 24:00:00 for times past midnight) +/// Returns the time string as-is to support 24+ hour times (e.g., "25:30:00") +fn parse_gtfs_time(time_str: &str) -> Option { + if time_str.is_empty() { + return None; + } + + let parts: Vec<&str> = time_str.split(':').collect(); + if parts.len() != 3 { + return None; + } + + // Validate that all parts are valid numbers + let _hours: u32 = parts[0].parse().ok()?; + let _minutes: u32 = parts[1].parse().ok()?; + let _seconds: u32 = parts[2].parse().ok()?; + + // Return the original string to support times > 24:00:00 + Some(time_str.to_string()) +} + +async fn insert_stop_times_batch( + conn: &mut PgConnection, + batch: &[StopTimeBatchRow], +) -> Result<(), Box> { + if batch.is_empty() { + return Ok(()); + } + + // Build multi-row INSERT for better performance + let mut sql = String::from( + "INSERT INTO gtfs_stop_times (trip_id, arrival_time, departure_time, stop_id, stop_sequence, stop_headsign, pickup_type, drop_off_type, shape_dist_traveled, timepoint) VALUES ", + ); + let mut values: Vec = Vec::with_capacity(batch.len()); + + for ( + trip_id, + arrival_time, + departure_time, + stop_id, + stop_sequence, + stop_headsign, + pickup_type, + drop_off_type, + shape_dist_traveled, + timepoint, + ) in batch + { + let arrival_str = arrival_time + .as_ref() + .map(|t| format!("'{}'", t)) + .unwrap_or_else(|| "NULL".to_string()); + let departure_str = departure_time + .as_ref() + 
.map(|t| format!("'{}'", t)) + .unwrap_or_else(|| "NULL".to_string()); + let headsign_str = stop_headsign + .as_ref() + .map(|s| format!("'{}'", s.replace('\'', "''"))) + .unwrap_or_else(|| "NULL".to_string()); + let pickup_str = pickup_type + .map(|v| v.to_string()) + .unwrap_or_else(|| "NULL".to_string()); + let dropoff_str = drop_off_type + .map(|v| v.to_string()) + .unwrap_or_else(|| "NULL".to_string()); + let dist_str = shape_dist_traveled + .map(|v| v.to_string()) + .unwrap_or_else(|| "NULL".to_string()); + let timepoint_str = timepoint + .map(|v| v.to_string()) + .unwrap_or_else(|| "NULL".to_string()); + + values.push(format!( + "('{}', {}, {}, '{}', {}, {}, {}, {}, {}, {})", + trip_id.replace('\'', "''"), + arrival_str, + departure_str, + stop_id.replace('\'', "''"), + stop_sequence, + headsign_str, + pickup_str, + dropoff_str, + dist_str, + timepoint_str + )); + } + + sql.push_str(&values.join(",")); + sql.push_str(" ON CONFLICT DO NOTHING"); + + sqlx::query(&sql).execute(&mut *conn).await?; + + Ok(()) +} + +/// Import feed_info from feed_info.txt +async fn import_gtfs_feed_info( + conn: &mut PgConnection, + gtfs_path: &Path, +) -> Result<(), Box> { + let feed_info_path = gtfs_path.join("feed_info.txt"); + if !feed_info_path.exists() { + warn!("feed_info.txt not found, skipping feed_info import."); + return Ok(()); + } + + let mut rdr = ReaderBuilder::new().from_path(&feed_info_path)?; + + for result in rdr.records() { + let record = result?; + // feed_publisher_name,feed_publisher_url,feed_lang,feed_start_date,feed_end_date,feed_version + let feed_publisher_name = record.get(0).unwrap_or(""); + let feed_publisher_url = record.get(1).filter(|s| !s.is_empty()); + let feed_lang = record.get(2).filter(|s| !s.is_empty()); + let feed_start_date = record + .get(3) + .filter(|s| !s.is_empty()) + .and_then(|s| chrono::NaiveDate::parse_from_str(s, "%Y%m%d").ok()); + let feed_end_date = record + .get(4) + .filter(|s| !s.is_empty()) + .and_then(|s| 
chrono::NaiveDate::parse_from_str(s, "%Y%m%d").ok()); + let feed_version = record.get(5).filter(|s| !s.is_empty()); + + sqlx::query( + r#"INSERT INTO gtfs_feed_info + (feed_publisher_name, feed_publisher_url, feed_lang, feed_start_date, feed_end_date, feed_version) + VALUES ($1, $2, $3, $4, $5, $6)"#, + ) + .bind(feed_publisher_name) + .bind(feed_publisher_url) + .bind(feed_lang) + .bind(feed_start_date) + .bind(feed_end_date) + .bind(feed_version) + .execute(&mut *conn) + .await?; + } + + info!("Imported feed_info."); + Ok(()) +} + +fn is_bus_feature_disabled() -> bool { + match env::var("DISABLE_BUS_FEATURE") { + Ok(s) => s.eq_ignore_ascii_case("true") || s == "1", + Err(_) => false, + } +} + +// ============================================================ +// GTFS to Stations/Lines Integration +// ============================================================ + +/// Convert hiragana characters to katakana +/// Hiragana range: U+3041 to U+3096 +/// Katakana range: U+30A1 to U+30F6 +fn hiragana_to_katakana(s: &str) -> String { + s.chars() + .map(|c| { + if ('\u{3041}'..='\u{3096}').contains(&c) { + char::from_u32(c as u32 + 0x60).unwrap_or(c) + } else { + c + } + }) + .collect() +} + +/// FNV-1a hash function for deterministic hashing across process invocations +/// Unlike DefaultHasher, this produces consistent results across runs +fn fnv1a_hash(data: &[u8]) -> u64 { + const FNV_OFFSET_BASIS: u64 = 0xcbf29ce484222325; + const FNV_PRIME: u64 = 0x100000001b3; + + let mut hash = FNV_OFFSET_BASIS; + for byte in data { + hash ^= *byte as u64; + hash = hash.wrapping_mul(FNV_PRIME); + } + hash +} + +/// Generate deterministic line_cd from route_id +/// Uses range starting at 100,000,000 to avoid conflicts with existing rail data +fn generate_bus_line_cd(route_id: &str) -> i32 { + let hash = fnv1a_hash(route_id.as_bytes()); + 100_000_000 + (hash % 10_000_000) as i32 +} + +/// Generate deterministic station_cd from stop_id and route_id +/// Uses range starting at 
200,000,000 to avoid conflicts with existing rail data +fn generate_bus_station_cd(stop_id: &str, route_id: &str) -> i32 { + let combined = format!("{}-{}", stop_id, route_id); + let hash = fnv1a_hash(combined.as_bytes()); + 200_000_000 + (hash % 100_000_000) as i32 +} + +/// Generate deterministic station_g_cd from stop_id only (shared across routes) +/// Same bus stop on different routes will have the same station_g_cd +fn generate_bus_station_g_cd(stop_id: &str) -> i32 { + let hash = fnv1a_hash(stop_id.as_bytes()); + 200_000_000 + (hash % 100_000_000) as i32 +} + +/// Row type for reading gtfs_routes +#[derive(sqlx::FromRow)] +struct GtfsRouteRow { + route_id: String, + #[allow(dead_code)] + agency_id: Option, + route_short_name: Option, + route_long_name: Option, + #[allow(dead_code)] + route_long_name_k: Option, + #[allow(dead_code)] + route_long_name_r: Option, + #[allow(dead_code)] + route_long_name_zh: Option, + #[allow(dead_code)] + route_long_name_ko: Option, + #[allow(dead_code)] + route_desc: Option, + route_type: i32, + #[allow(dead_code)] + route_url: Option, + route_color: Option, + #[allow(dead_code)] + route_text_color: Option, + #[allow(dead_code)] + route_sort_order: Option, +} + +/// Row type for reading gtfs_stops +#[derive(sqlx::FromRow)] +struct GtfsStopRow { + stop_id: String, + #[allow(dead_code)] + stop_code: Option, + stop_name: String, + stop_name_k: Option, + stop_name_r: Option, + stop_name_zh: Option, + stop_name_ko: Option, + #[allow(dead_code)] + stop_desc: Option, + stop_lat: f64, + stop_lon: f64, + #[allow(dead_code)] + zone_id: Option, + #[allow(dead_code)] + stop_url: Option, + #[allow(dead_code)] + location_type: Option, + #[allow(dead_code)] + parent_station: Option, + #[allow(dead_code)] + stop_timezone: Option, + #[allow(dead_code)] + wheelchair_boarding: Option, + #[allow(dead_code)] + platform_code: Option, +} + +/// Integrate GTFS bus data into stations/lines tables +/// +/// This function wraps all integration operations 
in a single database transaction. +/// If any step fails, all changes are rolled back to maintain database consistency. +pub async fn integrate_gtfs_to_stations() -> Result<(), Box> { + if is_bus_feature_disabled() { + info!("Bus feature is disabled, skipping GTFS integration."); + return Ok(()); + } + + let db_url = fetch_database_url(); + let mut conn = PgConnection::connect(&db_url).await?; + + // Check if GTFS data exists (outside transaction for quick exit) + let gtfs_route_count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM gtfs_routes") + .fetch_one(&mut conn) + .await?; + + if gtfs_route_count.0 == 0 { + info!("No GTFS routes found, skipping integration."); + return Ok(()); + } + + info!("Starting GTFS to stations/lines integration (using transaction)..."); + + // Begin transaction - all changes will be rolled back if any step fails + let mut tx = conn.begin().await?; + + // Step 1: Clear existing bus data from stations/lines + sqlx::query("DELETE FROM stations WHERE transport_type = 1") + .execute(&mut *tx) + .await?; + sqlx::query("DELETE FROM lines WHERE transport_type = 1") + .execute(&mut *tx) + .await?; + info!("Cleared existing bus data from stations/lines tables."); + + // Step 2: Insert bus routes as lines + integrate_gtfs_routes_to_lines(&mut tx).await?; + + // Step 3: Build stop-route mapping from stop_times + let stop_route_map = build_stop_route_mapping(&mut tx).await?; + + // Step 4: Insert bus stops as stations + integrate_gtfs_stops_to_stations(&mut tx, &stop_route_map).await?; + + // Step 5: Update cross-references in GTFS tables + update_gtfs_crossreferences(&mut tx, &stop_route_map).await?; + + sqlx::query("ANALYZE;").execute(&mut *tx).await?; + + // Commit the transaction - all changes are now permanent + tx.commit().await?; + + info!("GTFS integration completed successfully (transaction committed)."); + Ok(()) +} + +/// Integrate gtfs_routes into lines table +async fn integrate_gtfs_routes_to_lines( + conn: &mut PgConnection, +) -> 
Result<(), Box> { + let routes: Vec = sqlx::query_as("SELECT * FROM gtfs_routes") + .fetch_all(&mut *conn) + .await?; + + let company_cd = 119; // Tokyo Metropolitan Bureau of Transportation (東京都交通局) + + for route in &routes { + let line_cd = generate_bus_line_cd(&route.route_id); + let line_name = route + .route_short_name + .clone() + .unwrap_or_else(|| route.route_long_name.clone().unwrap_or_default()); + let line_color = route.route_color.as_ref().map(|c| { + if c.starts_with('#') { + c.clone() + } else { + format!("#{}", c) + } + }); + + let line_name_r = route.route_long_name.clone().unwrap_or_default(); + + sqlx::query( + r#"INSERT INTO lines ( + line_cd, company_cd, line_name, line_name_k, line_name_h, + line_name_r, line_color_c, line_type, e_status, e_sort, transport_type + ) VALUES ( + $1, $2, $3, $4, $5, $6, $7, $8, 0, $1, 1 + ) + ON CONFLICT (line_cd) DO NOTHING"#, + ) + .bind(line_cd) + .bind(company_cd) + .bind(&line_name) + .bind(&line_name) // line_name_k + .bind(&line_name) // line_name_h + .bind(&line_name_r) // line_name_r + .bind(&line_color) + .bind(route.route_type) + .execute(&mut *conn) + .await?; + + // Update gtfs_routes with generated line_cd + sqlx::query("UPDATE gtfs_routes SET line_cd = $1 WHERE route_id = $2") + .bind(line_cd) + .bind(&route.route_id) + .execute(&mut *conn) + .await?; + } + + info!("Integrated {} routes as lines.", routes.len()); + Ok(()) +} + +/// Build mapping of (parent_stop_id, route_id) -> stop_sequence from gtfs_stop_times +/// Groups child stops by their parent_station to represent physical bus stops +/// +/// # Ordering Strategy +/// +/// ## 1. Main trip sequence +/// - Select the trip with most stops as "main trip" (prefer direction_id=0) +/// - Use stop_sequence from main trip directly +/// +/// ## 2. 
Variant stop estimation +/// For stops only on variant trips: +/// - Use LAG/LEAD to find neighboring stops on the variant trip +/// - Look up neighbors' positions on the main trip +/// - Interpolate position based on neighbor positions +/// - Terminal stops (next_stop_id IS NULL) are placed at the end +/// - Start stops (prev_stop_id IS NULL) are placed at the beginning +async fn build_stop_route_mapping( + conn: &mut PgConnection, +) -> Result>, Box> { + // Strategy: + // 1. Find the "main" trip for each route (most stops, prefer direction_id=0) + // 2. Use stop_sequence from main trip for stops on it + // 3. For variant-only stops, use LAG/LEAD to find neighbors, then look up their + // main trip positions to estimate where the variant stop should go + let rows: Vec<(String, String, i32)> = sqlx::query_as( + r#"WITH RECURSIVE main_trips AS ( + -- Find the trip with the most stops for each route (prefer direction_id=0) + SELECT DISTINCT ON (gt.route_id) + gt.route_id, + gt.trip_id, + gt.direction_id as main_direction_id, + COUNT(*) as stop_count + FROM gtfs_trips gt + JOIN gtfs_stop_times gst ON gt.trip_id = gst.trip_id + GROUP BY gt.route_id, gt.trip_id, gt.direction_id + ORDER BY gt.route_id, + CASE WHEN gt.direction_id = 0 THEN 0 ELSE 1 END, + COUNT(*) DESC + ), + main_trip_stops AS ( + -- Get stops from main trips with their sequence + SELECT DISTINCT ON (COALESCE(gs.parent_station, gs.stop_id), mt.route_id) + COALESCE(gs.parent_station, gs.stop_id) as parent_stop_id, + mt.route_id, + gst.stop_sequence + FROM main_trips mt + JOIN gtfs_stop_times gst ON mt.trip_id = gst.trip_id + JOIN gtfs_stops gs ON gst.stop_id = gs.stop_id + ORDER BY COALESCE(gs.parent_station, gs.stop_id), mt.route_id, gst.stop_sequence + ), + main_trip_max_seq AS ( + SELECT route_id, MAX(stop_sequence) as max_seq + FROM main_trip_stops + GROUP BY route_id + ), + -- Get variant trips (non-main trips) with their stops and neighbors using window functions + variant_trip_stops_with_neighbors 
AS ( + SELECT + COALESCE(gs.parent_station, gs.stop_id) as parent_stop_id, + gt.route_id, + gt.trip_id, + gt.direction_id as variant_direction_id, + gst.stop_sequence, + LAG(COALESCE(gs.parent_station, gs.stop_id)) OVER ( + PARTITION BY gt.trip_id ORDER BY gst.stop_sequence + ) as prev_stop_id, + LEAD(COALESCE(gs.parent_station, gs.stop_id)) OVER ( + PARTITION BY gt.trip_id ORDER BY gst.stop_sequence + ) as next_stop_id + FROM gtfs_trips gt + JOIN gtfs_stop_times gst ON gt.trip_id = gst.trip_id + JOIN gtfs_stops gs ON gst.stop_id = gs.stop_id + WHERE NOT EXISTS ( + SELECT 1 FROM main_trips mt WHERE mt.trip_id = gt.trip_id + ) + ), + -- Find variant-only stops (not on main trip) with their neighbor info + -- Exclude stops that ONLY appear on NULL direction_id trips (loop routes) + variant_only_with_neighbors AS ( + SELECT DISTINCT ON (vts.parent_stop_id, vts.route_id) + vts.parent_stop_id, + vts.route_id, + vts.variant_direction_id, + vts.prev_stop_id, + vts.next_stop_id + FROM variant_trip_stops_with_neighbors vts + WHERE NOT EXISTS ( + SELECT 1 FROM main_trip_stops mts + WHERE mts.parent_stop_id = vts.parent_stop_id + AND mts.route_id = vts.route_id + ) + -- Only include stops that appear on at least one non-NULL direction_id trip + AND EXISTS ( + SELECT 1 FROM gtfs_trips gt2 + JOIN gtfs_stop_times gst2 ON gt2.trip_id = gst2.trip_id + JOIN gtfs_stops gs2 ON gst2.stop_id = gs2.stop_id + WHERE gt2.route_id = vts.route_id + AND COALESCE(gs2.parent_station, gs2.stop_id) = vts.parent_stop_id + AND gt2.direction_id IS NOT NULL + ) + ORDER BY vts.parent_stop_id, vts.route_id, vts.stop_sequence + ), + -- Recursive CTE to find the nearest main-trip stop by following prev chain + prev_chain AS ( + -- Base case: start from each variant stop + SELECT + von.parent_stop_id as origin_stop_id, + von.route_id, + von.prev_stop_id as current_stop_id, + 1 as depth, + ARRAY[von.parent_stop_id::TEXT] as visited + FROM variant_only_with_neighbors von + WHERE von.prev_stop_id IS NOT NULL 
+ + UNION ALL + + -- Recursive case: if current stop is also variant-only, follow its prev + SELECT + pc.origin_stop_id, + pc.route_id, + von2.prev_stop_id as current_stop_id, + pc.depth + 1, + pc.visited || pc.current_stop_id::TEXT + FROM prev_chain pc + JOIN variant_only_with_neighbors von2 + ON pc.current_stop_id = von2.parent_stop_id + AND pc.route_id = von2.route_id + WHERE pc.depth < 10 + AND von2.prev_stop_id IS NOT NULL + AND NOT pc.current_stop_id::TEXT = ANY(pc.visited) + -- Stop if we already found a main-trip stop + AND NOT EXISTS ( + SELECT 1 FROM main_trip_stops mts + WHERE mts.parent_stop_id = pc.current_stop_id + AND mts.route_id = pc.route_id + ) + ), + prev_resolved AS ( + -- For each origin stop, find the first stop in the chain that's on main trip + SELECT DISTINCT ON (pc.origin_stop_id, pc.route_id) + pc.origin_stop_id, + pc.route_id, + mts.stop_sequence as prev_main_seq, + pc.depth as prev_depth + FROM prev_chain pc + JOIN main_trip_stops mts + ON pc.current_stop_id = mts.parent_stop_id + AND pc.route_id = mts.route_id + ORDER BY pc.origin_stop_id, pc.route_id, pc.depth + ), + -- Similarly, recursive CTE for next chain + next_chain AS ( + SELECT + von.parent_stop_id as origin_stop_id, + von.route_id, + von.next_stop_id as current_stop_id, + 1 as depth, + ARRAY[von.parent_stop_id::TEXT] as visited + FROM variant_only_with_neighbors von + WHERE von.next_stop_id IS NOT NULL + + UNION ALL + + SELECT + nc.origin_stop_id, + nc.route_id, + von2.next_stop_id as current_stop_id, + nc.depth + 1, + nc.visited || nc.current_stop_id::TEXT + FROM next_chain nc + JOIN variant_only_with_neighbors von2 + ON nc.current_stop_id = von2.parent_stop_id + AND nc.route_id = von2.route_id + WHERE nc.depth < 10 + AND von2.next_stop_id IS NOT NULL + AND NOT nc.current_stop_id::TEXT = ANY(nc.visited) + AND NOT EXISTS ( + SELECT 1 FROM main_trip_stops mts + WHERE mts.parent_stop_id = nc.current_stop_id + AND mts.route_id = nc.route_id + ) + ), + next_resolved AS ( + 
SELECT DISTINCT ON (nc.origin_stop_id, nc.route_id) + nc.origin_stop_id, + nc.route_id, + mts.stop_sequence as next_main_seq, + nc.depth as next_depth + FROM next_chain nc + JOIN main_trip_stops mts + ON nc.current_stop_id = mts.parent_stop_id + AND nc.route_id = mts.route_id + ORDER BY nc.origin_stop_id, nc.route_id, nc.depth + ), + -- Look up main trip sequences for the neighbors (with recursive fallback) + -- When variant trip has different direction_id than main trip, swap prev/next + variant_estimated AS ( + SELECT + von.parent_stop_id, + von.route_id, + CASE + -- Direct neighbors on main trip (single-level lookup) + WHEN prev_mts.stop_sequence IS NOT NULL AND next_mts.stop_sequence IS NOT NULL + THEN ((prev_mts.stop_sequence + next_mts.stop_sequence) / 2.0) + WHEN prev_mts.stop_sequence IS NOT NULL + THEN CASE WHEN von.variant_direction_id IS NULL + OR von.variant_direction_id = mt.main_direction_id + THEN (prev_mts.stop_sequence + 0.5) + ELSE (prev_mts.stop_sequence - 0.5) + END + WHEN next_mts.stop_sequence IS NOT NULL + THEN CASE WHEN von.variant_direction_id IS NULL + OR von.variant_direction_id = mt.main_direction_id + THEN (next_mts.stop_sequence - 0.5) + ELSE (next_mts.stop_sequence + 0.5) + END + -- Recursive fallback: use resolved chains + WHEN pr.prev_main_seq IS NOT NULL AND nr.next_main_seq IS NOT NULL + THEN (pr.prev_main_seq + nr.next_main_seq) / 2.0 + + (pr.prev_depth - nr.next_depth) * 0.01 -- Slight offset based on depth difference + WHEN pr.prev_main_seq IS NOT NULL + THEN CASE WHEN von.variant_direction_id IS NULL + OR von.variant_direction_id = mt.main_direction_id + THEN (pr.prev_main_seq + 0.1 * pr.prev_depth) + ELSE (pr.prev_main_seq - 0.1 * pr.prev_depth) + END + WHEN nr.next_main_seq IS NOT NULL + THEN CASE WHEN von.variant_direction_id IS NULL + OR von.variant_direction_id = mt.main_direction_id + THEN (nr.next_main_seq - 0.1 * nr.next_depth) + ELSE (nr.next_main_seq + 0.1 * nr.next_depth) + END + -- TERMINAL stop (next_stop_id IS 
NULL, no neighbors on main trip): put at END or START based on direction + WHEN von.next_stop_id IS NULL + THEN CASE WHEN von.variant_direction_id IS NULL + OR von.variant_direction_id = mt.main_direction_id + THEN (mtms.max_seq + 0.5) + ELSE 0.5 + END + -- START stop (prev_stop_id IS NULL, no neighbors on main trip): put at START or END based on direction + WHEN von.prev_stop_id IS NULL + THEN CASE WHEN von.variant_direction_id IS NULL + OR von.variant_direction_id = mt.main_direction_id + THEN 0.5 + ELSE (mtms.max_seq + 0.5) + END + -- Fallback: put at end + ELSE (mtms.max_seq + 9999) + END as estimated_seq + FROM variant_only_with_neighbors von + JOIN main_trips mt ON von.route_id = mt.route_id + JOIN main_trip_max_seq mtms ON von.route_id = mtms.route_id + LEFT JOIN main_trip_stops prev_mts + ON von.prev_stop_id = prev_mts.parent_stop_id + AND von.route_id = prev_mts.route_id + LEFT JOIN main_trip_stops next_mts + ON von.next_stop_id = next_mts.parent_stop_id + AND von.route_id = next_mts.route_id + LEFT JOIN prev_resolved pr + ON von.parent_stop_id = pr.origin_stop_id + AND von.route_id = pr.route_id + LEFT JOIN next_resolved nr + ON von.parent_stop_id = nr.origin_stop_id + AND von.route_id = nr.route_id + ), + combined AS ( + SELECT parent_stop_id, route_id, stop_sequence::FLOAT as seq, 1 as priority + FROM main_trip_stops + UNION ALL + SELECT parent_stop_id, route_id, estimated_seq as seq, 2 as priority + FROM variant_estimated + ), + unique_stops AS ( + -- Deduplicate: prefer shape distance > main trip > variant + SELECT DISTINCT ON (parent_stop_id, route_id) + parent_stop_id, + route_id, + seq + FROM combined + ORDER BY parent_stop_id, route_id, priority, seq + ), + numbered AS ( + -- Re-number sequences to be consecutive integers + SELECT + parent_stop_id, + route_id, + ROW_NUMBER() OVER (PARTITION BY route_id ORDER BY seq, parent_stop_id)::INT as stop_sequence + FROM unique_stops + ) + SELECT parent_stop_id, route_id, stop_sequence + FROM numbered + 
ORDER BY route_id, stop_sequence"#, + ) + .fetch_all(&mut *conn) + .await?; + + let mut map: HashMap> = HashMap::new(); + for (parent_stop_id, route_id, stop_sequence) in rows { + map.entry(parent_stop_id) + .or_default() + .push((route_id, stop_sequence)); + } + + info!("Built stop-route mapping for {} physical stops.", map.len()); + Ok(map) +} + +/// Integrate gtfs_stops into stations table (one record per physical stop per route) +/// Only processes parent stops (stops without parent_station) to avoid duplicates +async fn integrate_gtfs_stops_to_stations( + conn: &mut PgConnection, + stop_route_map: &HashMap>, +) -> Result<(), Box> { + // Only fetch parent stops (stops that have no parent_station) + // These represent physical bus stops, child stops are just different poles + let stops: Vec = sqlx::query_as( + "SELECT * FROM gtfs_stops WHERE parent_station IS NULL OR parent_station = ''", + ) + .fetch_all(&mut *conn) + .await?; + + let mut inserted_count = 0; + + for stop in &stops { + let station_g_cd = generate_bus_station_g_cd(&stop.stop_id); + + // Get routes for this parent stop (with stop_sequence) + // The mapping now uses parent_stop_id as key + let routes = match stop_route_map.get(&stop.stop_id) { + Some(r) => r.clone(), + None => continue, // Skip stops not on any route + }; + + // Create a station record for each route this physical stop serves + for (route_id, stop_sequence) in &routes { + let station_cd = generate_bus_station_cd(&stop.stop_id, route_id); + let line_cd = generate_bus_line_cd(route_id); + + sqlx::query( + r#"INSERT INTO stations ( + station_cd, station_g_cd, station_name, station_name_k, + station_name_r, station_name_zh, station_name_ko, + line_cd, pref_cd, post, address, lon, lat, + open_ymd, close_ymd, e_status, e_sort, transport_type + ) VALUES ( + $1, $2, $3, $4, $5, $6, $7, $8, 13, '', '', $9, $10, + '', '', 0, $11, 1 + ) + ON CONFLICT (station_cd) DO NOTHING"#, + ) + .bind(station_cd) + .bind(station_g_cd) + 
.bind(&stop.stop_name) + .bind( + stop.stop_name_k + .as_ref() + .map(|k| hiragana_to_katakana(k)) + .unwrap_or_else(|| stop.stop_name.clone()), + ) + .bind(&stop.stop_name_r) + .bind(&stop.stop_name_zh) + .bind(&stop.stop_name_ko) + .bind(line_cd) + .bind(stop.stop_lon) + .bind(stop.stop_lat) + .bind(stop_sequence) + .execute(&mut *conn) + .await?; + + inserted_count += 1; + } + } + + info!( + "Integrated {} station records from {} GTFS stops.", + inserted_count, + stops.len() + ); + Ok(()) +} + +/// Update cross-references in GTFS tables (gtfs_stops.station_cd, gtfs_routes.line_cd) +async fn update_gtfs_crossreferences( + conn: &mut PgConnection, + stop_route_map: &HashMap>, +) -> Result<(), Box> { + // Update gtfs_stops with primary station_cd (using first route) + // Updates both parent stops and their child stops with the same station_cd + for (parent_stop_id, routes) in stop_route_map { + if let Some((route_id, _)) = routes.first() { + let station_cd = generate_bus_station_cd(parent_stop_id, route_id); + // Update parent stop and all its children + sqlx::query( + "UPDATE gtfs_stops SET station_cd = $1 WHERE stop_id = $2 OR parent_station = $2", + ) + .bind(station_cd) + .bind(parent_stop_id) + .execute(&mut *conn) + .await?; + } + } + + info!("Updated GTFS cross-references."); + Ok(()) +} diff --git a/stationapi/src/infrastructure.rs b/stationapi/src/infrastructure.rs index acd7ca70..794158e8 100644 --- a/stationapi/src/infrastructure.rs +++ b/stationapi/src/infrastructure.rs @@ -1,5 +1,6 @@ pub mod company_repository; pub mod error; +pub mod gtfs_repository; pub mod line_repository; pub mod station_repository; pub mod train_type_repository; diff --git a/stationapi/src/infrastructure/gtfs_repository.rs b/stationapi/src/infrastructure/gtfs_repository.rs new file mode 100644 index 00000000..2029d080 --- /dev/null +++ b/stationapi/src/infrastructure/gtfs_repository.rs @@ -0,0 +1,766 @@ +use async_trait::async_trait; +use sqlx::{Pool, Postgres}; +use 
std::sync::Arc; + +use crate::domain::{ + entity::gtfs::{ + GtfsAgency, GtfsCalendar, GtfsCalendarDate, GtfsFeedInfo, GtfsRoute, GtfsShapePoint, + GtfsStop, GtfsStopTime, GtfsTrip, + }, + error::DomainError, + repository::gtfs_repository::{ + GtfsAgencyRepository, GtfsCalendarDateRepository, GtfsCalendarRepository, + GtfsFeedInfoRepository, GtfsRouteRepository, GtfsShapeRepository, GtfsStopRepository, + GtfsStopTimeRepository, GtfsTripRepository, + }, +}; + +// ============================================================ +// Row types for SQLx +// ============================================================ + +#[derive(sqlx::FromRow)] +struct GtfsAgencyRow { + agency_id: String, + agency_name: String, + agency_name_k: Option, + agency_name_r: Option, + agency_name_zh: Option, + agency_name_ko: Option, + agency_url: Option, + agency_timezone: Option, + agency_lang: Option, + agency_phone: Option, + agency_fare_url: Option, + company_cd: Option, +} + +impl From for GtfsAgency { + fn from(row: GtfsAgencyRow) -> Self { + Self { + agency_id: row.agency_id, + agency_name: row.agency_name, + agency_name_k: row.agency_name_k, + agency_name_r: row.agency_name_r, + agency_name_zh: row.agency_name_zh, + agency_name_ko: row.agency_name_ko, + agency_url: row.agency_url, + agency_timezone: row + .agency_timezone + .unwrap_or_else(|| "Asia/Tokyo".to_string()), + agency_lang: row.agency_lang, + agency_phone: row.agency_phone, + agency_fare_url: row.agency_fare_url, + company_cd: row.company_cd, + } + } +} + +#[derive(sqlx::FromRow)] +struct GtfsRouteRow { + route_id: String, + agency_id: Option, + route_short_name: Option, + route_long_name: Option, + route_long_name_k: Option, + route_long_name_r: Option, + route_long_name_zh: Option, + route_long_name_ko: Option, + route_desc: Option, + route_type: i32, + route_url: Option, + route_color: Option, + route_text_color: Option, + route_sort_order: Option, + line_cd: Option, +} + +impl From for GtfsRoute { + fn from(row: 
GtfsRouteRow) -> Self { + Self { + route_id: row.route_id, + agency_id: row.agency_id, + route_short_name: row.route_short_name, + route_long_name: row.route_long_name, + route_long_name_k: row.route_long_name_k, + route_long_name_r: row.route_long_name_r, + route_long_name_zh: row.route_long_name_zh, + route_long_name_ko: row.route_long_name_ko, + route_desc: row.route_desc, + route_type: row.route_type, + route_url: row.route_url, + route_color: row.route_color, + route_text_color: row.route_text_color, + route_sort_order: row.route_sort_order, + line_cd: row.line_cd, + } + } +} + +#[derive(sqlx::FromRow)] +struct GtfsStopRow { + stop_id: String, + stop_code: Option, + stop_name: String, + stop_name_k: Option, + stop_name_r: Option, + stop_name_zh: Option, + stop_name_ko: Option, + stop_desc: Option, + stop_lat: f64, + stop_lon: f64, + zone_id: Option, + stop_url: Option, + location_type: Option, + parent_station: Option, + stop_timezone: Option, + wheelchair_boarding: Option, + platform_code: Option, + station_cd: Option, +} + +impl From for GtfsStop { + fn from(row: GtfsStopRow) -> Self { + Self { + stop_id: row.stop_id, + stop_code: row.stop_code, + stop_name: row.stop_name, + stop_name_k: row.stop_name_k, + stop_name_r: row.stop_name_r, + stop_name_zh: row.stop_name_zh, + stop_name_ko: row.stop_name_ko, + stop_desc: row.stop_desc, + stop_lat: row.stop_lat, + stop_lon: row.stop_lon, + zone_id: row.zone_id, + stop_url: row.stop_url, + location_type: row.location_type, + parent_station: row.parent_station, + stop_timezone: row.stop_timezone, + wheelchair_boarding: row.wheelchair_boarding, + platform_code: row.platform_code, + station_cd: row.station_cd, + } + } +} + +#[derive(sqlx::FromRow)] +struct GtfsCalendarRow { + service_id: String, + monday: bool, + tuesday: bool, + wednesday: bool, + thursday: bool, + friday: bool, + saturday: bool, + sunday: bool, + start_date: chrono::NaiveDate, + end_date: chrono::NaiveDate, +} + +impl From for GtfsCalendar { + fn 
from(row: GtfsCalendarRow) -> Self { + Self { + service_id: row.service_id, + monday: row.monday, + tuesday: row.tuesday, + wednesday: row.wednesday, + thursday: row.thursday, + friday: row.friday, + saturday: row.saturday, + sunday: row.sunday, + start_date: row.start_date.format("%Y%m%d").to_string(), + end_date: row.end_date.format("%Y%m%d").to_string(), + } + } +} + +#[derive(sqlx::FromRow)] +struct GtfsCalendarDateRow { + id: i32, + service_id: String, + date: chrono::NaiveDate, + exception_type: i32, +} + +impl From for GtfsCalendarDate { + fn from(row: GtfsCalendarDateRow) -> Self { + Self { + id: row.id, + service_id: row.service_id, + date: row.date.format("%Y%m%d").to_string(), + exception_type: row.exception_type, + } + } +} + +#[derive(sqlx::FromRow)] +struct GtfsTripRow { + trip_id: String, + route_id: String, + service_id: String, + trip_headsign: Option, + trip_headsign_k: Option, + trip_headsign_r: Option, + trip_short_name: Option, + direction_id: Option, + block_id: Option, + shape_id: Option, + wheelchair_accessible: Option, + bikes_allowed: Option, +} + +impl From for GtfsTrip { + fn from(row: GtfsTripRow) -> Self { + Self { + trip_id: row.trip_id, + route_id: row.route_id, + service_id: row.service_id, + trip_headsign: row.trip_headsign, + trip_headsign_k: row.trip_headsign_k, + trip_headsign_r: row.trip_headsign_r, + trip_short_name: row.trip_short_name, + direction_id: row.direction_id, + block_id: row.block_id, + shape_id: row.shape_id, + wheelchair_accessible: row.wheelchair_accessible, + bikes_allowed: row.bikes_allowed, + } + } +} + +#[derive(sqlx::FromRow)] +struct GtfsStopTimeRow { + id: i32, + trip_id: String, + arrival_time: Option, + departure_time: Option, + stop_id: String, + stop_sequence: i32, + stop_headsign: Option, + pickup_type: Option, + drop_off_type: Option, + shape_dist_traveled: Option, + timepoint: Option, +} + +impl From for GtfsStopTime { + fn from(row: GtfsStopTimeRow) -> Self { + Self { + id: row.id, + trip_id: 
row.trip_id, + arrival_time: row.arrival_time, + departure_time: row.departure_time, + stop_id: row.stop_id, + stop_sequence: row.stop_sequence, + stop_headsign: row.stop_headsign, + pickup_type: row.pickup_type, + drop_off_type: row.drop_off_type, + shape_dist_traveled: row.shape_dist_traveled, + timepoint: row.timepoint, + } + } +} + +#[derive(sqlx::FromRow)] +struct GtfsShapePointRow { + id: i32, + shape_id: String, + shape_pt_lat: f64, + shape_pt_lon: f64, + shape_pt_sequence: i32, + shape_dist_traveled: Option, +} + +impl From for GtfsShapePoint { + fn from(row: GtfsShapePointRow) -> Self { + Self { + id: row.id, + shape_id: row.shape_id, + shape_pt_lat: row.shape_pt_lat, + shape_pt_lon: row.shape_pt_lon, + shape_pt_sequence: row.shape_pt_sequence, + shape_dist_traveled: row.shape_dist_traveled, + } + } +} + +#[derive(sqlx::FromRow)] +struct GtfsFeedInfoRow { + id: i32, + feed_publisher_name: String, + feed_publisher_url: Option, + feed_lang: Option, + feed_start_date: Option, + feed_end_date: Option, + feed_version: Option, + feed_contact_email: Option, + feed_contact_url: Option, + imported_at: Option, +} + +impl From for GtfsFeedInfo { + fn from(row: GtfsFeedInfoRow) -> Self { + Self { + id: row.id, + feed_publisher_name: row.feed_publisher_name, + feed_publisher_url: row.feed_publisher_url, + feed_lang: row.feed_lang, + feed_start_date: row.feed_start_date.map(|d| d.format("%Y%m%d").to_string()), + feed_end_date: row.feed_end_date.map(|d| d.format("%Y%m%d").to_string()), + feed_version: row.feed_version, + feed_contact_email: row.feed_contact_email, + feed_contact_url: row.feed_contact_url, + imported_at: row + .imported_at + .map(|dt| dt.format("%Y-%m-%d %H:%M:%S").to_string()), + } + } +} + +// ============================================================ +// Repository implementations (using runtime query_as) +// ============================================================ + +pub struct MyGtfsAgencyRepository { + pool: Arc>, +} + +impl 
MyGtfsAgencyRepository { + pub fn new(pool: Arc>) -> Self { + Self { pool } + } +} + +#[async_trait] +impl GtfsAgencyRepository for MyGtfsAgencyRepository { + async fn find_by_id(&self, agency_id: &str) -> Result, DomainError> { + let row = sqlx::query_as::<_, GtfsAgencyRow>( + r#"SELECT * FROM gtfs_agencies WHERE agency_id = $1"#, + ) + .bind(agency_id) + .fetch_optional(&*self.pool) + .await?; + + Ok(row.map(|r| r.into())) + } + + async fn get_all(&self) -> Result, DomainError> { + let rows = sqlx::query_as::<_, GtfsAgencyRow>(r#"SELECT * FROM gtfs_agencies"#) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } + + async fn get_by_company_cd(&self, company_cd: i32) -> Result, DomainError> { + let rows = sqlx::query_as::<_, GtfsAgencyRow>( + r#"SELECT * FROM gtfs_agencies WHERE company_cd = $1"#, + ) + .bind(company_cd) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } +} + +pub struct MyGtfsRouteRepository { + pool: Arc>, +} + +impl MyGtfsRouteRepository { + pub fn new(pool: Arc>) -> Self { + Self { pool } + } +} + +#[async_trait] +impl GtfsRouteRepository for MyGtfsRouteRepository { + async fn find_by_id(&self, route_id: &str) -> Result, DomainError> { + let row = + sqlx::query_as::<_, GtfsRouteRow>(r#"SELECT * FROM gtfs_routes WHERE route_id = $1"#) + .bind(route_id) + .fetch_optional(&*self.pool) + .await?; + + Ok(row.map(|r| r.into())) + } + + async fn get_by_agency_id(&self, agency_id: &str) -> Result, DomainError> { + let rows = sqlx::query_as::<_, GtfsRouteRow>( + r#"SELECT * FROM gtfs_routes WHERE agency_id = $1 ORDER BY route_sort_order"#, + ) + .bind(agency_id) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } + + async fn get_by_line_cd(&self, line_cd: i32) -> Result, DomainError> { + let rows = + sqlx::query_as::<_, GtfsRouteRow>(r#"SELECT * FROM gtfs_routes WHERE line_cd = $1"#) + .bind(line_cd) + .fetch_all(&*self.pool) + 
.await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } + + async fn search_by_name( + &self, + name: &str, + limit: Option, + ) -> Result, DomainError> { + let search_pattern = format!("%{name}%"); + let limit = limit.unwrap_or(10) as i64; + + let rows = sqlx::query_as::<_, GtfsRouteRow>( + r#"SELECT * FROM gtfs_routes + WHERE route_short_name LIKE $1 + OR route_long_name LIKE $1 + OR route_long_name_k LIKE $1 + ORDER BY route_sort_order + LIMIT $2"#, + ) + .bind(&search_pattern) + .bind(limit) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } +} + +pub struct MyGtfsStopRepository { + pool: Arc>, +} + +impl MyGtfsStopRepository { + pub fn new(pool: Arc>) -> Self { + Self { pool } + } +} + +#[async_trait] +impl GtfsStopRepository for MyGtfsStopRepository { + async fn find_by_id(&self, stop_id: &str) -> Result, DomainError> { + let row = + sqlx::query_as::<_, GtfsStopRow>(r#"SELECT * FROM gtfs_stops WHERE stop_id = $1"#) + .bind(stop_id) + .fetch_optional(&*self.pool) + .await?; + + Ok(row.map(|r| r.into())) + } + + async fn get_by_station_cd(&self, station_cd: i32) -> Result, DomainError> { + let rows = + sqlx::query_as::<_, GtfsStopRow>(r#"SELECT * FROM gtfs_stops WHERE station_cd = $1"#) + .bind(station_cd) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } + + async fn get_by_coordinates( + &self, + latitude: f64, + longitude: f64, + limit: Option, + ) -> Result, DomainError> { + let limit = limit.unwrap_or(10) as i32; + + let rows = sqlx::query_as::<_, GtfsStopRow>( + r#"SELECT * FROM gtfs_stops + ORDER BY point(stop_lat, stop_lon) <-> point($1, $2) + LIMIT $3"#, + ) + .bind(latitude) + .bind(longitude) + .bind(limit) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } + + async fn search_by_name( + &self, + name: &str, + limit: Option, + ) -> Result, DomainError> { + let search_pattern = format!("%{name}%"); + let limit = 
limit.unwrap_or(10) as i64; + + let rows = sqlx::query_as::<_, GtfsStopRow>( + r#"SELECT * FROM gtfs_stops + WHERE stop_name LIKE $1 + OR stop_name_k LIKE $1 + OR stop_name_r LIKE $1 + LIMIT $2"#, + ) + .bind(&search_pattern) + .bind(limit) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } + + async fn get_by_route_id(&self, route_id: &str) -> Result, DomainError> { + let rows = sqlx::query_as::<_, GtfsStopRow>( + r#"SELECT DISTINCT gs.* + FROM gtfs_stops gs + JOIN gtfs_stop_times gst ON gs.stop_id = gst.stop_id + JOIN gtfs_trips gt ON gst.trip_id = gt.trip_id + WHERE gt.route_id = $1 + ORDER BY gs.stop_name"#, + ) + .bind(route_id) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } +} + +pub struct MyGtfsCalendarRepository { + pool: Arc>, +} + +impl MyGtfsCalendarRepository { + pub fn new(pool: Arc>) -> Self { + Self { pool } + } +} + +#[async_trait] +impl GtfsCalendarRepository for MyGtfsCalendarRepository { + async fn find_by_id(&self, service_id: &str) -> Result, DomainError> { + let row = sqlx::query_as::<_, GtfsCalendarRow>( + r#"SELECT * FROM gtfs_calendar WHERE service_id = $1"#, + ) + .bind(service_id) + .fetch_optional(&*self.pool) + .await?; + + Ok(row.map(|r| r.into())) + } + + async fn get_active_on_date(&self, date: &str) -> Result, DomainError> { + let date = chrono::NaiveDate::parse_from_str(date, "%Y%m%d") + .map_err(|e| DomainError::Unexpected(e.to_string()))?; + + let rows = sqlx::query_as::<_, GtfsCalendarRow>( + r#"SELECT * FROM gtfs_calendar + WHERE start_date <= $1 AND end_date >= $1"#, + ) + .bind(date) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } +} + +pub struct MyGtfsCalendarDateRepository { + pool: Arc>, +} + +impl MyGtfsCalendarDateRepository { + pub fn new(pool: Arc>) -> Self { + Self { pool } + } +} + +#[async_trait] +impl GtfsCalendarDateRepository for MyGtfsCalendarDateRepository { + async fn 
get_by_service_id( + &self, + service_id: &str, + ) -> Result, DomainError> { + let rows = sqlx::query_as::<_, GtfsCalendarDateRow>( + r#"SELECT * FROM gtfs_calendar_dates WHERE service_id = $1 ORDER BY date"#, + ) + .bind(service_id) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } + + async fn get_by_date(&self, date: &str) -> Result, DomainError> { + let date = chrono::NaiveDate::parse_from_str(date, "%Y%m%d") + .map_err(|e| DomainError::Unexpected(e.to_string()))?; + + let rows = sqlx::query_as::<_, GtfsCalendarDateRow>( + r#"SELECT * FROM gtfs_calendar_dates WHERE date = $1"#, + ) + .bind(date) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } +} + +pub struct MyGtfsTripRepository { + pool: Arc>, +} + +impl MyGtfsTripRepository { + pub fn new(pool: Arc>) -> Self { + Self { pool } + } +} + +#[async_trait] +impl GtfsTripRepository for MyGtfsTripRepository { + async fn find_by_id(&self, trip_id: &str) -> Result, DomainError> { + let row = + sqlx::query_as::<_, GtfsTripRow>(r#"SELECT * FROM gtfs_trips WHERE trip_id = $1"#) + .bind(trip_id) + .fetch_optional(&*self.pool) + .await?; + + Ok(row.map(|r| r.into())) + } + + async fn get_by_route_id(&self, route_id: &str) -> Result, DomainError> { + let rows = + sqlx::query_as::<_, GtfsTripRow>(r#"SELECT * FROM gtfs_trips WHERE route_id = $1"#) + .bind(route_id) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } + + async fn get_by_service_id(&self, service_id: &str) -> Result, DomainError> { + let rows = + sqlx::query_as::<_, GtfsTripRow>(r#"SELECT * FROM gtfs_trips WHERE service_id = $1"#) + .bind(service_id) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } +} + +pub struct MyGtfsStopTimeRepository { + pool: Arc>, +} + +impl MyGtfsStopTimeRepository { + pub fn new(pool: Arc>) -> Self { + Self { pool } + } +} + +#[async_trait] +impl 
GtfsStopTimeRepository for MyGtfsStopTimeRepository { + async fn get_by_trip_id(&self, trip_id: &str) -> Result, DomainError> { + let rows = sqlx::query_as::<_, GtfsStopTimeRow>( + r#"SELECT * FROM gtfs_stop_times WHERE trip_id = $1 ORDER BY stop_sequence"#, + ) + .bind(trip_id) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } + + async fn get_by_stop_id(&self, stop_id: &str) -> Result, DomainError> { + let rows = sqlx::query_as::<_, GtfsStopTimeRow>( + r#"SELECT * FROM gtfs_stop_times WHERE stop_id = $1 ORDER BY arrival_time"#, + ) + .bind(stop_id) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } + + async fn get_departures_at_stop( + &self, + stop_id: &str, + from_time: &str, + limit: Option, + ) -> Result, DomainError> { + let limit = limit.unwrap_or(10) as i64; + + let rows = sqlx::query_as::<_, GtfsStopTimeRow>( + r#"SELECT * FROM gtfs_stop_times + WHERE stop_id = $1 AND departure_time >= $2 + ORDER BY departure_time + LIMIT $3"#, + ) + .bind(stop_id) + .bind(from_time) + .bind(limit) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } +} + +pub struct MyGtfsShapeRepository { + pool: Arc>, +} + +impl MyGtfsShapeRepository { + pub fn new(pool: Arc>) -> Self { + Self { pool } + } +} + +#[async_trait] +impl GtfsShapeRepository for MyGtfsShapeRepository { + async fn get_by_shape_id(&self, shape_id: &str) -> Result, DomainError> { + let rows = sqlx::query_as::<_, GtfsShapePointRow>( + r#"SELECT * FROM gtfs_shapes WHERE shape_id = $1 ORDER BY shape_pt_sequence"#, + ) + .bind(shape_id) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } +} + +pub struct MyGtfsFeedInfoRepository { + pool: Arc>, +} + +impl MyGtfsFeedInfoRepository { + pub fn new(pool: Arc>) -> Self { + Self { pool } + } +} + +#[async_trait] +impl GtfsFeedInfoRepository for MyGtfsFeedInfoRepository { + async fn get_latest(&self) -> 
Result, DomainError> { + let row = sqlx::query_as::<_, GtfsFeedInfoRow>( + r#"SELECT * FROM gtfs_feed_info ORDER BY imported_at DESC LIMIT 1"#, + ) + .fetch_optional(&*self.pool) + .await?; + + Ok(row.map(|r| r.into())) + } + + async fn get_all(&self) -> Result, DomainError> { + let rows = sqlx::query_as::<_, GtfsFeedInfoRow>( + r#"SELECT * FROM gtfs_feed_info ORDER BY imported_at DESC"#, + ) + .fetch_all(&*self.pool) + .await?; + + Ok(rows.into_iter().map(|r| r.into()).collect()) + } +} diff --git a/stationapi/src/infrastructure/line_repository.rs b/stationapi/src/infrastructure/line_repository.rs index c5e5bf3f..b90dcc97 100644 --- a/stationapi/src/infrastructure/line_repository.rs +++ b/stationapi/src/infrastructure/line_repository.rs @@ -3,7 +3,9 @@ use sqlx::{PgConnection, Pool, Postgres}; use std::sync::Arc; use crate::domain::{ - entity::line::Line, error::DomainError, repository::line_repository::LineRepository, + entity::{gtfs::TransportType, line::Line}, + error::DomainError, + repository::line_repository::LineRepository, }; #[derive(sqlx::FromRow, Clone)] @@ -37,6 +39,7 @@ pub struct LineRow { pub station_cd: Option, pub station_g_cd: Option, pub type_cd: Option, + pub transport_type: Option, } impl From for Line { @@ -75,6 +78,7 @@ impl From for Line { station_g_cd: row.station_g_cd, average_distance: row.average_distance, type_cd: row.type_cd, + transport_type: TransportType::from(row.transport_type.unwrap_or(0)), } } } @@ -189,7 +193,8 @@ impl InternalLineRepository { CAST(NULL AS INTEGER) AS line_group_cd, CAST(NULL AS INTEGER) AS station_cd, CAST(NULL AS INTEGER) AS station_g_cd, - CAST(NULL AS INTEGER) AS type_cd + CAST(NULL AS INTEGER) AS type_cd, + l.transport_type FROM lines AS l WHERE l.line_cd = $1 AND l.e_status = 0", @@ -241,12 +246,13 @@ impl InternalLineRepository { COALESCE(alias_data.line_name_r, l.line_name_r) AS line_name_r, COALESCE(alias_data.line_name_zh, l.line_name_zh) AS line_name_zh, COALESCE(alias_data.line_name_ko, 
l.line_name_ko) AS line_name_ko, - COALESCE(alias_data.line_color_c, l.line_color_c) AS line_color_c + COALESCE(alias_data.line_color_c, l.line_color_c) AS line_color_c, + l.transport_type FROM lines AS l JOIN stations AS s ON s.station_cd = $1 - JOIN station_station_types AS sst ON sst.station_cd = s.station_cd AND sst.pass <> 1 + LEFT JOIN station_station_types AS sst ON sst.station_cd = s.station_cd AND sst.pass <> 1 LEFT JOIN ( - SELECT DISTINCT ON (la.station_cd) + SELECT DISTINCT ON (la.station_cd) la.station_cd, a.line_name, a.line_name_k, @@ -313,7 +319,8 @@ impl InternalLineRepository { CAST(NULL AS INTEGER) AS line_group_cd, CAST(NULL AS INTEGER) AS station_cd, CAST(NULL AS INTEGER) AS station_g_cd, - CAST(NULL AS INTEGER) AS type_cd + CAST(NULL AS INTEGER) AS type_cd, + transport_type FROM lines WHERE line_cd IN ( {params} ) AND e_status = 0" ); @@ -363,11 +370,12 @@ impl InternalLineRepository { sst.line_group_cd, sst.type_cd, s.station_cd, - s.station_g_cd + s.station_g_cd, + l.transport_type FROM lines AS l JOIN stations AS s ON s.station_g_cd = $1 AND s.e_status = 0 - JOIN station_station_types AS sst ON sst.station_cd = s.station_cd AND sst.pass <> 1 + LEFT JOIN station_station_types AS sst ON sst.station_cd = s.station_cd AND sst.pass <> 1 WHERE l.line_cd = s.line_cd AND l.e_status = 0", station_group_id @@ -421,7 +429,8 @@ impl InternalLineRepository { COALESCE(a.line_name_r, l.line_name_r) AS line_name_r, COALESCE(a.line_name_zh, l.line_name_zh) AS line_name_zh, COALESCE(a.line_name_ko, l.line_name_ko) AS line_name_ko, - COALESCE(a.line_color_c, l.line_color_c) AS line_color_c + COALESCE(a.line_color_c, l.line_color_c) AS line_color_c, + l.transport_type FROM lines AS l JOIN stations AS s ON s.station_g_cd IN ( {params} ) AND s.e_status = 0 @@ -483,7 +492,8 @@ impl InternalLineRepository { l.line_name_r, l.line_name_zh, l.line_name_ko, - l.line_color_c + l.line_color_c, + l.transport_type FROM lines AS l JOIN station_station_types AS sst ON 
sst.line_group_cd = $1 AND sst.pass <> 1 JOIN stations AS s ON s.station_cd = sst.station_cd @@ -542,7 +552,8 @@ impl InternalLineRepository { sst.line_group_cd, sst.type_cd, s.station_cd, - s.station_g_cd + s.station_g_cd, + l.transport_type FROM lines AS l JOIN station_station_types AS sst ON sst.line_group_cd IN ( {params} ) AND sst.pass <> 1 JOIN stations AS s ON s.station_cd = sst.station_cd AND s.e_status = 0 @@ -604,7 +615,8 @@ impl InternalLineRepository { sst.line_group_cd, sst.type_cd, s.station_cd, - s.station_g_cd + s.station_g_cd, + l.transport_type FROM lines AS l JOIN station_station_types AS sst ON sst.line_group_cd = ANY($1) AND sst.pass <> 1 JOIN stations AS s ON s.station_cd = sst.station_cd AND s.e_status = 0 AND s.line_cd = l.line_cd @@ -660,7 +672,8 @@ impl InternalLineRepository { CAST(NULL AS INTEGER) AS line_group_cd, CAST(NULL AS INTEGER) AS station_cd, CAST(NULL AS INTEGER) AS station_g_cd, - CAST(NULL AS INTEGER) AS type_cd + CAST(NULL AS INTEGER) AS type_cd, + l.transport_type FROM lines AS l WHERE ( l.line_name LIKE $1 @@ -920,6 +933,7 @@ mod tests { station_cd: Some(101), station_g_cd: Some(201), type_cd: Some(1), + transport_type: Some(0), }; let line: Line = line_row.into(); @@ -975,6 +989,7 @@ mod tests { station_cd: None, station_g_cd: None, type_cd: None, + transport_type: None, }; let line: Line = line_row.into(); diff --git a/stationapi/src/infrastructure/station_repository.rs b/stationapi/src/infrastructure/station_repository.rs index 257ecccf..621fff40 100644 --- a/stationapi/src/infrastructure/station_repository.rs +++ b/stationapi/src/infrastructure/station_repository.rs @@ -4,7 +4,8 @@ use std::sync::Arc; use crate::{ domain::{ - entity::station::Station, error::DomainError, + entity::{gtfs::TransportType, station::Station}, + error::DomainError, repository::station_repository::StationRepository, }, proto::StopCondition, @@ -76,6 +77,7 @@ struct StationRow { pub color: Option, pub direction: Option, pub kind: Option, + pub 
transport_type: Option, } impl From for Station { @@ -155,6 +157,7 @@ impl From for Station { color: row.color, direction: row.direction, kind: row.kind, + transport_type: TransportType::from(row.transport_type.unwrap_or(0)), } } } @@ -183,18 +186,26 @@ impl StationRepository for MyStationRepository { &self, line_id: u32, station_id: Option, + direction_id: Option, ) -> Result, DomainError> { let mut conn = self.pool.acquire().await?; match station_id { Some(station_id) => { InternalStationRepository::get_by_line_id_and_station_id( - line_id, station_id, &mut conn, + line_id, + station_id, + direction_id, + &mut conn, ) .await } None => { - InternalStationRepository::get_by_line_id_without_train_types(line_id, &mut conn) - .await + InternalStationRepository::get_by_line_id_without_train_types( + line_id, + direction_id, + &mut conn, + ) + .await } } } @@ -220,9 +231,17 @@ impl StationRepository for MyStationRepository { latitude: f64, longitude: f64, limit: Option, + transport_type: Option, ) -> Result, DomainError> { let mut conn = self.pool.acquire().await?; - InternalStationRepository::get_by_coordinates(latitude, longitude, limit, &mut conn).await + InternalStationRepository::get_by_coordinates( + latitude, + longitude, + limit, + transport_type, + &mut conn, + ) + .await } async fn get_by_name( @@ -230,12 +249,14 @@ impl StationRepository for MyStationRepository { station_name: String, limit: Option, from_station_group_id: Option, + transport_type: Option, ) -> Result, DomainError> { let mut conn = self.pool.acquire().await?; InternalStationRepository::get_by_name( station_name, limit, from_station_group_id, + transport_type, &mut conn, ) .await @@ -348,7 +369,8 @@ impl InternalStationRepository { t.type_name_ko, t.color, t.direction, - t.kind + t.kind, + s.transport_type FROM stations AS s JOIN lines AS l ON l.line_cd = s.line_cd LEFT JOIN station_station_types AS sst ON sst.station_cd = s.station_cd @@ -452,7 +474,8 @@ impl InternalStationRepository { 
NULL::text AS type_name_ko, NULL::text AS color, NULL::int AS direction, - NULL::int AS kind + NULL::int AS kind, + s.transport_type FROM stations AS s JOIN lines AS l ON l.line_cd = s.line_cd AND l.e_status = 0 LEFT JOIN line_aliases AS la ON la.station_cd = s.station_cd @@ -465,7 +488,7 @@ impl InternalStationRepository { s.station_name_r, s.station_name_rn, s.station_name_zh, s.station_name_ko, s.station_number1, s.station_number2, s.station_number3, s.station_number4, s.three_letter_code, s.line_cd, s.pref_cd, s.post, s.address, s.lon, s.lat, - s.open_ymd, s.close_ymd, s.e_status, s.e_sort, l.company_cd, l.line_type, + s.open_ymd, s.close_ymd, s.e_status, s.e_sort, s.transport_type, l.company_cd, l.line_type, l.line_symbol1, l.line_symbol2, l.line_symbol3, l.line_symbol4, l.line_symbol1_color, l.line_symbol2_color, l.line_symbol3_color, l.line_symbol4_color, l.line_symbol1_shape, l.line_symbol2_shape, l.line_symbol3_shape, l.line_symbol4_shape, @@ -492,11 +515,18 @@ impl InternalStationRepository { async fn get_by_line_id_without_train_types( line_id: u32, + direction_id: Option, conn: &mut PgConnection, ) -> Result, DomainError> { - let rows = sqlx::query_as!( - StationRow, - r#"SELECT + // When direction_id = 1 (上り) or 2 (下り), reverse the order + let order_clause = if matches!(direction_id, Some(1) | Some(2)) { + "ORDER BY s.e_sort DESC, s.station_cd DESC" + } else { + "ORDER BY s.e_sort ASC, s.station_cd ASC" + }; + + let query_str = format!( + r#"SELECT s.station_cd, s.station_g_cd, s.station_name, @@ -554,19 +584,22 @@ impl InternalStationRepository { NULL::text AS type_name_ko, NULL::text AS color, NULL::int AS direction, - NULL::int AS kind - FROM stations AS s - JOIN lines AS l ON l.line_cd = s.line_cd - LEFT JOIN line_aliases AS la ON la.station_cd = s.station_cd - LEFT JOIN aliases AS a ON a.id = la.alias_cd + NULL::int AS kind, + s.transport_type + FROM stations AS s + JOIN lines AS l ON l.line_cd = s.line_cd + LEFT JOIN line_aliases AS la ON 
la.station_cd = s.station_cd + LEFT JOIN aliases AS a ON a.id = la.alias_cd WHERE l.line_cd = $1 AND s.e_status = 0 AND l.e_status = 0 - ORDER BY s.e_sort, s.station_cd ASC"#, - line_id as i32 - ) - .fetch_all(conn) - .await?; + {order_clause}"# + ); + + let rows = sqlx::query_as::<_, StationRow>(&query_str) + .bind(line_id as i32) + .fetch_all(conn) + .await?; let stations: Vec = rows.into_iter().map(|row| row.into()).collect(); @@ -576,6 +609,7 @@ impl InternalStationRepository { async fn get_by_line_id_and_station_id( line_id: u32, station_id: u32, + direction_id: Option, conn: &mut PgConnection, ) -> Result, DomainError> { let stations: Vec = match Self::fetch_has_local_train_types_by_station_id( @@ -584,9 +618,15 @@ impl InternalStationRepository { .await? { true => { - let rows = sqlx::query_as!( - StationRow, - r#"WITH target_line_group AS ( + // When direction_id = 1 (上り) or 2 (下り), reverse the order + let order_clause = if matches!(direction_id, Some(1) | Some(2)) { + "ORDER BY sst.id DESC" + } else { + "ORDER BY sst.id ASC" + }; + + let query_str = format!( + r#"WITH target_line_group AS ( SELECT sst_inner.line_group_cd FROM station_station_types AS sst_inner LEFT JOIN types AS t_inner ON sst_inner.type_cd = t_inner.type_cd @@ -655,7 +695,8 @@ impl InternalStationRepository { t.kind, sst.id AS sst_id, sst.line_group_cd, - sst.pass + sst.pass, + s.transport_type FROM stations AS s JOIN station_station_types AS sst ON sst.line_group_cd = (SELECT line_group_cd FROM target_line_group) AND sst.station_cd = s.station_cd JOIN types AS t ON t.type_cd = sst.type_cd @@ -664,14 +705,16 @@ impl InternalStationRepository { LEFT JOIN aliases AS a ON a.id = la.alias_cd WHERE s.e_status = 0 AND l.e_status = 0 - ORDER BY sst.id"#, - station_id as i32 - ) + {order_clause}"# + ); + + let rows = sqlx::query_as::<_, StationRow>(&query_str) + .bind(station_id as i32) .fetch_all(conn) .await?; rows.into_iter().map(|row| row.into()).collect() } - false => 
Self::get_by_line_id_without_train_types(line_id, conn).await?, + false => Self::get_by_line_id_without_train_types(line_id, direction_id, conn).await?, }; Ok(stations) @@ -740,7 +783,8 @@ impl InternalStationRepository { t.type_name_ko, t.color, t.direction, - t.kind + t.kind, + s.transport_type FROM stations AS s JOIN lines AS l ON l.line_cd = s.line_cd @@ -834,12 +878,13 @@ impl InternalStationRepository { t.type_name_ko, t.color, t.direction, - t.kind + t.kind, + s.transport_type FROM stations AS s JOIN lines AS l ON l.line_cd = s.line_cd AND l.e_status = 0 LEFT JOIN station_station_types AS sst ON sst.station_cd = s.station_cd - LEFT JOIN types AS t ON t.type_cd = sst.type_cd + LEFT JOIN types AS t ON t.type_cd = sst.type_cd LEFT JOIN line_aliases AS la ON la.station_cd = s.station_cd LEFT JOIN aliases AS a ON a.id = la.alias_cd WHERE @@ -863,8 +908,11 @@ impl InternalStationRepository { latitude: f64, longitude: f64, limit: Option, + transport_type: Option, conn: &mut PgConnection, ) -> Result, DomainError> { + let transport_type_value: Option = transport_type.map(|t| t as i32); + let rows = sqlx::query_as::<_, StationRow>( r#"SELECT s.station_cd, @@ -924,7 +972,8 @@ impl InternalStationRepository { NULL::text AS type_name_ko, NULL::text AS color, NULL::int AS direction, - NULL::int AS kind + NULL::int AS kind, + s.transport_type FROM stations AS s JOIN lines AS l ON s.line_cd = l.line_cd @@ -933,12 +982,14 @@ impl InternalStationRepository { LEFT JOIN aliases AS a ON a.id = la.alias_cd WHERE s.e_status = 0 + AND ($4::int IS NULL OR COALESCE(s.transport_type, 0) = $4) ORDER BY point(s.lat, s.lon) <-> point($1, $2) LIMIT $3"#, ) .bind(latitude) .bind(longitude) .bind(limit.unwrap_or(1) as i32) + .bind(transport_type_value) .fetch_all(&mut *conn) .await?; @@ -951,11 +1002,13 @@ impl InternalStationRepository { station_name: String, limit: Option, from_station_group_id: Option, + transport_type: Option, conn: &mut PgConnection, ) -> Result, DomainError> { let 
station_name = &(format!("%{station_name}%")); let limit = limit.map(|v| v as i64); let from_station_group_id = from_station_group_id.map(|id| id as i32); + let transport_type_value: Option = transport_type.map(|t| t as i32); let rows = sqlx::query_as!( StationRow, @@ -1026,7 +1079,8 @@ impl InternalStationRepository { NULL::text AS type_name_ko, NULL::text AS color, NULL::int AS direction, - NULL::int AS kind + NULL::int AS kind, + s.transport_type FROM stations AS s LEFT JOIN from_stations AS fs ON fs.station_cd IS NOT NULL @@ -1052,6 +1106,7 @@ impl InternalStationRepository { OR s.station_name_ko LIKE $6 ) AND s.e_status = 0 + AND ($8::int IS NULL OR COALESCE(s.transport_type, 0) = $8) AND ( ( from_sst.id IS NOT NULL @@ -1077,7 +1132,8 @@ impl InternalStationRepository { station_name, station_name, station_name, - limit + limit, + transport_type_value ) .fetch_all(conn) .await?; @@ -1151,7 +1207,8 @@ impl InternalStationRepository { t.type_name_ko, t.color, t.direction, - t.kind + t.kind, + s.transport_type FROM stations AS s JOIN lines AS l ON l.line_cd = s.line_cd AND l.e_status = 0 LEFT JOIN station_station_types AS sst ON sst.line_group_cd = $1 @@ -1303,7 +1360,8 @@ impl InternalStationRepository { NULL::text AS type_name_ko, NULL::text AS color, NULL::int AS direction, - NULL::int AS kind + NULL::int AS kind, + sta.transport_type FROM stations AS sta JOIN common_lines AS cl ON sta.line_cd = cl.line_cd @@ -1437,7 +1495,8 @@ impl InternalStationRepository { tt.type_name_ko, tt.color, tt.direction, - tt.kind + tt.kind, + sta.transport_type FROM stations AS sta LEFT JOIN sst_cte AS sst ON sst.station_cd = sta.station_cd @@ -1529,6 +1588,7 @@ mod tests { color: Some("#008000".to_string()), direction: Some(0), kind: Some(0), + transport_type: Some(0), }; let station: Station = station_row.into(); @@ -1617,6 +1677,7 @@ mod tests { color: None, direction: None, kind: None, + transport_type: None, }; let station: Station = station_row.into(); diff --git 
a/stationapi/src/lib.rs b/stationapi/src/lib.rs index 6b78f653..22e1e70e 100644 --- a/stationapi/src/lib.rs +++ b/stationapi/src/lib.rs @@ -1,3 +1,4 @@ +pub mod config; pub mod domain; pub mod infrastructure; pub mod presentation; diff --git a/stationapi/src/main.rs b/stationapi/src/main.rs index c9dd22f3..58f4c184 100644 --- a/stationapi/src/main.rs +++ b/stationapi/src/main.rs @@ -1,6 +1,8 @@ -use csv::{ReaderBuilder, StringRecord}; +mod import; + use sqlx::postgres::PgPoolOptions; use sqlx::{Connection, PgConnection}; +use stationapi::config::fetch_database_url; use stationapi::infrastructure::company_repository::MyCompanyRepository; use stationapi::infrastructure::line_repository::MyLineRepository; use stationapi::infrastructure::station_repository::MyStationRepository; @@ -12,131 +14,13 @@ use stationapi::use_case::interactor::query::QueryInteractor; use tonic::transport::Server; use tonic_health::server::HealthReporter; -use std::path::Path; use std::sync::Arc; use std::{ env::{self, VarError}, - fs, net::{AddrParseError, SocketAddr}, }; use tracing::{info, warn}; -async fn import_csv() -> Result<(), Box> { - let db_url = fetch_database_url(); - let mut conn = PgConnection::connect(&db_url).await?; - let data_path = Path::new("data"); - - // Ensure required extensions exist before running schema import - sqlx::query("CREATE EXTENSION IF NOT EXISTS pg_trgm") - .execute(&mut conn) - .await?; - - sqlx::query("CREATE EXTENSION IF NOT EXISTS btree_gist") - .execute(&mut conn) - .await?; - - let create_sql_path = data_path.join("create_table.sql"); - let create_sql_content = fs::read(&create_sql_path).map_err(|e| { - tracing::error!("Failed to read create_table.sql: {}", e); - Box::new(e) as Box - })?; - let create_sql: String = String::from_utf8_lossy(&create_sql_content).parse()?; - sqlx::raw_sql(&create_sql).execute(&mut conn).await?; - let entries = fs::read_dir(data_path).map_err(|e| { - tracing::error!("Failed to read data directory: {}", e); - Box::new(e) as 
Box - })?; - - let mut file_list: Vec<_> = entries - .filter_map(|entry| { - let path = entry.ok()?.path(); - if path.is_file() && path.extension()? == "csv" && path.to_string_lossy().contains('!') - { - Some(path.file_name()?.to_string_lossy().into_owned()) - } else { - None - } - }) - .collect(); - file_list.sort(); - - for file_name in &file_list { - let mut rdr = ReaderBuilder::new().from_path(data_path.join(file_name))?; - - let headers_record = rdr.headers()?; - let headers: Vec = headers_record - .into_iter() - .map(|row| row.to_string()) - .collect(); - - let mut csv_data: Vec = Vec::new(); - let records: Vec = rdr.records().filter_map(|row| row.ok()).collect(); - csv_data.extend(records); - - let table_name = match file_name.split('!').nth(1) { - Some(part) => match part.split('.').next() { - Some(name) if !name.is_empty() => name, - _ => { - tracing::warn!("Invalid file name format: {}", file_name); - continue; - } - }, - None => { - tracing::warn!("Invalid file name format: {}", file_name); - continue; - } - }; - - let mut sql_lines_inner = Vec::new(); - sql_lines_inner.push(format!("INSERT INTO public.{table_name} VALUES ")); - - for (idx, data) in csv_data.iter().enumerate() { - let cols: Vec<_> = data - .iter() - .enumerate() - .filter_map(|(col_idx, col)| { - if headers - .get(col_idx) - .unwrap_or(&String::new()) - .starts_with('#') - { - return None; - } - - if col.is_empty() { - Some("NULL".to_string()) - } else if col == "DEFAULT" { - Some("DEFAULT".to_string()) - } else { - Some(format!( - "'{}'", - col.replace('\'', "''").replace('\\', "\\\\") - )) - } - }) - .collect(); - - let values_part = cols.join(","); - let separator = if idx == csv_data.len() - 1 { - ");" - } else { - ")," - }; - sql_lines_inner.push(format!("({values_part}{separator}")); - } - - sqlx::query(&sql_lines_inner.concat()) - .execute(&mut conn) - .await?; - } - - sqlx::query("ANALYZE;").execute(&mut conn).await?; - - info!("CSV import completed successfully."); - - Ok(()) -} 
- #[derive(sqlx::FromRow)] struct AliveRow { pub alive: Option, @@ -148,7 +32,7 @@ async fn station_api_service_status(mut reporter: HealthReporter) { Ok(conn) => conn, Err(e) => { tracing::error!("Failed to connect to database: {}", e); - panic!("Failed to connect to database: {e}"); // または適切な回復戦略 + panic!("Failed to connect to database: {e}"); } }; // NOTE: 今までの障害でDBのデータが一部だけ消えたという現象はなかったので駅数だけ見ればいい @@ -179,10 +63,27 @@ async fn run() -> std::result::Result<(), anyhow::Error> { warn!("Could not load .env.local"); }; - if let Err(e) = import_csv().await { + if let Err(e) = import::import_csv().await { return Err(anyhow::anyhow!("Failed to import CSV: {}", e)); } + // Import GTFS data (ToeiBus) + if let Err(e) = import::import_gtfs().await { + warn!( + "Failed to import GTFS data: {}. Continuing without GTFS data.", + e + ); + } + + // Integrate GTFS data into stations/lines tables + // This is wrapped in a transaction - if any step fails, all changes are rolled back + if let Err(e) = import::integrate_gtfs_to_stations().await { + return Err(anyhow::anyhow!( + "Failed to integrate GTFS to stations (transaction rolled back): {}", + e + )); + } + let db_url = &fetch_database_url(); let pool = Arc::new( match PgPoolOptions::new() @@ -193,7 +94,7 @@ async fn run() -> std::result::Result<(), anyhow::Error> { Ok(conn) => conn, Err(e) => { tracing::error!("Failed to connect to database: {}", e); - panic!("Failed to connect to database: {e}"); // または適切な回復戦略 + panic!("Failed to connect to database: {e}"); } }, ); @@ -275,14 +176,6 @@ fn fetch_addr() -> Result { } } -fn fetch_database_url() -> String { - match env::var("DATABASE_URL") { - Ok(s) => s, - Err(env::VarError::NotPresent) => panic!("$DATABASE_URL is not set."), - Err(VarError::NotUnicode(_)) => panic!("$DATABASE_URL should be written in Unicode."), - } -} - fn fetch_disable_grpc_web_flag() -> bool { match env::var("DISABLE_GRPC_WEB") { Ok(s) => s.parse().expect("Failed to parse $DISABLE_GRPC_WEB"), diff --git 
a/stationapi/src/presentation/controller/grpc.rs b/stationapi/src/presentation/controller/grpc.rs index 37938e19..f93b85eb 100644 --- a/stationapi/src/presentation/controller/grpc.rs +++ b/stationapi/src/presentation/controller/grpc.rs @@ -1,4 +1,5 @@ use crate::{ + domain::entity::gtfs::TransportType, infrastructure::{ company_repository::MyCompanyRepository, line_repository::MyLineRepository, station_repository::MyStationRepository, train_type_repository::MyTrainTypeRepository, @@ -11,12 +12,22 @@ use crate::{ GetStationByLineIdRequest, GetStationsByLineGroupIdRequest, GetStationsByNameRequest, GetTrainTypesByStationIdRequest, MultipleLineResponse, MultipleStationResponse, MultipleTrainTypeResponse, Route, RouteMinimalResponse, RouteResponse, RouteTypeResponse, - SingleLineResponse, SingleStationResponse, + SingleLineResponse, SingleStationResponse, TransportType as GrpcTransportType, }, use_case::{interactor::query::QueryInteractor, traits::query::QueryUseCase}, }; use tonic::Response; +/// Convert optional proto TransportType to domain TransportType +fn convert_transport_type(proto_type: Option) -> Option { + match proto_type.and_then(|v| GrpcTransportType::try_from(v).ok()) { + Some(GrpcTransportType::Rail) => Some(TransportType::Rail), + Some(GrpcTransportType::Bus) => Some(TransportType::Bus), + Some(GrpcTransportType::RailAndBus) => None, // Rail + nearby bus stops + _ => Some(TransportType::Rail), // Default: rail only + } +} + pub struct MyApi { pub query_use_case: QueryInteractor< MyStationRepository, @@ -32,9 +43,15 @@ impl StationApi for MyApi { &self, request: tonic::Request, ) -> Result, tonic::Status> { - let station_id = request.get_ref().id; + let request_ref = request.get_ref(); + let station_id = request_ref.id; + let transport_type = convert_transport_type(request_ref.transport_type); - let station = match self.query_use_case.find_station_by_id(station_id).await { + let station = match self + .query_use_case + .find_station_by_id(station_id, 
transport_type) + .await + { Ok(Some(station)) => station, Ok(None) => { return Err(PresentationalError::NotFound(format!( @@ -56,11 +73,13 @@ impl StationApi for MyApi { &self, request: tonic::Request, ) -> Result, tonic::Status> { - let station_ids = &request.get_ref().ids; + let request_ref = request.get_ref(); + let station_ids = &request_ref.ids; + let transport_type = convert_transport_type(request_ref.transport_type); let stations = match self .query_use_case - .get_stations_by_id_vec(station_ids) + .get_stations_by_id_vec(station_ids, transport_type) .await { Ok(stations) => stations, @@ -78,9 +97,15 @@ impl StationApi for MyApi { &self, request: tonic::Request, ) -> Result, tonic::Status> { - let group_id = request.get_ref().group_id; + let request_ref = request.get_ref(); + let group_id = request_ref.group_id; + let transport_type = convert_transport_type(request_ref.transport_type); - match self.query_use_case.get_stations_by_group_id(group_id).await { + match self + .query_use_case + .get_stations_by_group_id(group_id, transport_type) + .await + { Ok(stations) => { return Ok(Response::new(MultipleStationResponse { stations: stations.into_iter().map(|station| station.into()).collect(), @@ -97,9 +122,10 @@ impl StationApi for MyApi { let latitude = request_ref.latitude; let longitude = request_ref.longitude; let limit = request_ref.limit; + let transport_type = convert_transport_type(request_ref.transport_type); let stations = match self .query_use_case - .get_stations_by_coordinates(latitude, longitude, limit) + .get_stations_by_coordinates(latitude, longitude, limit, transport_type) .await { Ok(stations) => stations, @@ -114,12 +140,15 @@ impl StationApi for MyApi { &self, request: tonic::Request, ) -> Result, tonic::Status> { - let line_id = request.get_ref().line_id; - let station_id = request.get_ref().station_id; + let request_ref = request.get_ref(); + let line_id = request_ref.line_id; + let station_id = request_ref.station_id; + let direction_id 
= request_ref.direction_id; + let transport_type = convert_transport_type(request_ref.transport_type); match self .query_use_case - .get_stations_by_line_id(line_id, station_id) + .get_stations_by_line_id(line_id, station_id, direction_id, transport_type) .await { Ok(stations) => { @@ -138,12 +167,14 @@ impl StationApi for MyApi { let query_station_name = &request_ref.station_name; let query_limit = request_ref.limit; let from_station_group_id = request_ref.from_station_group_id; + let transport_type = convert_transport_type(request_ref.transport_type); match self .query_use_case .get_stations_by_name( query_station_name.to_string(), query_limit, from_station_group_id, + transport_type, ) .await { @@ -162,10 +193,11 @@ impl StationApi for MyApi { ) -> Result, tonic::Status> { let request_ref = request.get_ref(); let query_line_group_id = request_ref.line_group_id; + let transport_type = convert_transport_type(request_ref.transport_type); match self .query_use_case - .get_stations_by_line_group_id(query_line_group_id) + .get_stations_by_line_group_id(query_line_group_id, transport_type) .await { Ok(stations) => { @@ -351,3 +383,574 @@ impl StationApi for MyApi { } } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + domain::entity::{ + company::Company, gtfs::TransportType, line::Line, line_symbol::LineSymbol, + station::Station, station_number::StationNumber, train_type::TrainType, + }, + proto::RouteMinimalResponse, + use_case::{error::UseCaseError, traits::query::QueryUseCase}, + }; + use async_trait::async_trait; + use std::sync::{Arc, Mutex}; + + // ============================================ + // Unit tests for convert_transport_type + // ============================================ + + #[test] + fn test_convert_transport_type_rail() { + let rail_value = Some(GrpcTransportType::Rail as i32); + let result = convert_transport_type(rail_value); + assert_eq!(result, Some(TransportType::Rail)); + } + + #[test] + fn test_convert_transport_type_bus() { 
+ let bus_value = Some(GrpcTransportType::Bus as i32); + let result = convert_transport_type(bus_value); + assert_eq!(result, Some(TransportType::Bus)); + } + + #[test] + fn test_convert_transport_type_unspecified() { + let unspecified_value = Some(GrpcTransportType::Unspecified as i32); + let result = convert_transport_type(unspecified_value); + assert_eq!(result, Some(TransportType::Rail)); + } + + #[test] + fn test_convert_transport_type_rail_and_bus() { + let value = Some(GrpcTransportType::RailAndBus as i32); + let result = convert_transport_type(value); + assert_eq!(result, None); + } + + #[test] + fn test_convert_transport_type_none() { + let result = convert_transport_type(None); + assert_eq!(result, Some(TransportType::Rail)); + } + + #[test] + fn test_convert_transport_type_unknown_value() { + let unknown_value = Some(999); + let result = convert_transport_type(unknown_value); + assert_eq!(result, Some(TransportType::Rail)); + } + + #[test] + fn test_convert_transport_type_negative_value() { + let negative_value = Some(-1); + let result = convert_transport_type(negative_value); + assert_eq!(result, Some(TransportType::Rail)); + } + + // ============================================ + // Mock QueryUseCase for integration tests + // ============================================ + + /// Tracks which transport_type filter was passed to the mock + #[derive(Clone, Default)] + struct MockQueryUseCase { + /// Captured transport_type from get_stations_by_coordinates calls + captured_coordinates_transport_type: Arc>>>, + /// Captured transport_type from get_stations_by_name calls + captured_name_transport_type: Arc>>>, + /// Stations to return (can be configured per test) + stations_to_return: Arc>>, + } + + impl MockQueryUseCase { + fn with_stations(stations: Vec) -> Self { + Self { + stations_to_return: Arc::new(Mutex::new(stations)), + ..Default::default() + } + } + + fn get_captured_coordinates_transport_type(&self) -> Option> { + 
self.captured_coordinates_transport_type + .lock() + .unwrap() + .clone() + } + + fn get_captured_name_transport_type(&self) -> Option> { + self.captured_name_transport_type.lock().unwrap().clone() + } + } + + fn create_test_station(id: u32, transport_type: TransportType) -> Station { + Station { + station_cd: id as i32, + station_g_cd: id as i32, + station_name: format!("Station {}", id), + station_name_k: "テスト駅".to_string(), + station_name_r: Some("Test Station".to_string()), + station_name_zh: None, + station_name_ko: None, + station_numbers: vec![], + station_number1: None, + station_number2: None, + station_number3: None, + station_number4: None, + three_letter_code: None, + line_cd: 1, + line: None, + lines: vec![], + pref_cd: 13, + post: "100-0001".to_string(), + address: "Test Address".to_string(), + lon: 139.7673068, + lat: 35.6809591, + open_ymd: "19900101".to_string(), + close_ymd: "99991231".to_string(), + e_status: 0, + e_sort: 1, + stop_condition: crate::proto::StopCondition::All, + distance: None, + has_train_types: false, + train_type: None, + company_cd: None, + line_name: Some("Test Line".to_string()), + line_name_k: Some("テストライン".to_string()), + line_name_h: Some("テストライン".to_string()), + line_name_r: Some("Test Line".to_string()), + line_name_zh: None, + line_name_ko: None, + line_color_c: None, + line_type: None, + line_symbol1: None, + line_symbol2: None, + line_symbol3: None, + line_symbol4: None, + line_symbol1_color: None, + line_symbol2_color: None, + line_symbol3_color: None, + line_symbol4_color: None, + line_symbol1_shape: None, + line_symbol2_shape: None, + line_symbol3_shape: None, + line_symbol4_shape: None, + average_distance: None, + type_id: None, + sst_id: None, + type_cd: None, + line_group_cd: None, + pass: None, + type_name: None, + type_name_k: None, + type_name_r: None, + type_name_zh: None, + type_name_ko: None, + color: None, + direction: None, + kind: None, + transport_type, + } + } + + #[async_trait] + impl QueryUseCase 
for MockQueryUseCase { + async fn find_station_by_id( + &self, + _station_id: u32, + _transport_type: Option, + ) -> Result, UseCaseError> { + Ok(None) + } + + async fn get_stations_by_id_vec( + &self, + _station_ids: &[u32], + _transport_type: Option, + ) -> Result, UseCaseError> { + Ok(vec![]) + } + + async fn get_stations_by_group_id( + &self, + _station_group_id: u32, + _transport_type: Option, + ) -> Result, UseCaseError> { + Ok(vec![]) + } + + async fn get_stations_by_group_id_vec( + &self, + _station_group_id_vec: &[u32], + ) -> Result, UseCaseError> { + Ok(vec![]) + } + + async fn get_stations_by_coordinates( + &self, + _latitude: f64, + _longitude: f64, + _limit: Option, + transport_type: Option, + ) -> Result, UseCaseError> { + // Capture the transport_type that was passed + *self.captured_coordinates_transport_type.lock().unwrap() = Some(transport_type); + + // Return stations, filtering by transport_type if specified + let stations = self.stations_to_return.lock().unwrap().clone(); + let filtered: Vec = match transport_type { + Some(tt) => stations + .into_iter() + .filter(|s| s.transport_type == tt) + .collect(), + None => stations, + }; + Ok(filtered) + } + + async fn get_stations_by_line_id( + &self, + _line_id: u32, + _station_id: Option, + _direction_id: Option, + _transport_type: Option, + ) -> Result, UseCaseError> { + Ok(vec![]) + } + + async fn get_stations_by_name( + &self, + _station_name: String, + _limit: Option, + _from_station_group_id: Option, + transport_type: Option, + ) -> Result, UseCaseError> { + // Capture the transport_type that was passed + *self.captured_name_transport_type.lock().unwrap() = Some(transport_type); + + // Return stations, filtering by transport_type if specified + let stations = self.stations_to_return.lock().unwrap().clone(); + let filtered: Vec = match transport_type { + Some(tt) => stations + .into_iter() + .filter(|s| s.transport_type == tt) + .collect(), + None => stations, + }; + Ok(filtered) + } + + async 
fn find_company_by_id_vec( + &self, + _company_id_vec: &[u32], + ) -> Result, UseCaseError> { + Ok(vec![]) + } + + async fn update_station_vec_with_attributes( + &self, + stations: Vec, + _line_group_id: Option, + _transport_type: Option, + ) -> Result, UseCaseError> { + Ok(stations) + } + + async fn get_lines_by_station_group_id( + &self, + _station_group_id: u32, + ) -> Result, UseCaseError> { + Ok(vec![]) + } + + async fn get_lines_by_station_group_id_vec( + &self, + _station_group_id_vec: &[u32], + ) -> Result, UseCaseError> { + Ok(vec![]) + } + + fn get_station_numbers(&self, _station: &Station) -> Vec { + vec![] + } + + fn get_line_symbols(&self, _line: &Line) -> Vec { + vec![] + } + + fn extract_line_from_station(&self, station: &Station) -> Line { + Line { + line_cd: station.line_cd, + company_cd: 0, + company: None, + line_name: "Test Line".to_string(), + line_name_k: "テストライン".to_string(), + line_name_h: "テストライン".to_string(), + line_name_r: Some("Test Line".to_string()), + line_name_zh: None, + line_name_ko: None, + line_color_c: None, + line_type: None, + line_symbols: vec![], + line_symbol1: None, + line_symbol2: None, + line_symbol3: None, + line_symbol4: None, + line_symbol1_color: None, + line_symbol2_color: None, + line_symbol3_color: None, + line_symbol4_color: None, + line_symbol1_shape: None, + line_symbol2_shape: None, + line_symbol3_shape: None, + line_symbol4_shape: None, + e_status: 0, + e_sort: 0, + average_distance: None, + station: None, + train_type: None, + line_group_cd: None, + station_cd: None, + station_g_cd: None, + type_cd: None, + transport_type: TransportType::Rail, + } + } + + async fn get_stations_by_line_group_id( + &self, + _line_group_id: u32, + _transport_type: Option, + ) -> Result, UseCaseError> { + Ok(vec![]) + } + + async fn get_train_types_by_station_id( + &self, + _station_id: u32, + ) -> Result, UseCaseError> { + Ok(vec![]) + } + + async fn get_train_types_by_station_id_vec( + &self, + _station_id_vec: &[u32], + 
_line_group_id: Option, + ) -> Result, UseCaseError> { + Ok(vec![]) + } + + async fn get_routes( + &self, + _from_station_id: u32, + _to_station_id: u32, + _via_line_id: Option, + ) -> Result, UseCaseError> { + Ok(vec![]) + } + + async fn get_routes_minimal( + &self, + _from_station_id: u32, + _to_station_id: u32, + _via_line_id: Option, + ) -> Result { + Ok(RouteMinimalResponse { + routes: vec![], + lines: vec![], + next_page_token: String::new(), + }) + } + + async fn get_train_types( + &self, + _from_station_id: u32, + _to_station_id: u32, + _via_line_id: Option, + ) -> Result, UseCaseError> { + Ok(vec![]) + } + + async fn find_line_by_id(&self, _line_id: u32) -> Result, UseCaseError> { + Ok(None) + } + + async fn get_lines_by_id_vec(&self, _line_ids: &[u32]) -> Result, UseCaseError> { + Ok(vec![]) + } + + async fn get_lines_by_name( + &self, + _line_name: String, + _limit: Option, + ) -> Result, UseCaseError> { + Ok(vec![]) + } + + async fn get_connected_stations( + &self, + _from_station_id: u32, + _to_station_id: u32, + ) -> Result, UseCaseError> { + Ok(vec![]) + } + } + + // ============================================ + // Integration tests for transport_type filtering + // ============================================ + + #[tokio::test] + async fn test_get_stations_by_coordinates_with_rail_filter() { + let rail_station = create_test_station(1, TransportType::Rail); + let bus_station = create_test_station(2, TransportType::Bus); + let mock = MockQueryUseCase::with_stations(vec![rail_station, bus_station]); + + let result = mock + .get_stations_by_coordinates(35.0, 139.0, Some(10), Some(TransportType::Rail)) + .await + .unwrap(); + + // Verify the transport_type was captured correctly + assert_eq!( + mock.get_captured_coordinates_transport_type(), + Some(Some(TransportType::Rail)) + ); + + // Verify filtering works + assert_eq!(result.len(), 1); + assert_eq!(result[0].transport_type, TransportType::Rail); + } + + #[tokio::test] + async fn 
test_get_stations_by_coordinates_with_bus_filter() { + let rail_station = create_test_station(1, TransportType::Rail); + let bus_station = create_test_station(2, TransportType::Bus); + let mock = MockQueryUseCase::with_stations(vec![rail_station, bus_station]); + + let result = mock + .get_stations_by_coordinates(35.0, 139.0, Some(10), Some(TransportType::Bus)) + .await + .unwrap(); + + // Verify the transport_type was captured correctly + assert_eq!( + mock.get_captured_coordinates_transport_type(), + Some(Some(TransportType::Bus)) + ); + + // Verify filtering works + assert_eq!(result.len(), 1); + assert_eq!(result[0].transport_type, TransportType::Bus); + } + + #[tokio::test] + async fn test_get_stations_by_coordinates_with_no_filter() { + let rail_station = create_test_station(1, TransportType::Rail); + let bus_station = create_test_station(2, TransportType::Bus); + let mock = MockQueryUseCase::with_stations(vec![rail_station, bus_station]); + + let result = mock + .get_stations_by_coordinates(35.0, 139.0, Some(10), None) + .await + .unwrap(); + + // Verify no filter was applied + assert_eq!(mock.get_captured_coordinates_transport_type(), Some(None)); + + // Verify all stations are returned + assert_eq!(result.len(), 2); + } + + #[tokio::test] + async fn test_get_stations_by_name_with_rail_filter() { + let rail_station = create_test_station(1, TransportType::Rail); + let bus_station = create_test_station(2, TransportType::Bus); + let mock = MockQueryUseCase::with_stations(vec![rail_station, bus_station]); + + let result = mock + .get_stations_by_name( + "Test".to_string(), + Some(10), + None, + Some(TransportType::Rail), + ) + .await + .unwrap(); + + // Verify the transport_type was captured correctly + assert_eq!( + mock.get_captured_name_transport_type(), + Some(Some(TransportType::Rail)) + ); + + // Verify filtering works + assert_eq!(result.len(), 1); + assert_eq!(result[0].transport_type, TransportType::Rail); + } + + #[tokio::test] + async fn 
test_get_stations_by_name_with_bus_filter() { + let rail_station = create_test_station(1, TransportType::Rail); + let bus_station = create_test_station(2, TransportType::Bus); + let mock = MockQueryUseCase::with_stations(vec![rail_station, bus_station]); + + let result = mock + .get_stations_by_name("Test".to_string(), Some(10), None, Some(TransportType::Bus)) + .await + .unwrap(); + + // Verify the transport_type was captured correctly + assert_eq!( + mock.get_captured_name_transport_type(), + Some(Some(TransportType::Bus)) + ); + + // Verify filtering works + assert_eq!(result.len(), 1); + assert_eq!(result[0].transport_type, TransportType::Bus); + } + + #[tokio::test] + async fn test_get_stations_by_name_with_no_filter() { + let rail_station = create_test_station(1, TransportType::Rail); + let bus_station = create_test_station(2, TransportType::Bus); + let mock = MockQueryUseCase::with_stations(vec![rail_station, bus_station]); + + let result = mock + .get_stations_by_name("Test".to_string(), Some(10), None, None) + .await + .unwrap(); + + // Verify no filter was applied + assert_eq!(mock.get_captured_name_transport_type(), Some(None)); + + // Verify all stations are returned + assert_eq!(result.len(), 2); + } + + #[test] + fn test_convert_transport_type_integration_with_request_extraction() { + // Case 1: Some(Rail) -> Some(TransportType::Rail) + let result = convert_transport_type(Some(GrpcTransportType::Rail as i32)); + assert_eq!(result, Some(TransportType::Rail)); + + // Case 2: Some(Bus) -> Some(TransportType::Bus) + let result = convert_transport_type(Some(GrpcTransportType::Bus as i32)); + assert_eq!(result, Some(TransportType::Bus)); + + // Case 3: Some(Unspecified) -> Some(TransportType::Rail) + let result = convert_transport_type(Some(GrpcTransportType::Unspecified as i32)); + assert_eq!(result, Some(TransportType::Rail)); + + // Case 4: Some(RailAndBus) -> None + let result = convert_transport_type(Some(GrpcTransportType::RailAndBus as i32)); + 
assert_eq!(result, None); + + // Case 5: None -> Some(TransportType::Rail) + let result = convert_transport_type(None); + assert_eq!(result, Some(TransportType::Rail)); + } +} diff --git a/stationapi/src/use_case/dto/line.rs b/stationapi/src/use_case/dto/line.rs index b7567019..16cbea73 100644 --- a/stationapi/src/use_case/dto/line.rs +++ b/stationapi/src/use_case/dto/line.rs @@ -1,4 +1,7 @@ -use crate::{domain::entity::line::Line, proto::Line as GrpcLine}; +use crate::{ + domain::entity::{gtfs::TransportType, line::Line}, + proto::{Line as GrpcLine, TransportType as GrpcTransportType}, +}; impl From for GrpcLine { fn from(line: Line) -> Self { @@ -20,6 +23,14 @@ impl From for GrpcLine { .train_type .map(|train_type| Box::new(train_type.into())), average_distance: line.average_distance.unwrap_or(0.0), + transport_type: convert_transport_type(line.transport_type), } } } + +fn convert_transport_type(t: TransportType) -> i32 { + match t { + TransportType::Rail => GrpcTransportType::Rail as i32, + TransportType::Bus => GrpcTransportType::Bus as i32, + } +} diff --git a/stationapi/src/use_case/dto/station.rs b/stationapi/src/use_case/dto/station.rs index 9f7db680..44195a60 100644 --- a/stationapi/src/use_case/dto/station.rs +++ b/stationapi/src/use_case/dto/station.rs @@ -1,4 +1,16 @@ -use crate::{domain::entity::station::Station, proto::Station as GrpcStation}; +use crate::{ + domain::entity::{gtfs::TransportType, station::Station}, + proto::{Station as GrpcStation, TransportType as GrpcTransportType}, +}; + +impl From for i32 { + fn from(value: TransportType) -> Self { + match value { + TransportType::Rail => GrpcTransportType::Rail as i32, + TransportType::Bus => GrpcTransportType::Bus as i32, + } + } +} impl From for GrpcStation { fn from(station: Station) -> Self { @@ -30,6 +42,7 @@ impl From for GrpcStation { distance: station.distance, has_train_types: Some(station.has_train_types), train_type: station.train_type.map(|tt| Box::new((*tt).into())), + 
transport_type: station.transport_type.into(), } } } diff --git a/stationapi/src/use_case/interactor/query.rs b/stationapi/src/use_case/interactor/query.rs index d2314bcd..58fe590d 100644 --- a/stationapi/src/use_case/interactor/query.rs +++ b/stationapi/src/use_case/interactor/query.rs @@ -1,10 +1,13 @@ use std::collections::{BTreeMap, HashSet}; +/// Maximum distance in meters to search for nearby bus stops from a rail station +const NEARBY_BUS_STOP_RADIUS_METERS: f64 = 300.0; + use crate::{ domain::{ entity::{ - company::Company, line::Line, line_symbol::LineSymbol, station::Station, - station_number::StationNumber, train_type::TrainType, + company::Company, gtfs::TransportType, line::Line, line_symbol::LineSymbol, + station::Station, station_number::StationNumber, train_type::TrainType, }, normalize::normalize_for_search, repository::{ @@ -33,24 +36,43 @@ where TR: TrainTypeRepository, CR: CompanyRepository, { - async fn find_station_by_id(&self, station_id: u32) -> Result, UseCaseError> { + async fn find_station_by_id( + &self, + station_id: u32, + transport_type: Option, + ) -> Result, UseCaseError> { let Some(station) = self.station_repository.find_by_id(station_id).await? 
else { return Ok(None); }; + // Filter by transport_type if specified + if let Some(requested_type) = transport_type { + if station.transport_type != requested_type { + return Ok(None); + } + } let stations = self - .update_station_vec_with_attributes(vec![station], None) + .update_station_vec_with_attributes(vec![station], None, transport_type) .await?; - let station = stations.into_iter().next(); - Ok(station) + Ok(stations.into_iter().next()) } async fn get_stations_by_id_vec( &self, station_ids: &[u32], + transport_type: Option, ) -> Result, UseCaseError> { let stations = self.station_repository.get_by_id_vec(station_ids).await?; + // Filter by transport_type if specified + let stations = if let Some(requested_type) = transport_type { + stations + .into_iter() + .filter(|s| s.transport_type == requested_type) + .collect() + } else { + stations + }; let stations = self - .update_station_vec_with_attributes(stations, None) + .update_station_vec_with_attributes(stations, None, transport_type) .await?; Ok(stations) @@ -58,14 +80,25 @@ where async fn get_stations_by_group_id( &self, station_group_id: u32, + transport_type: Option, ) -> Result, UseCaseError> { let stations = self .station_repository .get_by_station_group_id(station_group_id) .await?; + // Filter by transport_type if specified + let stations = if let Some(requested_type) = transport_type { + stations + .into_iter() + .filter(|s| s.transport_type == requested_type) + .collect() + } else { + stations + }; + let stations = self - .update_station_vec_with_attributes(stations, Some(station_group_id)) + .update_station_vec_with_attributes(stations, Some(station_group_id), transport_type) .await?; Ok(stations) @@ -97,14 +130,15 @@ where latitude: f64, longitude: f64, limit: Option, + transport_type: Option, ) -> Result, UseCaseError> { let stations = self .station_repository - .get_by_coordinates(latitude, longitude, limit) + .get_by_coordinates(latitude, longitude, limit, transport_type) .await?; let 
stations = self - .update_station_vec_with_attributes(stations, None) + .update_station_vec_with_attributes(stations, None, transport_type) .await?; Ok(stations) @@ -113,10 +147,14 @@ where &self, line_id: u32, station_id: Option, + direction_id: Option, + _transport_type: Option, ) -> Result, UseCaseError> { + // Note: transport_type is ignored for line-based queries + // as mixing bus stops with rail line stations doesn't make sense let stations = self .station_repository - .get_by_line_id(line_id, station_id) + .get_by_line_id(line_id, station_id, direction_id) .await?; let line_group_id = if let Some(sta) = stations @@ -129,7 +167,7 @@ where }; let stations = self - .update_station_vec_with_attributes(stations, line_group_id.map(|id| id as u32)) + .update_station_vec_with_attributes(stations, line_group_id.map(|id| id as u32), None) .await?; Ok(stations) @@ -139,6 +177,7 @@ where station_name: String, limit: Option, from_station_group_id: Option, + transport_type: Option, ) -> Result, UseCaseError> { let stations = self .station_repository @@ -146,11 +185,12 @@ where normalize_for_search(&station_name), limit, from_station_group_id, + transport_type, ) .await?; let stations = self - .update_station_vec_with_attributes(stations, None) + .update_station_vec_with_attributes(stations, None, transport_type) .await?; Ok(stations) @@ -170,6 +210,7 @@ where &self, mut stations: Vec, line_group_id: Option, + transport_type: Option, ) -> Result, UseCaseError> { let station_group_ids = stations .iter() @@ -230,6 +271,19 @@ where .cloned() .collect(); + // For rail stations, add nearby bus routes to lines array + // Only add bus routes if transport_type is not specified or is not Bus-only + let should_include_bus_routes = + transport_type.is_none() || transport_type == Some(TransportType::Rail); + if station.transport_type == TransportType::Rail && should_include_bus_routes { + let nearby_bus_lines = self.get_nearby_bus_lines(station.lat, station.lon).await?; + for 
bus_line in nearby_bus_lines { + if seen_line_cds.insert(bus_line.line_cd) { + lines.push(bus_line); + } + } + } + for line in lines.iter_mut() { line.company = companies .iter() @@ -277,14 +331,17 @@ where async fn get_stations_by_line_group_id( &self, line_group_id: u32, + _transport_type: Option, ) -> Result, UseCaseError> { + // Note: transport_type is ignored for line-based queries + // as mixing bus stops with rail line stations doesn't make sense let stations = self .station_repository .get_by_line_group_id(line_group_id) .await?; let stations = self - .update_station_vec_with_attributes(stations, Some(line_group_id)) + .update_station_vec_with_attributes(stations, Some(line_group_id), None) .await?; Ok(stations) @@ -393,6 +450,7 @@ where station_g_cd: Some(station.station_g_cd), average_distance: station.average_distance, type_cd: station.type_cd, + transport_type: station.transport_type, } } fn get_line_symbols(&self, line: &Line) -> Vec { @@ -807,6 +865,7 @@ where station_cd: line.station_cd, station_g_cd: line.station_g_cd, type_cd: line.type_cd, + transport_type: line.transport_type, }) .collect::>(); @@ -857,6 +916,60 @@ where TR: TrainTypeRepository, CR: CompanyRepository, { + /// Get bus lines (routes) within 300m radius of the given coordinates + async fn get_nearby_bus_lines( + &self, + ref_lat: f64, + ref_lon: f64, + ) -> Result, crate::use_case::error::UseCaseError> { + let nearby_candidates = self + .station_repository + .get_by_coordinates(ref_lat, ref_lon, Some(50), Some(TransportType::Bus)) + .await?; + + let nearby_bus_stops: Vec = nearby_candidates + .into_iter() + .filter(|bus_stop| { + let distance = haversine_distance(ref_lat, ref_lon, bus_stop.lat, bus_stop.lon); + distance <= NEARBY_BUS_STOP_RADIUS_METERS + }) + .collect(); + + if nearby_bus_stops.is_empty() { + return Ok(vec![]); + } + + // Get bus lines for nearby bus stops + let bus_station_group_ids: Vec = nearby_bus_stops + .iter() + .map(|s| s.station_g_cd as u32) + .collect(); + 
+ let mut bus_lines = self + .line_repository + .get_by_station_group_id_vec(&bus_station_group_ids) + .await?; + + // Add line symbols and filter to only bus lines + let mut seen_line_cds = std::collections::HashSet::new(); + bus_lines.retain(|line| { + line.transport_type == TransportType::Bus && seen_line_cds.insert(line.line_cd) + }); + + for line in bus_lines.iter_mut() { + line.line_symbols = self.get_line_symbols(line); + + // Find the matching bus stop for this line and embed it + if let Some(bus_stop) = nearby_bus_stops.iter().find(|s| s.line_cd == line.line_cd) { + let mut station_copy = bus_stop.clone(); + station_copy.station_numbers = self.get_station_numbers(&station_copy); + line.station = Some(station_copy); + } + } + + Ok(bus_lines) + } + fn build_route_tree_map(&self, stops: &[Station]) -> BTreeMap> { stops.iter().fold( BTreeMap::new(), @@ -942,6 +1055,24 @@ where color: row.color.clone(), direction: row.direction, kind: row.kind, + transport_type: row.transport_type, } } } + +/// Calculate the distance between two points on Earth using the Haversine formula. +/// Returns the distance in meters. 
+fn haversine_distance(lat1: f64, lon1: f64, lat2: f64, lon2: f64) -> f64 { + const EARTH_RADIUS_METERS: f64 = 6_371_000.0; + + let lat1_rad = lat1.to_radians(); + let lat2_rad = lat2.to_radians(); + let delta_lat = (lat2 - lat1).to_radians(); + let delta_lon = (lon2 - lon1).to_radians(); + + let a = (delta_lat / 2.0).sin().powi(2) + + lat1_rad.cos() * lat2_rad.cos() * (delta_lon / 2.0).sin().powi(2); + let c = 2.0 * a.sqrt().asin(); + + EARTH_RADIUS_METERS * c +} diff --git a/stationapi/src/use_case/traits/query.rs b/stationapi/src/use_case/traits/query.rs index 0aa1abe8..5bd6ae7f 100644 --- a/stationapi/src/use_case/traits/query.rs +++ b/stationapi/src/use_case/traits/query.rs @@ -2,8 +2,8 @@ use async_trait::async_trait; use crate::{ domain::entity::{ - company::Company, line::Line, line_symbol::LineSymbol, station::Station, - station_number::StationNumber, train_type::TrainType, + company::Company, gtfs::TransportType, line::Line, line_symbol::LineSymbol, + station::Station, station_number::StationNumber, train_type::TrainType, }, proto::{Route, RouteMinimalResponse}, use_case::error::UseCaseError, @@ -11,14 +11,20 @@ use crate::{ #[async_trait] pub trait QueryUseCase: Send + Sync + 'static { - async fn find_station_by_id(&self, station_id: u32) -> Result, UseCaseError>; + async fn find_station_by_id( + &self, + station_id: u32, + transport_type: Option, + ) -> Result, UseCaseError>; async fn get_stations_by_id_vec( &self, station_ids: &[u32], + transport_type: Option, ) -> Result, UseCaseError>; async fn get_stations_by_group_id( &self, station_group_id: u32, + transport_type: Option, ) -> Result, UseCaseError>; async fn get_stations_by_group_id_vec( &self, @@ -29,17 +35,21 @@ pub trait QueryUseCase: Send + Sync + 'static { latitude: f64, longitude: f64, limit: Option, + transport_type: Option, ) -> Result, UseCaseError>; async fn get_stations_by_line_id( &self, line_id: u32, station_id: Option, + direction_id: Option, + transport_type: Option, ) -> Result, 
UseCaseError>; async fn get_stations_by_name( &self, station_name: String, - get_stations_by_name: Option, + limit: Option, from_station_group_id: Option, + transport_type: Option, ) -> Result, UseCaseError>; async fn find_company_by_id_vec( &self, @@ -49,6 +59,7 @@ pub trait QueryUseCase: Send + Sync + 'static { &self, stations: Vec, line_group_id: Option, + transport_type: Option, ) -> Result, UseCaseError>; async fn get_lines_by_station_group_id( &self, @@ -64,6 +75,7 @@ pub trait QueryUseCase: Send + Sync + 'static { async fn get_stations_by_line_group_id( &self, line_group_id: u32, + transport_type: Option, ) -> Result, UseCaseError>; async fn get_train_types_by_station_id( &self,