@@ -44,9 +44,6 @@ const {
44
44
'lower-case-lang-codes' : {
45
45
type : 'boolean' ,
46
46
} ,
47
- 'stops-location-index' : {
48
- type : 'boolean' ,
49
- } ,
50
47
'stats-by-route-date' : {
51
48
type : 'string' ,
52
49
} ,
@@ -59,21 +56,6 @@ const {
59
56
'schema' : {
60
57
type : 'string' ,
61
58
} ,
62
- 'postgraphile' : {
63
- type : 'boolean' ,
64
- } ,
65
- 'postgraphile-password' : {
66
- type : 'string' ,
67
- } ,
68
- 'postgrest' : {
69
- type : 'boolean' ,
70
- } ,
71
- 'postgrest-password' : {
72
- type : 'string' ,
73
- } ,
74
- 'postgrest-query-cost-limit' : {
75
- type : 'string' ,
76
- } ,
77
59
'import-metadata' : {
78
60
type : 'boolean' ,
79
61
}
@@ -84,7 +66,7 @@ const {
84
66
if ( flags . help ) {
85
67
process . stdout . write ( `
86
68
Usage:
87
- gtfs-to-sql [options] [--] <gtfs-file> ...
69
+ import-gtfs-into-duckdb [options] [--] <path-to-duckdb> <gtfs-file> ...
88
70
Options:
89
71
--silent -s Don't show files being converted.
90
72
--require-dependencies -d Require files that the specified GTFS files depend
@@ -102,8 +84,6 @@ Options:
102
84
--routes-without-agency-id Don't require routes.txt items to have an agency_id.
103
85
--stops-without-level-id Don't require stops.txt items to have a level_id.
104
86
Default if levels.txt has not been provided.
105
- --stops-location-index Create a spatial index on stops.stop_loc for efficient
106
- queries by geolocation.
107
87
--lower-case-lang-codes Accept Language Codes (e.g. in feed_info.feed_lang)
108
88
with a different casing than the official BCP-47
109
89
language tags (as specified by the GTFS spec),
@@ -124,34 +104,18 @@ Options:
124
104
currently running trips over time, by hour.
125
105
Like --stats-by-route-date, this flag accepts
126
106
none, view & materialized-view.
127
- --schema The schema to use for the database. Default: public
128
- Even when importing into a schema other than \`public\`,
129
- a function \`public.gtfs_via_postgres_import_version()\`
130
- gets created, to ensure that multiple imports into the
131
- same database are all made using the same version. See
132
- also multiple-datasets.md in the docs.
133
- --postgraphile Tweak generated SQL for PostGraphile usage.
134
- https://www.graphile.org/postgraphile/
135
- --postgraphile-password Password for the PostGraphile PostgreSQL user.
136
- Default: $POSTGRAPHILE_PGPASSWORD, fallback random.
137
- --postgrest Tweak generated SQL for PostgREST usage.
138
- Please combine it with --schema.
139
- https://postgrest.org/
140
- --postgrest-password Password for the PostgREST PostgreSQL user \`web_anon\`.
141
- Default: $POSTGREST_PGPASSWORD, fallback random.
142
- --postgrest-query-cost-limit Define a cost limit [1] for queries executed by PostgREST
143
- on behalf of a user. It is only enforced if
144
- pg_plan_filter [2] is installed in the database!
145
- Must be a positive float. Default: none
146
- [1] https://www.postgresql.org/docs/14/using-explain.html
147
- [2] https://github.com/pgexperts/pg_plan_filter
107
+ --schema The schema to use for the database. Default: main
108
+ May not contain \`.\`.
148
109
--import-metadata Create functions returning import metadata:
149
110
- gtfs_data_imported_at (timestamp with time zone)
150
111
- gtfs_via_postgres_version (text)
151
112
- gtfs_via_postgres_options (jsonb)
113
+ Notes:
114
+ If you just want to check if the GTFS data can be imported but don't care about the
115
+ resulting DuckDB database file, you can import into an in-memory database by specifying
116
+ \`:memory:\` as the <path-to-duckdb>.
152
117
Examples:
153
- gtfs-to-sql some-gtfs/*.txt | sponge | psql -b # import into PostgreSQL
154
- gtfs-to-sql -u -- some-gtfs/*.txt | gzip >gtfs.sql.gz # generate a gzipped SQL dump
118
+ import-gtfs-into-duckdb some-gtfs.duckdb some-gtfs/*.txt
155
119
156
120
[1] https://developers.google.com/transit/gtfs/reference/extended-route-types
157
121
[2] https://groups.google.com/g/gtfs-changes/c/keT5rTPS7Y0/m/71uMz2l6ke0J
@@ -165,11 +129,11 @@ if (flags.version) {
165
129
}
166
130
167
131
const { basename, extname} = require ( 'path' )
168
- const { pipeline} = require ( 'stream' )
169
132
const convertGtfsToSql = require ( './index' )
170
- const DataError = require ( './lib/data-error' )
171
133
172
- const files = args . map ( ( file ) => {
134
+ const [ pathToDb ] = args
135
+
136
+ const files = args . slice ( 1 ) . map ( ( file ) => {
173
137
const name = basename ( file , extname ( file ) )
174
138
return { name, file}
175
139
} )
@@ -185,9 +149,7 @@ const opt = {
185
149
statsByRouteIdAndDate : flags [ 'stats-by-route-date' ] || 'none' ,
186
150
statsByAgencyIdAndRouteIdAndStopAndHour : flags [ 'stats-by-agency-route-stop-hour' ] || 'none' ,
187
151
statsActiveTripsByHour : flags [ 'stats-active-trips-by-hour' ] || 'none' ,
188
- schema : flags [ 'schema' ] || 'public' ,
189
- postgraphile : ! ! flags . postgraphile ,
190
- postgrest : ! ! flags . postgrest ,
152
+ schema : flags [ 'schema' ] || 'main' ,
191
153
importMetadata : ! ! flags [ 'import-metadata' ] ,
192
154
}
193
155
if ( 'stops-without-level-id' in flags ) {
@@ -196,31 +158,11 @@ if ('stops-without-level-id' in flags) {
196
158
if ( 'lower-case-lang-codes' in flags ) {
197
159
opt . lowerCaseLanguageCodes = flags [ 'lower-case-lang-codes' ]
198
160
}
199
- if ( 'postgraphile-password' in flags ) {
200
- opt . postgraphilePassword = flags [ 'postgraphile-password' ]
201
- }
202
- if ( 'postgrest-password' in flags ) {
203
- opt . postgrestPassword = flags [ 'postgrest-password' ]
204
- }
205
- if ( 'postgrest-query-cost-limit' in flags ) {
206
- const limit = parseFloat ( flags [ 'postgrest-query-cost-limit' ] )
207
- if ( ! Number . isFinite ( limit ) || limit < 0 ) {
208
- console . error ( 'Invalid --postgrest-query-cost-limit value.' )
209
- process . exit ( 1 )
210
- }
211
- opt . lowerCaseLanguageCodes = limit
212
- }
213
161
214
- pipeline (
215
- convertGtfsToSql ( files , opt ) ,
216
- process . stdout ,
217
- ( err ) => {
218
- if ( ! err ) return ;
219
- if ( err instanceof DataError ) {
220
- console . error ( String ( err ) )
221
- } else if ( err . code !== 'EPIPE' ) {
222
- console . error ( err )
223
- }
224
- process . exit ( 1 )
162
+ convertGtfsToSql ( pathToDb , files , opt )
163
+ . catch ( ( err ) => {
164
+ if ( err . code !== 'EPIPE' ) { // todo: check still necessary? we don't pipe anymore
165
+ console . error ( err )
225
166
}
226
- )
167
+ process . exit ( 1 )
168
+ } )
0 commit comments