Created
October 25, 2023 21:54
-
-
Save evansiroky/7a2d2e92e4fd137844ed626ade8d5c12 to your computer and use it in GitHub Desktop.
Migrate Fares v2 and remove route_colors (IBI workflow)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Does some post-processing after receiving an export from IBI Data Tools. It does these things:
# - copies Fares v2 data from another folder into the current one
# - removes the route color columns
#
# Copy Fares v2 data from one folder to another. It is assumed that the first input
# is a folder that contains the following:
# - file: fare_leg_rules.txt
# - file: fare_media.txt
# - file: fare_products.txt
# - file: routes.txt with a column of `network_id`
#
# The first 3 files are copied verbatim from one folder to another, and the network_id info
# is copied according to route_id and written into the new folder's routes.txt in a new
# network_id column.
#
# This is mainly to aid a workflow of having to update data in IBI Data Tools that results in
# the loss of Fares v2 data.
import argparse | |
import csv | |
import os | |
import shutil | |
# Command-line interface: two positional folder paths plus an opt-in flag
# for copying network_id values between the feeds' routes.txt files.
arg_parser = argparse.ArgumentParser(
    description='Copy Fares v2 data from one folder to another')
arg_parser.add_argument('src', help='Folder containing Fares v2 data')
arg_parser.add_argument(
    'dst', help='Folder that Fares v2 data should be copied into')
# BooleanOptionalAction also generates a --no-network_id variant.
arg_parser.add_argument(
    '-n', '--network_id',
    action=argparse.BooleanOptionalAction,
    help='Copy over network_id from routes.txt')
args = arg_parser.parse_args()
def copy_file(filename, src=None, dst=None):
    """Copy `filename` from the `src` folder into the `dst` folder, if present.

    Missing files are silently skipped because not every feed contains every
    optional Fares v2 file (e.g. fare_media.txt).

    Parameters:
        filename: bare file name (no directory part) to copy.
        src: source folder; defaults to the CLI-provided ``args.src`` so
            existing call sites keep working unchanged.
        dst: destination folder; defaults to ``args.dst``.
    """
    src = args.src if src is None else src
    dst = args.dst if dst is None else dst
    from_file = os.path.join(src, filename)
    # best-effort: only copy files the source feed actually has
    if os.path.exists(from_file):
        shutil.copy(from_file, os.path.join(dst, filename))
# copy files verbatim (files absent from the source feed are skipped)
copy_file('areas.txt')
copy_file('fare_leg_rules.txt')
copy_file('fare_media.txt')
copy_file('fare_products.txt')
copy_file('stop_areas.txt')

# Rewrite the destination routes.txt:
#   1. drop the route_color / route_text_color columns
#   2. optionally merge network_id values from the source feed's routes.txt
# First load the existing route data, stripping the color columns.
# newline='' is required by the csv module when reading/writing csv files
# (otherwise rows can be mangled on platforms with \r\n line endings).
with open(os.path.join(args.dst, 'routes.txt'), newline='') as dst_routes_file:
    dst_routes = []
    for row in csv.DictReader(dst_routes_file):
        # route colors are intentionally discarded (see header comment)
        row.pop('route_color', None)
        row.pop('route_text_color', None)
        dst_routes.append(row)

if args.network_id:
    # Build a route_id -> network_id lookup from the source feed so the
    # merge is O(n) instead of a quadratic nested scan.
    # NOTE: assumes the source routes.txt has a network_id column, per the
    # header comment; a missing column raises KeyError here.
    with open(os.path.join(args.src, 'routes.txt'), newline='') as src_routes_file:
        src_network_ids = {
            row['route_id']: row['network_id']
            for row in csv.DictReader(src_routes_file)
        }
    # Assign a network_id to EVERY destination row (default '') so all rows
    # share the same set of columns. Previously only matching rows got the
    # key, so DictWriter could either omit the column from the header or
    # raise ValueError on rows carrying the extra key.
    for dst_row in dst_routes:
        dst_row['network_id'] = src_network_ids.get(
            dst_row['route_id'], dst_row.get('network_id', ''))

# write updated data; guard against an empty routes.txt, which previously
# crashed with IndexError on dst_routes[0]
if dst_routes:
    with open(os.path.join(args.dst, 'routes.txt'), 'w', newline='') as dst_routes_file:
        dst_routes_writer = csv.DictWriter(
            dst_routes_file, fieldnames=list(dst_routes[0].keys()))
        dst_routes_writer.writeheader()
        dst_routes_writer.writerows(dst_routes)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment