begin work on bus compiler

author		Teemu Piippo <teemu@hecknology.net>
date		Wed, 29 Jul 2020 23:45:53 +0300
changeset	1:f9788970fa46
parent		0:659ab465152e
child		2:7378b802ddf8

.hgignore
buses.py
busroute.py
compute_regions.py
datamodel.py
föli.ini
föli.osm
geometry.py
gtfsc.py
katakana.py
misc.py
regions.py
--- a/.hgignore	Tue Jul 28 21:51:54 2020 +0300
+++ b/.hgignore	Wed Jul 29 23:45:53 2020 +0300
@@ -1,2 +1,4 @@
 syntax:glob
 gtfs.zip
+__pycache__
+*.db
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/buses.py	Wed Jul 29 23:45:53 2020 +0300
@@ -0,0 +1,263 @@
+
+def old_load_gtfs(gtfs_zip_path):
+	global viimeinen_käyttöpäivä
+	from zipfile import ZipFile
+	with ZipFile(gtfs_zip_path) as gtfs_zip:
+		with gtfs_zip.open('trips.txt') as file:
+			for row in read_csv(map(bytes.decode, file)):
+				if row['service_id'] not in services:
+					services[row['service_id']] = BusService(row['service_id'])
+				route = routes_per_id[row['route_id']]
+				trip = GtfsTrip(
+					reference = row['trip_id'],
+					route = route,
+					service = services[row['service_id']],
+					length = shape_distances.get(row.get('shape_id'), 1) * float(profile['metrics']['shape-modifier']),
+					block_id = row.get('block_id') or row['service_id'],
+					shape = row.get('shape_id')
+				)
+				route.trips.add(trip)
+				if trip.name in all_trips:
+					print('Trip %s already exists' % trip.name)
+				else:
+					all_trips[trip.name] = trip
+		print('%d trips' % len(all_trips), file = stderr)
+
+		def read_date(teksti):
+			return date(int(teksti[:4]), int(teksti[4:6]), int(teksti[6:]))
+
+		def read_time(teksti):
+			hour, minute, second = map(int, teksti.split(':'))
+			return timedelta(hours = hour, minutes = minute, seconds = second)
+
+		print('Loading dates... ', file = stderr, flush = True)
+		viimeinen_käyttöpäivä = date.today()
+
+		def date_range(start_date, end_date, *, include_end = False):
+			''' Generates dates from start_date up to end_date. If include_end is True, end_date is yielded as well. '''
+			current_date = start_date
+			while current_date < end_date:
+				yield current_date
+				current_date += timedelta(1)
+			if include_end:
+				yield end_date
+
+		def add_day_to_service(service_name, day):
+			try:
+				service = services[service_name]
+			except KeyError:
+				return
+			else:
+				service.dates.add(day)
+				if day not in services_for_day:
+					services_for_day[day] = set()
+				services_for_day[day].add(service)
+				global viimeinen_käyttöpäivä
+				viimeinen_käyttöpäivä = max(day, viimeinen_käyttöpäivä)
+
+		def filter_day(row, day):
+			day_names = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday']
+			return int(row[day_names[day.isoweekday() - 1]])
+
+		with gtfs_zip.open('calendar.txt') as file:
+			for row in read_csv(map(bytes.decode, file)):
+				for day in date_range(read_date(row['start_date']), read_date(row['end_date']), include_end = True):
+					if filter_day(row, day):
+						add_day_to_service(service_name = row['service_id'], day = day)
+
+		with gtfs_zip.open('calendar_dates.txt') as file:
+			for row in read_csv(map(bytes.decode, file)):
+				add_day_to_service(service_name = row['service_id'], day = read_date(row['date']))
+
+		def services_available_at(day):
+			for service in services.values():
+				if day in service.dates:
+					yield service
+
+		print('Loading stops... ', file = stderr, end = '', flush = True)
+		with gtfs_zip.open('stops.txt') as file:
+			for row in read_csv(map(bytes.decode, file)):
+				location = Location(float(row['stop_lat']), float(row['stop_lon']))
+				stop = BusStop(
+					reference = row['stop_id'],
+					name = row['stop_name'],
+					location = location, 
+					code = row.get('stop_code', row['stop_id']),
+				)
+				bus_stops[stop.reference] = stop
+		if profile['regions']['use-regions']:
+			with open('regions-per-stop.json') as file:
+				for stop_reference, region in json.load(file).items():
+					try:
+						bus_stops[stop_reference].region = region
+					except KeyError:
+						pass
+			for bus_stop in bus_stops.values():
+				if not hasattr(bus_stop, 'region'):
+					bus_stop.region = None
+		print('%d stops' % len(bus_stops), file = stderr)
+
+		from collections import defaultdict
+		bus_stops_by_name = defaultdict(set)
+		for bus_stop in bus_stops.values():
+			bus_stops_by_name[bus_stop.name].add(bus_stop)
+		bus_stops_by_name = dict(bus_stops_by_name)
+
+		# group bus_stops by name
+		global all_clusters
+		all_clusters = []
+		def cluster_gtfs_stops():
+			sorted_gtfs_stops = sorted(bus_stops.values(), key = lambda bus_stop: bus_stop.name)
+			for bus_stop in sorted_gtfs_stops:
+				if not bus_stop.cluster:
+					stops_to_cluster = {bus_stop}
+					# find other stops with the same name to pair with this stop
+					for pair_candidate in bus_stops_by_name[bus_stop.name]:
+						distance = pair_candidate.location.distance(bus_stop.location)
+						if pair_candidate is not bus_stop and distance <= 0.4:
+							stops_to_cluster.add(pair_candidate)
+					for stop_to_cluster in stops_to_cluster:
+						if stop_to_cluster.cluster:
+							cluster = stop_to_cluster.cluster
+							break
+					else:
+						cluster = BusStopCluster()
+						all_clusters.append(cluster)
+					for stop_to_cluster in stops_to_cluster:
+						if not stop_to_cluster.cluster:
+							cluster.add_stop(stop_to_cluster)
+			# Record each stop's counterpart stops using the current clustering information
+			for bus_stop in bus_stops.values():
+				if bus_stop.cluster:
+					bus_stop.pairs = bus_stop.cluster.stops - {bus_stop}
+			# Cluster the bus_stops that had no counterpart of their own together with other stops
+			for bus_stop in sorted_gtfs_stops:
+				if len(bus_stop.cluster.stops) == 1:
+					possibilities = set()
+					for cluster in all_clusters:
+						if cluster is not bus_stop.cluster:
+							distance = cluster.center.distance(bus_stop.location)
+							if distance <= 0.4:
+								possibilities.add((distance, cluster))
+					if possibilities:
+						best = min(possibilities)[1]
+						all_clusters.remove(bus_stop.cluster)
+						best.merge(bus_stop.cluster)
+
+		def shared_elements_in_n_sets(sets):
+			from itertools import combinations
+			result = set()
+			for pair in combinations(sets, 2):
+				result |= pair[0] & pair[1]
+			return result
+
+		def name_clusters():
+			from collections import defaultdict
+			clusters_per_name = defaultdict(set)
+			for cluster in all_clusters:
+				name_representing_stop = min((len(stop.reference), stop.reference, stop) for stop in cluster.stops)[2]
+				clusters_per_name[name_representing_stop.name].add(cluster)
+			for name, clusters in clusters_per_name.items():
+				if len(clusters) == 1:
+					# Simple case: this cluster is the only one that wants this name.
+					next(iter(clusters)).name = name
+				else:
+					if profile['regions']['use-regions']:
+						# Find out if all clusters are in different areas
+						common_regions = shared_elements_in_n_sets({stop.region for stop in cluster.stops} for cluster in clusters)
+						# Proposal: cluster -> the areas unique to the cluster
+						proposal = {
+							cluster: {stop.region for stop in cluster.stops} - common_regions - {None}
+							for cluster in clusters
+						}
+						# If at most one cluster is without its own unique region, name the others by region and this one without any.
+						if sum([1 for unique_areas in proposal.values() if not unique_areas]) <= 1:
+							for cluster, unique_areas in proposal.items():
+								individual_cluster_name = name
+								if unique_areas:
+									individual_cluster_name += ' (' + min(unique_areas) + ')'
+								cluster.name = individual_cluster_name
+							continue
+					# If all else fails, just number them.
+					for n, (_, cluster) in enumerate(sorted(
+						min((stop.reference.lower(), cluster) for stop in cluster.stops)
+						for cluster in clusters
+					), 1):
+						individual_cluster_name = name + '-' + str(n)
+						cluster.name = individual_cluster_name
+
+		print('Clustering bus stops...')
+		cluster_gtfs_stops()
+		name_clusters()
+		for cluster in all_clusters:
+			if cluster.url_name in clusters_by_name:
+				print('Warning: Clusters %r and %r share the same URL name: %r' % (cluster.name, clusters_by_name[cluster.url_name].name, cluster.url_name))
+			else:
+				clusters_by_name[cluster.url_name] = cluster
+		print('Loading schedules... ', end = '', flush = True, file = stderr)
+		with gtfs_zip.open('stop_times.txt') as file:
+			row_count = sum(line.count(b'\n') for line in file)
+		with gtfs_zip.open('stop_times.txt') as file:
+			progress = 0
+			for row in read_csv(map(bytes.decode, file)):
+				if int(row.get('pickup_type', '') or '0') and int(row.get('drop_off_type', '') or '0'):
+					continue
+				trip = all_trips[transform_trip_reference(row['trip_id'])]
+				arrival_time = read_time(row['arrival_time'])
+				departure_time = read_time(row['departure_time'])
+				stop = bus_stops[row['stop_id']]
+				traveled_distance = float(row.get('shape_dist_traveled', 1)) * float(profile['metrics']['shape-modifier'])
+				visitnumber = len(trip.schedule) + 1
+				trip.schedule.append(BusHalt(arrival_time, departure_time, stop, trip, traveled_distance, visitnumber))
+				stop.involved_trips.add(trip)
+				progress += 1
+				if progress % 1000 == 0:
+					print('\rLoading schedules... %.1f%%' % (progress * 100 / row_count), end = ' ', file = stderr)
+		print('\rLoading schedules... complete', file = stderr)
+		for trip in all_trips.values():
+			from busroute import simplify_name
+			schedule = trip.concise_schedule()
+			try:
+				trip.from_place = simplify_name(schedule[0])
+				trip.to_place = simplify_name(schedule[-1])
+			except IndexError:
+				trip.from_place = ''
+				trip.to_place = ''
+		for route in routes.values():
+			from collections import Counter
+			from busroute import simplify_name
+			tally = Counter()
+			for trip in route.trips:
+				schedule = trip.concise_schedule()
+				places = set(schedule)
+				do_add = True
+				assert type(schedule) is list
+				for candidate in tally:
+					if places.issubset(set(candidate)):
+						do_add = False
+						tally.update({tuple(candidate)})
+				if do_add:
+					tally.update({tuple(schedule)})
+			try:
+				most_common_route = tally.most_common(1)[0][0]
+				route.description = simplify_name(most_common_route[0]) + ' - ' + simplify_name(most_common_route[-1])
+			except:
+				route.description = ''
+			route.trips = sorted(route.trips, key = lambda trip: trip.schedule and trip.schedule[0].departure_time or timedelta())
+		if 'compatibility' in profile and profile['compatibility'].get('fix-destination-times', False):
+			# There seems to be something strange going on in Föli's gtfs data.
+			# It seems that sometimes the arrival time of the last stop is
+			# completely off, so try estimate when the bus will really arrive
+			# there based on the last leg distance.
+			# I noticed this for bus 220's arrival time at Mylly several years
+			# ago. Possibly this has been fixed in the data by now?
+			for trip in all_trips.values():
+				if len(trip.schedule) >= 2:
+					bus_speed_coefficient = 750 # meters per minute
+					last_leg_distance = trip.schedule[-1].traveled_distance - trip.schedule[-2].traveled_distance
+					trip.schedule[-1].arrival_time = trip.schedule[-2].departure_time + timedelta(minutes = last_leg_distance / bus_speed_coefficient)
+		# Add services to all bus stops
+		for route in routes.values():
+			for trip in route.trips:
+				for halt in trip.schedule:
+					halt.stop.services.add(route.service)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/busroute.py	Wed Jul 29 23:45:53 2020 +0300
@@ -0,0 +1,130 @@
+#!/usr/bin/env python3
+def via_factor(region_name, *, regions):
+	# how important is it that this region is shown as a via point on a destination sign
+	if region_name in regions:
+		return float(regions[region_name]['via_factor'])
+	else:
+		return 0
+
+def simplify_name(region_name, *, regions, replace = False):
+	# take short_name into account
+	region = regions.get(region_name)
+	if region:
+		return region.get('short_name', region_name)
+	else:
+		return region_name
+
+def destinations_list(
+	itinerary,
+	*, trip_length,
+	regions,
+	whole = False,
+	format = 'medium',
+):
+	'''
+		Produces a sign of destinations for the given itinerary.
+		`itinerary`: list of region names passed through
+		`trip_length`: length of the itinerary in meters.
+		`regions`: the regions table, used to decide what is important to show.
+		`whole`: whether or not the starting place is also included.
+		`format` controls what kind of sign to produce:
+			- 'short': at most 2 destinations, with reducing
+			- 'medium': at most 3 destinations, with reducing
+			- 'long': at most 4 destinations, no reducing.
+		Returns a list of region names.
+		e.g. ['Turun keskusta', 'Ihala', 'Kauppakeskus Mylly']
+		for Föli bus route 220 at the student village.
+	'''
+	# prefer longer destination signs on longer routes
+	length = ((trip_length / 600) * 3 + len(itinerary) * 2) / 5
+	# collect regions along the itinerary
+	have_already = set()
+	i = 0
+	if not itinerary:
+		# not going anywhere?
+		return []
+	while i < len(itinerary):
+		region = regions.get(itinerary[i])
+		if not itinerary[i] or itinerary[i] in have_already:
+			del itinerary[i]
+		else:
+			have_already.add(itinerary[i])
+			i += 1
+	from_place = itinerary[0]
+	destination = itinerary[-1]
+	route_weights = {}
+	# create weights for all places along the way. Transforming by x^-0.3
+	# lessens weights for places further out in the itinerary.
+	f = lambda i: i**-0.3
+	# this factor scales the weights so that they become comparable against
+	# constant values
+	factor = 1 / max(f(i + 1) for i in range(len(itinerary)))
+	while via_factor(itinerary[-1], regions = regions) < 0:
+		del itinerary[-1]
+		if not itinerary:
+			return []
+		destination = itinerary[-1]
+	for i, stop in enumerate(itinerary):
+		# transform index by:
+		# - our gradually decreasing x^-0.3 curve,
+		# - our normalising factor,
+		# - and the via_factor of the stop
+		route_weights[stop] = f(i + 1) * factor * via_factor(stop, regions = regions)
+	# ensure that the starting region does not make it into the destinations
+	# sign by setting its weight to 0
+	if from_place in route_weights:
+		route_weights[from_place] = 0
+	# ensure that the destination does make it to the signpost
+	route_weights[destination] = 1.0e+10
+	# sort destinations by importance
+	weights = sorted(
+		[
+			(stop, route_weights[stop], i)
+			for i, stop in enumerate(itinerary)
+			if route_weights[stop] >= 1
+		], key = lambda stop: -stop[1])
+	# now consider what do we want to display:
+	if format == 'long':
+		# long format, just take at most four destinations
+		weights = weights[:4]
+	elif format == 'short':
+		# short format, take at most two destinations
+		weights = weights[:2]
+		# possibly drop the via-region as well
+		try:
+			if weights[1][0] != destination and weights[1][1] < (500 / length ** 1.15):
+				del weights[1]
+		except IndexError:
+			pass
+	elif format == 'medium':
+		# regular format, at most three destinations
+		weights = weights[:3]
+		# if the third sign value is not significant enough, drop it
+		try:
+			if weights[2][0] != destination and weights[2][1] < (725 / length ** 0.8):
+				del weights[2]
+		except IndexError:
+			pass
+		# and repeat for the second sign value
+		try:
+			if weights[1][0] != destination and weights[1][1] < (500 / length ** 1.15):
+				del weights[1]
+		except IndexError:
+			pass
+	else:
+		raise ValueError(str.format('Unknown format {format}', format = repr(format)))
+	# restore the signpost back to itinerary order
+	weights.sort(key = lambda weight_data: weight_data[2])
+	# form the sign
+	sign = [paino[0] for paino in weights]
+	old_sign = sign.copy()
+	sign = []
+	for place in old_sign:
+		if place not in sign:
+			sign.append(place)
+	if whole:
+		# whole format, also include the starting point
+		sign = [from_place] + sign
+	if not sign:
+		sign = [destination]
+	return sign
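
A usage sketch for destinations_list with a made-up regions table; the region entries and via_factor values below are illustrative and not taken from föli.osm:

from busroute import destinations_list

regions = {
	'Turun keskusta': {'via_factor': '200'},
	'Ihala': {'via_factor': '100'},
	'Kauppakeskus Mylly': {'via_factor': '300'},
}
sign = destinations_list(
	['Turun keskusta', 'Ihala', 'Kauppakeskus Mylly'],
	trip_length = 12000, # metres
	regions = regions,
	format = 'medium',
)
# The starting region is weighted down to zero, so with these numbers the sign
# should come out as ['Ihala', 'Kauppakeskus Mylly'].
print(sign)
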
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/compute_regions.py	Wed Jul 29 23:45:53 2020 +0300
@@ -0,0 +1,99 @@
+#!/usr/bin/env python3
+import sys, json
+from misc import *
+from geometry import *
+from configparser import ConfigParser
+
+class Blockmap:
+	'''
+	The blockmap is a grid of latitude and longitude lines and models
+	a block -> set relation. A block is a latitude and longitude square.
+	'''
+	block_size = 200.0 # scale factor: each block spans 1/200 of a degree of latitude and longitude
+	def __init__(self, blocks = None):
+		from collections import defaultdict
+		self.blocks = blocks or defaultdict(set)
+	def __getitem__(self, blockid):
+		'''
+		Returns a block for block coordinates. The block is a set that can
+		contain anything.
+		'''
+		return self.blocks[blockid]
+	def blockpoint(self, point):
+		'''
+		Returns blockmap coordinates for geographical coordinates.
+		The blockmap coordinates refer to a block in the blockmap.
+		'''
+		block_coordinate = lambda x: int(x * self.block_size)
+		return block_coordinate(point.x), block_coordinate(point.y)
+
+def blocks_in_shape(blockmap, shape):
+	'''
+	Finds all blocks inside the bounding box of a shape.
+	'''
+	from itertools import product
+	min_x, max_x = minmax(point.x for point in shape.points)
+	min_y, max_y = minmax(point.y for point in shape.points)
+	min_blockpoint = blockmap.blockpoint(Location(min_x, min_y))
+	max_blockpoint = blockmap.blockpoint(Location(max_x, max_y))
+	range_x = range(min_blockpoint[0], max_blockpoint[0] + 1)
+	range_y = range(min_blockpoint[1], max_blockpoint[1] + 1)
+	yield from (blockmap[x, y] for x, y in product(range_x, range_y))
+
+def create_blockmap(regions):
+	'''
+	Creates a blockmap of regions
+	'''
+	blockmap = Blockmap()
+	for region in regions.values():
+		# The minor shapes enclose the major shapes, so prefer them and fall back to the major shapes when there are none
+		for shape in (region['minor_shapes'] or region['major_shapes']):
+			for block in blocks_in_shape(blockmap, shape):
+				set.add(block, region['name'])
+	return blockmap
+
+def get_args():
+	from argparse import ArgumentParser
+	parser = ArgumentParser()
+	parser.add_argument('gtfs_zip')
+	parser.add_argument('profile')
+	return parser.parse_args()
+
+def test_shapes(shapes, point):
+	return any(shape.contains_point(point) for shape in shapes)
+
+class RegionalLocation:
+	def __init__(self, *, region, region_class):
+		self.region, self.region_class = region, region_class
+	def __repr__(self):
+		return str.format(
+			'RegionalLocation(region = {region}, region_class = {region_class})',
+			region = repr(self.region),
+			region_class = repr(self.region_class),
+		)
+
+def locate_regionally(position, region):
+	if test_shapes(region['major_shapes'], position):
+		return RegionalLocation(region = region['name'], region_class = 'major')
+	elif test_shapes(region['minor_shapes'], position):
+		return RegionalLocation(region = region['name'], region_class = 'minor')
+	else:
+		return None
+
+def find_region_for_point(position, regions, blockmap):
+	for region_name in blockmap[blockmap.blockpoint(position)]:
+		region = regions[region_name]
+		classification = locate_regionally(position, region)
+		if classification:
+			return classification
+
+class RegionTester:
+	def __init__(self, regions):
+		self.regions = regions
+		self.blockmap = create_blockmap(regions)
+	def __call__(self, latitude, longitude):
+		return find_region_for_point(
+			position = Location(float(latitude), float(longitude)),
+			regions = self.regions,
+			blockmap = self.blockmap,
+		)
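
A minimal sketch of how RegionTester is meant to be used, with a single hand-made square region instead of the parsed föli.osm data; the region name and coordinates are invented:

from geometry import Location, Polygon
from compute_regions import RegionTester

regions = {
	'Testila': {
		'name': 'Testila',
		# One major shape; create_blockmap falls back to the major shapes
		# whenever minor_shapes is empty.
		'major_shapes': [Polygon(
			Location(60.45, 22.25),
			Location(60.45, 22.30),
			Location(60.50, 22.30),
			Location(60.50, 22.25),
		)],
		'minor_shapes': [],
	},
}
tester = RegionTester(regions)
print(tester(60.48, 22.26)) # RegionalLocation(region = 'Testila', region_class = 'major')
print(tester(60.60, 22.26)) # None: the point is outside every region
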
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/datamodel.py	Wed Jul 29 23:45:53 2020 +0300
@@ -0,0 +1,55 @@
+import sqlalchemy.ext.declarative
+import sqlalchemy.orm
+GtfsBase = sqlalchemy.ext.declarative.declarative_base(name = 'GtfsBase')
+
+class GtfsRoute(GtfsBase):
+	__tablename__ = 'routes'
+	id = sqlalchemy.Column(sqlalchemy.String, primary_key = True)
+	reference = sqlalchemy.Column(sqlalchemy.String)
+	description = sqlalchemy.Column(sqlalchemy.String)
+	type = sqlalchemy.Column(sqlalchemy.Integer)
+
+class GtfsShape(GtfsBase):
+	__tablename__ = 'shapes'
+	id = sqlalchemy.Column(sqlalchemy.String, primary_key = True)
+	length = sqlalchemy.Column(sqlalchemy.Numeric, default = 0)
+	shape_coordinates = sqlalchemy.Column(sqlalchemy.String)
+
+class GtfsTrip(GtfsBase):
+	__tablename__ = 'trips'
+	id = sqlalchemy.Column(sqlalchemy.String, primary_key = True)
+	route_id = sqlalchemy.Column(sqlalchemy.String, sqlalchemy.ForeignKey(GtfsRoute.id))
+	service = sqlalchemy.Column(sqlalchemy.String)
+	shape_id = sqlalchemy.Column(sqlalchemy.String, sqlalchemy.ForeignKey(GtfsShape.id))
+	route = sqlalchemy.orm.relationship('GtfsRoute', foreign_keys = 'GtfsTrip.route_id')
+	shape = sqlalchemy.orm.relationship('GtfsShape', foreign_keys = 'GtfsTrip.shape_id')
+
+class GtfsService(GtfsBase):
+	__tablename__ = 'services'
+	id = sqlalchemy.Column(sqlalchemy.String, primary_key = True)
+
+class GtfsStop(GtfsBase):
+	__tablename__ = 'stops'
+	stop_id = sqlalchemy.Column(sqlalchemy.String, primary_key = True)
+	stop_name = sqlalchemy.Column(sqlalchemy.String)
+	stop_latitude = sqlalchemy.Column(sqlalchemy.Numeric)
+	stop_longitude = sqlalchemy.Column(sqlalchemy.Numeric)
+	stop_region = sqlalchemy.Column(sqlalchemy.String)
+	stop_region_major = sqlalchemy.Column(sqlalchemy.Boolean)
+
+class GtfsRegion(GtfsBase):
+	# Not a GTFS data set, but the Gtfs prefix is kept for consistency
+	__tablename__ = 'regions'
+	region_name = sqlalchemy.Column(sqlalchemy.String, primary_key = True)
+	region_name_sv = sqlalchemy.Column(sqlalchemy.String)
+	region_name_en = sqlalchemy.Column(sqlalchemy.String)
+	region_name_ja = sqlalchemy.Column(sqlalchemy.String)
+	region_short_name = sqlalchemy.Column(sqlalchemy.String)
+	region_short_name_sv = sqlalchemy.Column(sqlalchemy.String)
+	region_short_name_en = sqlalchemy.Column(sqlalchemy.String)
+	region_short_name_ja = sqlalchemy.Column(sqlalchemy.String)
+	region_internal_name = sqlalchemy.Column(sqlalchemy.String)
+	region_internal_name_sv = sqlalchemy.Column(sqlalchemy.String)
+	region_internal_name_en = sqlalchemy.Column(sqlalchemy.String)
+	region_internal_name_ja = sqlalchemy.Column(sqlalchemy.String)
+	municipality = sqlalchemy.Column(sqlalchemy.String, nullable = False)
+	external = sqlalchemy.Column(sqlalchemy.Boolean)
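
The declarative classes above can be exercised on their own; a small sketch against an in-memory SQLite database, mirroring what gtfsc.py does with gtfs.db (the stop data is illustrative):

import sqlalchemy
import sqlalchemy.orm
from datamodel import GtfsBase, GtfsStop

engine = sqlalchemy.create_engine('sqlite:///:memory:')
GtfsBase.metadata.create_all(engine)
session = sqlalchemy.orm.sessionmaker(bind = engine)()

session.add(GtfsStop(
	stop_id = '1',
	stop_name = 'Kauppatori',
	stop_latitude = 60.4515,
	stop_longitude = 22.2669,
))
session.commit()
stop = session.query(GtfsStop).filter_by(stop_name = 'Kauppatori').one()
print(stop.stop_id, stop.stop_latitude, stop.stop_longitude)
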
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/föli.ini	Wed Jul 29 23:45:53 2020 +0300
@@ -0,0 +1,13 @@
+[metrics]
+shape-modifier = 1
+
+[compatibility]
+fix-destination-times = 1
+
+[regions]
+use-regions = 1
+osm-path = föli.osm
+
+[date-exceptions]
+1.5. = sunday
+30.4. = weekday
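
The profile above is read with ConfigParser (see gtfsc.py's __main__). All values come back as strings, which is why the loaders convert shape-modifier with float(); a quick sketch:

from configparser import ConfigParser

profile = ConfigParser()
profile.read('föli.ini')
print(float(profile['metrics']['shape-modifier'])) # 1.0
print(profile['regions']['use-regions'])           # '1'
print(dict(profile['date-exceptions']))            # {'1.5.': 'sunday', '30.4.': 'weekday'}
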
--- a/föli.osm	Tue Jul 28 21:51:54 2020 +0300
+++ b/föli.osm	Wed Jul 29 23:45:53 2020 +0300
@@ -254,12 +254,6 @@
   <node id='-751214' action='modify' visible='true' lat='60.46926' lon='22.15591' />
   <node id='-751216' action='modify' visible='true' lat='60.46261970043' lon='22.15176935715' />
   <node id='-751218' action='modify' visible='true' lat='60.46403335121' lon='22.139827836' />
-  <node id='-751220' action='modify' visible='true' lat='60.46798' lon='22.145' />
-  <node id='-751222' action='modify' visible='true' lat='60.473' lon='22.15251' />
-  <node id='-751224' action='modify' visible='true' lat='60.46819' lon='22.14438' />
-  <node id='-751226' action='modify' visible='true' lat='60.46426003015' lon='22.13925261642' />
-  <node id='-751228' action='modify' visible='true' lat='60.46574371762' lon='22.12923059265' />
-  <node id='-751230' action='modify' visible='true' lat='60.46689' lon='22.13144' />
   <node id='-751232' action='modify' visible='true' lat='60.47074' lon='22.1599' />
   <node id='-751234' action='modify' visible='true' lat='60.46629' lon='22.17157' />
   <node id='-751236' action='modify' visible='true' lat='60.46058' lon='22.182' />
@@ -933,7 +927,7 @@
   <node id='-752762' action='modify' visible='true' lat='60.49707' lon='22.44944' />
   <node id='-752764' action='modify' visible='true' lat='60.50221' lon='22.44357' />
   <node id='-752766' action='modify' visible='true' lat='60.45985056278' lon='22.21253498413' />
-  <node id='-752768' action='modify' visible='true' lat='60.45712791392' lon='22.23032738819' />
+  <node id='-752768' action='modify' visible='true' lat='60.45736592293' lon='22.23034679353' />
   <node id='-752772' action='modify' visible='true' lat='60.45322' lon='22.23343' />
   <node id='-752774' action='modify' visible='true' lat='60.45646' lon='22.20923' />
   <node id='-752776' action='modify' visible='true' lat='60.5154' lon='22.37237' />
@@ -959,8 +953,8 @@
   <node id='-752816' action='modify' visible='true' lat='60.40826' lon='23.09137' />
   <node id='-752818' action='modify' visible='true' lat='60.40088' lon='23.20724' />
   <node id='-752820' action='modify' visible='true' lat='60.35661' lon='23.16847' />
-  <node id='-752822' action='modify' visible='true' lat='60.5184' lon='22.29066' />
-  <node id='-752824' action='modify' visible='true' lat='60.50809' lon='22.2868' />
+  <node id='-752822' action='modify' visible='true' lat='60.51161960022' lon='22.28368625656' />
+  <node id='-752824' action='modify' visible='true' lat='60.50894564243' lon='22.28235826187' />
   <node id='-752826' action='modify' visible='true' lat='60.50811098332' lon='22.26959183624' />
   <node id='-752828' action='modify' visible='true' lat='60.51298386273' lon='22.27387947672' />
   <node id='-752830' action='modify' visible='true' lat='60.58368' lon='22.71183' />
@@ -1180,11 +1174,10 @@
   <node id='-753264' action='modify' visible='true' lat='60.47125' lon='22.1602' />
   <node id='-753266' action='modify' visible='true' lat='60.47505' lon='22.17021' />
   <node id='-753268' action='modify' visible='true' lat='60.49006734' lon='22.16173632813' />
-  <node id='-753270' action='modify' visible='true' lat='60.49232772497' lon='22.17103525131' />
-  <node id='-753272' action='modify' visible='true' lat='60.48414612892' lon='22.17566465866' />
-  <node id='-753274' action='modify' visible='true' lat='60.48366643913' lon='22.17485110046' />
-  <node id='-753276' action='modify' visible='true' lat='60.48503' lon='22.16991' />
-  <node id='-753278' action='modify' visible='true' lat='60.47810055895' lon='22.16361062439' />
+  <node id='-753270' action='modify' visible='true' lat='60.49202916986' lon='22.16935870886' />
+  <node id='-753272' action='modify' visible='true' lat='60.48541238241' lon='22.17768429637' />
+  <node id='-753276' action='modify' visible='true' lat='60.48213520662' lon='22.16929433584' />
+  <node id='-753278' action='modify' visible='true' lat='60.47783208229' lon='22.16448781729' />
   <node id='-753280' action='modify' visible='true' lat='60.48313764049' lon='22.16155331757' />
   <node id='-753282' action='modify' visible='true' lat='60.47294675769' lon='22.24241136432' />
   <node id='-753284' action='modify' visible='true' lat='60.46959428411' lon='22.2428834331' />
@@ -1395,8 +1388,7 @@
   <node id='-753736' action='modify' visible='true' lat='60.44596910222' lon='22.19901321173' />
   <node id='-753738' action='modify' visible='true' lat='60.44966926917' lon='22.27164744795' />
   <node id='-753740' action='modify' visible='true' lat='60.4598706772' lon='22.28760927647' />
-  <node id='-753742' action='modify' visible='true' lat='60.45906795978' lon='22.28899329633' />
-  <node id='-753744' action='modify' visible='true' lat='60.4606985014' lon='22.29290127218' />
+  <node id='-753744' action='modify' visible='true' lat='60.4607381729' lon='22.29262768686' />
   <node id='-753746' action='modify' visible='true' lat='60.46158341544' lon='22.29061684753' />
   <node id='-753748' action='modify' visible='true' lat='60.47454357138' lon='22.38987918377' />
   <node id='-753750' action='modify' visible='true' lat='60.47579134546' lon='22.4227952528' />
@@ -1493,10 +1485,6 @@
   <node id='-753950' action='modify' visible='true' lat='60.42366124347' lon='22.58943553448' />
   <node id='-753952' action='modify' visible='true' lat='60.41660678123' lon='22.58694644451' />
   <node id='-753954' action='modify' visible='true' lat='60.41643728584' lon='22.56827826977' />
-  <node id='-753956' action='modify' visible='true' lat='60.39861415516' lon='22.47725482464' />
-  <node id='-753958' action='modify' visible='true' lat='60.39687582455' lon='22.50772471905' />
-  <node id='-753960' action='modify' visible='true' lat='60.38192661188' lon='22.49313345909' />
-  <node id='-753962' action='modify' visible='true' lat='60.38697407098' lon='22.46661177635' />
   <node id='-753964' action='modify' visible='true' lat='60.37738747233' lon='22.45502463341' />
   <node id='-753966' action='modify' visible='true' lat='60.35268686837' lon='22.46695509911' />
   <node id='-753968' action='modify' visible='true' lat='60.36185628724' lon='22.51536360741' />
@@ -1515,7 +1503,7 @@
   <node id='-753994' action='modify' visible='true' lat='60.48284352864' lon='22.18609569311' />
   <node id='-753996' action='modify' visible='true' lat='60.48275895369' lon='22.19435689688' />
   <node id='-753998' action='modify' visible='true' lat='60.48950311362' lon='22.19431398153' />
-  <node id='-754000' action='modify' visible='true' lat='60.48804446434' lon='22.1746372962' />
+  <node id='-754000' action='modify' visible='true' lat='60.48900633464' lon='22.17285630941' />
   <node id='-754002' action='modify' visible='true' lat='60.49229339032' lon='22.1716332221' />
   <node id='-754004' action='modify' visible='true' lat='60.48849897804' lon='22.1750235343' />
   <node id='-754006' action='modify' visible='true' lat='60.49008444107' lon='22.18058107138' />
@@ -2594,7 +2582,6 @@
   <node id='-780494' action='modify' visible='true' lat='60.42522868335' lon='22.27493045568' />
   <node id='-780495' action='modify' visible='true' lat='60.42533458944' lon='22.27756974936' />
   <node id='-780496' action='modify' visible='true' lat='60.43206950823' lon='22.27604625463' />
-  <node id='-780497' action='modify' visible='true' lat='60.42979292292' lon='22.26681945562' />
   <node id='-780506' action='modify' visible='true' lat='60.48971978615' lon='22.23348497272' />
   <node id='-780507' action='modify' visible='true' lat='60.48784362539' lon='22.23309873462' />
   <node id='-780508' action='modify' visible='true' lat='60.48625805282' lon='22.2335386169' />
@@ -2676,13 +2663,10 @@
   <node id='-780872' action='modify' visible='true' lat='60.46349918973' lon='22.29348062932' />
   <node id='-780873' action='modify' visible='true' lat='60.46150514557' lon='22.29100763261' />
   <node id='-780874' action='modify' visible='true' lat='60.46070379094' lon='22.29303538263' />
-  <node id='-780875' action='modify' visible='true' lat='60.45842127645' lon='22.29156553209' />
-  <node id='-780876' action='modify' visible='true' lat='60.45810123552' lon='22.29014932573' />
-  <node id='-780877' action='modify' visible='true' lat='60.45882330687' lon='22.28915690839' />
+  <node id='-780877' action='modify' visible='true' lat='60.45885240098' lon='22.28921591699' />
   <node id='-780879' action='modify' visible='true' lat='60.46038906209' lon='22.2863620466' />
   <node id='-780881' action='modify' visible='true' lat='60.46047105057' lon='22.28638350427' />
-  <node id='-780889' action='modify' visible='true' lat='60.45818322846' lon='22.29027002782' />
-  <node id='-780891' action='modify' visible='true' lat='60.45844772597' lon='22.29146897256' />
+  <node id='-780889' action='modify' visible='true' lat='60.45897935681' lon='22.28931784093' />
   <node id='-780902' action='modify' visible='true' lat='60.46678618879' lon='22.30549692035' />
   <node id='-780903' action='modify' visible='true' lat='60.46739436564' lon='22.30592070937' />
   <node id='-780904' action='modify' visible='true' lat='60.47129171543' lon='22.30920909762' />
@@ -2951,6 +2935,17 @@
   <node id='-782513' action='modify' visible='true' lat='60.36839214755' lon='22.17234130383' />
   <node id='-782515' action='modify' visible='true' lat='60.37185054157' lon='22.17414374828' />
   <node id='-782517' action='modify' visible='true' lat='60.37066242305' lon='22.179593997' />
+  <node id='-786652' action='modify' visible='true' lat='60.48745780987' lon='22.17287777781' />
+  <node id='-786654' action='modify' visible='true' lat='60.48678130034' lon='22.17397211909' />
+  <node id='-786656' action='modify' visible='true' lat='60.48616820138' lon='22.17663287044' />
+  <node id='-786664' action='modify' visible='true' lat='60.48382669626' lon='22.17330692053' />
+  <node id='-786675' action='modify' visible='true' lat='60.46563855978' lon='22.1359276557' />
+  <node id='-786677' action='modify' visible='true' lat='60.46735206334' lon='22.13305232763' />
+  <node id='-799172' action='modify' visible='true' lat='60.5621726874' lon='22.39037274241' />
+  <node id='-799173' action='modify' visible='true' lat='60.56144500579' lon='22.38823770404' />
+  <node id='-799174' action='modify' visible='true' lat='60.55902982916' lon='22.38700388789' />
+  <node id='-799175' action='modify' visible='true' lat='60.55744773796' lon='22.38706826091' />
+  <node id='-799176' action='modify' visible='true' lat='60.55975229173' lon='22.39624141574' />
   <way id='-755861' action='modify' visible='true'>
     <nd ref='-750676' />
     <nd ref='-750678' />
@@ -3615,23 +3610,13 @@
     <nd ref='-751214' />
     <nd ref='-751216' />
     <nd ref='-751218' />
-    <nd ref='-751220' />
+    <nd ref='-786675' />
+    <nd ref='-786677' />
     <nd ref='-751210' />
     <tag k='boundary' v='minor_region' />
     <tag k='municipality' v='Raisio' />
     <tag k='name' v='Krookila' />
   </way>
-  <way id='-755905' action='modify' visible='true'>
-    <nd ref='-751222' />
-    <nd ref='-751224' />
-    <nd ref='-751226' />
-    <nd ref='-751228' />
-    <nd ref='-751230' />
-    <nd ref='-751222' />
-    <tag k='boundary' v='minor_region' />
-    <tag k='municipality' v='Raisio' />
-    <tag k='name' v='Inkoinen' />
-  </way>
   <way id='-755906' action='modify' visible='true'>
     <nd ref='-751232' />
     <nd ref='-751234' />
@@ -4868,14 +4853,17 @@
     <tag k='boundary' v='major_region' />
     <tag k='internal_name' v='Keskusta' />
     <tag k='internal_name:en' v='Centrum' />
+    <tag k='internal_name:ja' v='市内' />
     <tag k='internal_name:ru' v='Центр' />
     <tag k='internal_name:sv' v='Centrum' />
     <tag k='municipality' v='Lieto' />
     <tag k='name' v='Liedon keskusta' />
     <tag k='name:en' v='Lieto centrum' />
+    <tag k='name:ja' v='リエト市内' />
     <tag k='name:ru' v='Центр Лието' />
     <tag k='name:sv' v='Lundo centrum' />
     <tag k='short_name' v='Lieto' />
+    <tag k='short_name:ja' v='リエト' />
     <tag k='short_name:ru' v='Лието' />
     <tag k='short_name:sv' v='Lundo' />
     <tag k='via_factor' v='200' />
@@ -5655,7 +5643,7 @@
     <nd ref='-753202' />
     <nd ref='-753204' />
     <nd ref='-753198' />
-    <tag k='boundary' v='minor_region' />
+    <tag k='boundary' v='major_region' />
     <tag k='municipality' v='Turku' />
     <tag k='name' v='Lehmusvalkama' />
     <tag k='name:ru' v='Лехмусвалкама' />
@@ -5734,8 +5722,11 @@
     <nd ref='-753268' />
     <nd ref='-753270' />
     <nd ref='-754000' />
+    <nd ref='-786652' />
+    <nd ref='-786654' />
+    <nd ref='-786656' />
     <nd ref='-753272' />
-    <nd ref='-753274' />
+    <nd ref='-786664' />
     <nd ref='-753276' />
     <nd ref='-753278' />
     <nd ref='-753850' />
@@ -5786,11 +5777,11 @@
     <tag k='municipality' v='Kaarina' />
     <tag k='name' v='Kaarinan keskusta' />
     <tag k='name:en' v='Kaarina centrum' />
-    <tag k='name:ja' v='カアリナ市内' />
+    <tag k='name:ja' v='カーリナ市内' />
     <tag k='name:ru' v='Центр Каарина' />
     <tag k='name:sv' v='S:t Karins centrum' />
     <tag k='short_name' v='Kaarina' />
-    <tag k='short_name:ja' v='カアリナ' />
+    <tag k='short_name:ja' v='カーリナ' />
     <tag k='short_name:ru' v='Каарина' />
     <tag k='short_name:sv' v='S:t Karins' />
     <tag k='via_factor' v='100' />
@@ -6265,7 +6256,7 @@
     <nd ref='-753714' />
     <nd ref='-753716' />
     <nd ref='-753710' />
-    <tag k='boundary' v='minor_region' />
+    <tag k='boundary' v='major_region' />
     <tag k='municipality' v='Raisio' />
     <tag k='name' v='Somersoja' />
   </way>
@@ -6294,9 +6285,7 @@
   </way>
   <way id='-756114' action='modify' visible='true'>
     <nd ref='-753740' />
-    <nd ref='-753742' />
     <nd ref='-780889' />
-    <nd ref='-780891' />
     <nd ref='-753744' />
     <nd ref='-753746' />
     <nd ref='-753740' />
@@ -6517,16 +6506,6 @@
     <tag k='municipality' v='Piikkiö' />
     <tag k='name' v='Heernummi' />
   </way>
-  <way id='-756134' action='modify' visible='true'>
-    <nd ref='-753956' />
-    <nd ref='-753958' />
-    <nd ref='-753960' />
-    <nd ref='-753962' />
-    <nd ref='-753956' />
-    <tag k='boundary' v='major_region' />
-    <tag k='municipality' v='Piikkiö' />
-    <tag k='name' v='Niemenkulma' />
-  </way>
   <way id='-756135' action='modify' visible='true'>
     <nd ref='-753964' />
     <nd ref='-753966' />
@@ -7299,7 +7278,6 @@
     <tag k='municipality' v='Turku' />
     <tag k='name' v='Metsämäen ravirata' />
     <tag k='name:sv' v='Skogsbacka travbana' />
-    <tag k='via_factor' v='500' />
   </way>
   <way id='-756202' action='modify' visible='true'>
     <nd ref='-754702' />
@@ -7966,7 +7944,6 @@
     <tag k='municipality' v='Turku' />
     <tag k='name' v='Biolaakso' />
     <tag k='name:ru' v='Биолааксо' />
-    <tag k='name:sv' v='Biodalen' />
   </way>
   <way id='-756265' action='modify' visible='true'>
     <nd ref='-755262' />
@@ -8032,6 +8009,8 @@
     <tag k='boundary' v='minor_region' />
     <tag k='municipality' v='Turku' />
     <tag k='name' v='Siirtolapuutarha' />
+    <tag k='name:en' v='Allotments' />
+    <tag k='name:sv' v='Koloniträdgården' />
   </way>
   <way id='-756272' action='modify' visible='true'>
     <nd ref='-755332' />
@@ -8473,7 +8452,7 @@
     <tag k='boundary' v='minor_region' />
     <tag k='municipality' v='Turku' />
     <tag k='name' v='Itäinen Pitkäkatu' />
-    <tag k='name:sv' v='Östra Långgatan' />
+    <tag k='name:sv' v='Österlånggatan' />
     <tag k='via_factor' v='50' />
   </way>
   <way id='-756309' action='modify' visible='true'>
@@ -8528,6 +8507,7 @@
     <nd ref='-755860' />
     <nd ref='-755852' />
     <tag k='boundary' v='major_region' />
+    <tag k='municipality' v='Turku' />
     <tag k='name' v='Yli-Maarian koulu' />
     <tag k='name:en' v='Yli-Maaria school' />
     <tag k='name:ru' v='Юли-Маариа школа' />
@@ -8550,6 +8530,7 @@
     <nd ref='-781777' />
     <nd ref='-781778' />
     <tag k='boundary' v='minor_region' />
+    <tag k='municipality' v='Turku' />
     <tag k='name' v='Korppolaismäki' />
     <tag k='name:sv' v='Korpolaisbacken' />
   </way>
@@ -8822,7 +8803,7 @@
     <nd ref='-780494' />
     <nd ref='-780495' />
     <nd ref='-780496' />
-    <nd ref='-780497' />
+    <nd ref='-780487' />
     <tag k='boundary' v='major_region' />
     <tag k='is_in' v='Ispoinen' />
   </way>
@@ -8938,8 +8919,6 @@
   <way id='-760364' action='modify' visible='true'>
     <nd ref='-780844' />
     <nd ref='-780877' />
-    <nd ref='-780876' />
-    <nd ref='-780875' />
     <nd ref='-780874' />
     <nd ref='-780873' />
     <nd ref='-780872' />
@@ -8959,6 +8938,7 @@
     <nd ref='-780879' />
     <nd ref='-780844' />
     <tag k='boundary' v='major_region' />
+    <tag k='municipality' v='Turku' />
     <tag k='name' v='Ylioppilaskylä' />
     <tag k='name:en' v='Student village' />
     <tag k='name:ja' v='学生村' />
@@ -9227,6 +9207,7 @@
     <nd ref='-781474' />
     <nd ref='-781464' />
     <tag k='boundary' v='major_region' />
+    <tag k='municipality' v='Turku' />
     <tag k='name' v='Kakskerta' />
     <tag k='via_factor' v='200' />
   </way>
@@ -9349,4 +9330,17 @@
     <tag k='boundary' v='major_region' />
     <tag k='is_in' v='Kastu' />
   </way>
+  <way id='-764585' action='modify' visible='true'>
+    <nd ref='-799172' />
+    <nd ref='-799173' />
+    <nd ref='-799174' />
+    <nd ref='-799175' />
+    <nd ref='-799176' />
+    <nd ref='-799172' />
+    <tag k='boundary' v='minor_region' />
+    <tag k='municipality' v='Lieto' />
+    <tag k='name' v='Autopurkaamo' />
+    <tag k='name:en' v='Car scrap yard' />
+    <tag k='name:sv' v='Bilnermonteringen' />
+  </way>
 </osm>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/geometry.py	Wed Jul 29 23:45:53 2020 +0300
@@ -0,0 +1,124 @@
+from math import sqrt, hypot, radians, sin, cos, atan2
+
+class Location:
+	def __init__(self, latitude, longitude):
+		self.latitude, self.longitude = latitude, longitude
+	def distance(self, other):
+		# https://stackoverflow.com/a/365853
+		a = sin(radians(self.latitude - other.latitude) / 2) ** 2
+		a += sin(radians(self.longitude - other.longitude) / 2) ** 2 * cos(radians(self.latitude)) * cos(radians(other.latitude))
+		return 6371 * 2 * atan2(sqrt(a), sqrt(1 - a))
+	def __repr__(self):
+		return '%s(%r, %r)' % (type(self).__name__, self.latitude, self.longitude)
+	def __str__(self):
+		return '%.5f, %.5f' % (self.latitude, self.longitude)
+	@property
+	def x(self):
+		return self.latitude
+	@property
+	def y(self):
+		return self.longitude
+	@property
+	def link_to_map(self):
+		return 'http://www.openstreetmap.org/#map=19/%f/%f' % (self.latitude, self.longitude)
+
+class Ring:
+	def __init__(self, container):
+		self.container = container
+	def __getitem__(self, i):
+		while i < 0:
+			i += len(self.container)
+		while i >= len(self.container):
+			i -= len(self.container)
+		return self.container[i]
+	def __iter__(self):
+		return iter(self.container)
+	def __len__(self):
+		return len(self.container)
+
+class Polygon:
+	def __init__(self, *points):
+		self.points = points
+	def __repr__(self):
+		return '%s(%s)' % (type(self).__name__, ', '.join(map(repr, self.points)))
+	def area(self):
+		ring = Ring(self.points)
+		return sum(
+			ring[i].x * ring[i + 1].y - ring[i + 1].x * ring[i].y
+			for i in range(len(ring))
+		) / 2
+	def circumference(self):
+		ring = Ring(self.points)
+		return sum(
+			sqrt((ring[i + 1].x - ring[i].x)**2 + (ring[i + 1].y - ring[i].y)**2)
+			for i in range(len(ring))
+		)
+	def centroid(self):
+		ring = Ring(self.points)
+		x = sum(
+			(ring[i].x + ring[i + 1].x) * (ring[i].x * ring[i + 1].y - ring[i + 1].x * ring[i].y)
+			for i in range(len(ring))
+		) / 6 / self.area()
+		y = sum(
+			(ring[i].y + ring[i + 1].y) * (ring[i].x * ring[i + 1].y - ring[i + 1].x * ring[i].y)
+			for i in range(len(ring))
+		) / 6 / self.area()
+		return self.point_type()(x, y)
+	def point_type(self):
+		if len(self.points):
+			return type(self.points[0])
+		else:
+			return Point
+	def segments(self):
+		ring = Ring(self.points)
+		for i in range(len(ring)):
+			yield LineSegment(ring[i], ring[i + 1])
+	def contains_point(self, point):
+		outer_point = self.point_type()(
+			min(point.x for point in self.points) - 1,
+			min(point.y for point in self.points) - 1
+		)
+		outer_segment = LineSegment(point, outer_point)
+		intersections = 0
+		for segment in self.segments():
+			if segment.intersection(outer_segment) is not None:
+				intersections += 1
+		return bool(intersections & 1)
+
+class LineSegment:
+	def __init__(self, p1, p2):
+		self.p1, self.p2 = p1, p2
+	def __repr__(self):
+		return 'LineSegment(%r, %r)' % (self.p1, self.p2)
+	def length(self):
+		return hypot(self.p1.x - self.p2.x, self.p1.y - self.p2.y)
+	def intersection(self, other):
+		point_type = type(self.p1)
+		x = (self.p1.x, self.p2.x, other.p1.x, other.p2.x)
+		y = (self.p1.y, self.p2.y, other.p1.y, other.p2.y)
+		try:
+			denominator = (x[0] - x[1]) * (y[2] - y[3]) - (y[0] - y[1]) * (x[2] - x[3])
+			Px = ((x[0] * y[1] - y[0] * x[1]) * (x[2] - x[3]) - (x[0] - x[1]) * (x[2] * y[3] - y[2] * x[3])) / denominator
+			Py = ((x[0] * y[1] - y[0] * x[1]) * (y[2] - y[3]) - (y[0] - y[1]) * (x[2] * y[3] - y[2] * x[3])) / denominator
+			distance = lambda n: hypot(Px - x[n], Py - y[n])
+			if max(distance(0), distance(1)) <= self.length() and max(distance(2), distance(3)) <= other.length():
+				return point_type(Px, Py)
+			else:
+				return None
+		except ZeroDivisionError:
+			return None
+
+class Point:
+	def __init__(self, x, y):
+		self.x, self.y = x, y
+	def __repr__(self):
+		return 'Point(%r, %r)' % (self.x, self.y)
+
+A = Polygon(
+	Point(2,3),
+	Point(1,1),
+	Point(4,0),
+	Point(6,2),
+	Point(4,4))
+L1 = LineSegment(Point(1, 1), Point(-1, 5))
+L2 = LineSegment(Point(1, 5), Point(5, 1))
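
The module-level A, L1 and L2 values look like ad-hoc test fixtures; a short sketch of what the primitives compute for them:

from geometry import Point, LineSegment, A, L1, L2

print(A.area())                      # shoelace area of the pentagon: 11.5
print(A.contains_point(Point(3, 2))) # True: the point lies inside A
print(L1.intersection(L2))           # None: the segments' lines meet outside both segments
print(LineSegment(Point(0, 0), Point(2, 2)).intersection(
	LineSegment(Point(0, 2), Point(2, 0)))) # Point(1.0, 1.0)
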
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/gtfsc.py	Wed Jul 29 23:45:53 2020 +0300
@@ -0,0 +1,184 @@
+#!/usr/bin/env python3
+import io
+import sys
+import sqlalchemy
+import sqlalchemy.orm
+from datamodel import *
+
+ROUTE_TYPES = {
+	'0': 'tram',
+	'1': 'subway',
+	'2': 'rail',
+	'3': 'bus',
+	'4': 'ferry',
+	'5': 'cable-tram',
+	'6': 'aerial-lift',
+	'7': 'funicular',
+	'11': 'trolleybus',
+	'12': 'monorail',
+}
+
+def read_csv(file):
+	import csv
+	reader = csv.reader(file)
+	keys = next(reader)
+	for i in range(len(keys)):
+		keys[i] = keys[i].replace('\ufeff', '').strip()
+	for row in reader:
+		yield dict(zip(keys, row))
+
+def load_gtfs_routes(gtfs_zip):
+	with gtfs_zip.open('routes.txt') as file:
+		for row in read_csv(map(bytes.decode, file)):
+			route = GtfsRoute(
+				id = row['route_id'],
+				reference = row['route_short_name'],
+				description = row['route_long_name'],
+				type = int(row['route_type']),
+			)
+			yield route.id, route
+
+def load_shapes(gtfs_zip):
+	from collections import defaultdict
+	shapes = dict()
+	with gtfs_zip.open('shapes.txt') as file:
+		for row in read_csv(map(bytes.decode, file)):
+			shape_id = row['shape_id']
+			if shape_id not in shapes:
+				shapes[shape_id] = GtfsShape(
+					id = shape_id,
+					shape_coordinates = '',
+					length = 0,
+				)
+			shape = shapes[shape_id]
+			if len(shape.shape_coordinates) > 0:
+				shape.shape_coordinates += ' '
+			shape.shape_coordinates += str.format(
+				'{shape_pt_lat} {shape_pt_lon}',
+				**row,
+			)
+			shape.length = max(shape.length, float(row['shape_dist_traveled']))
+	return shapes.values()
+
+def trip_length(trip, *, shapes):
+	if trip.shape_id:
+		return dict.get(shapes, trip.shape_id).length * float(profile['metrics']['shape-modifier'])
+	else:
+		return 0
+
+def load_trips(gtfs_zip):
+	services = set()
+	with gtfs_zip.open('trips.txt') as file:
+		for row in read_csv(map(bytes.decode, file)):
+			if row['service_id'] not in services:
+				set.add(services, row['service_id'])
+				yield GtfsService(id = row['service_id'])
+			yield GtfsTrip(
+				id = row['trip_id'],
+				route_id = row['route_id'],
+				service = row['service_id'],
+				shape_id = dict.get(row, 'shape_id')
+			)
+
+def load_stops(gtfs_zip):
+	with gtfs_zip.open('stops.txt') as file:
+		for row in read_csv(map(bytes.decode, file)):
+			lat = float(row['stop_lat'])
+			lon = float(row['stop_lon'])
+			yield GtfsStop(
+				stop_id = row['stop_id'],
+				stop_name = row['stop_name'],
+				stop_latitude = lat,
+				stop_longitude = lon,
+			)
+
+def gtfs_stop_spatial_testing(session, regions):
+	print('Finding out in which regions bus stops are...')
+	from compute_regions import RegionTester
+	regiontester = RegionTester(regions)
+	for bus_stop in session.query(GtfsStop):
+		classification = regiontester(
+			latitude = bus_stop.stop_latitude,
+			longitude = bus_stop.stop_longitude,
+		)
+		if classification:
+			bus_stop.stop_region = classification.region
+			bus_stop.stop_region_major = classification.region_class == 'major'
+
+def load_with_loading_text(fn, what, device):
+	print(
+		str.format('Loading {}s... ', what),
+		file = device,
+		end = '',
+		flush = True,
+	)
+	result = fn()
+	print(
+		str.format(
+			'{n} {what}s',
+			n = len(result if type(result) is not tuple else result[0]),
+			what = what,
+		),
+		file = device,
+	)
+	return result
+
+def load_gtfs(
+	gtfs_zip_path,
+	*,
+	profile,
+	session,
+	device = sys.stderr
+):
+	from zipfile import ZipFile
+	with ZipFile(gtfs_zip_path) as gtfs_zip:
+		print('Loading routes...')
+		for route_id, route in load_gtfs_routes(gtfs_zip):
+			session.add(route)
+		print('Loading stops...')
+		for stop in load_stops(gtfs_zip):
+			session.add(stop)
+		print('Loading shapes...')
+		for shape in load_shapes(gtfs_zip):
+			session.add(shape)
+		print('Loading trips...')
+		for trip_or_service in load_trips(gtfs_zip):
+			session.add(trip_or_service)
+
+def parse_yesno(value):
+	return value and value != 'no'
+
+def regions_to_db(regions):
+	from itertools import product
+	for region in regions.values():
+		names = dict()
+		for prefix, language in product(
+			['', 'short_', 'internal_'],
+			['', ':sv', ':en', ':ja'],
+		):
+			key = 'region_' + prefix + 'name' + str.replace(language, ':', '_')
+			value = dict.get(region, prefix + 'name' + language)
+			names[key] = value
+		yield GtfsRegion(
+			**names,
+			municipality = dict.get(region, 'municipality'),
+			external = parse_yesno(dict.get(region, 'external')),
+		)
+
+if __name__ == '__main__':
+	import sys
+	from configparser import ConfigParser
+	from regions import parse_regions
+	profile = ConfigParser()
+	profile.read('föli.ini')
+	engine = sqlalchemy.create_engine('sqlite:///gtfs.db')
+	GtfsBase.metadata.create_all(engine)
+	session = sqlalchemy.orm.sessionmaker(bind = engine)()
+	regions = parse_regions('föli.osm')
+	for region in regions_to_db(regions):
+		session.add(region)
+	session.commit()
+	buses = load_gtfs('gtfs.zip', profile = profile, session = session)
+	gtfs_stop_spatial_testing(session = session, regions = regions)
+	print('Committing to database...')
+	session.commit()
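
read_csv strips a possible UTF-8 byte order mark from the header row before zipping it with each data row; a quick sketch with an in-memory file (the stop data is made up):

import io
from gtfsc import read_csv

data = io.StringIO('\ufeffstop_id,stop_name\n1,Kauppatori\n2,Puutori\n')
for row in read_csv(data):
	print(row)
# {'stop_id': '1', 'stop_name': 'Kauppatori'}
# {'stop_id': '2', 'stop_name': 'Puutori'}
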
--- a/katakana.py	Tue Jul 28 21:51:54 2020 +0300
+++ b/katakana.py	Wed Jul 29 23:45:53 2020 +0300
@@ -153,14 +153,13 @@
 			katakana[latin[0] + latin] = 'ッ' + katakana[latin]
 	# add long vowel versions
 	for latin in copy(list(katakana.keys())):
-		katakana[latin + latin[-1]] = katakana[latin] + 'ー'
+		if latin != 'n':
+			katakana[latin + latin[-1]] = katakana[latin] + 'ー'
 	return katakana
 
 def katakana_keys(kana_table):
 	return sorted(kana_table.keys(), key = len)[::-1]
 
-katakana_table = full_katakana_table(RAW_KATAKANA_TABLE)
-
 def finnish_to_romaji(finnish):
 	# translates finnish text to Japanese romaji
 	# does not, however, fill in 'u' vowels to consonants, that is done 
@@ -175,6 +174,8 @@
 		.replace('l', 'r')
 		.replace('ä', 'a')
 		.replace('ö', 'o')
+		.replace('x', 'ks')
+		.replace('c', 'k')
 		.replace('å', 'oo'))
 
 def splice_romaji(romaji, keys):
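
The change above skips the long-vowel derivation for the syllabic 'n', so doubling it no longer produces a chōonpu form. A toy illustration of the rule with a hypothetical two-entry table (not the module's real RAW_KATAKANA_TABLE):

mini_table = {'ka': 'カ', 'n': 'ン'}
for latin in list(mini_table):
	if latin != 'n':
		# doubling the final latin letter marks a long vowel: kaa -> カー
		mini_table[latin + latin[-1]] = mini_table[latin] + 'ー'
print(mini_table) # {'ka': 'カ', 'n': 'ン', 'kaa': 'カー'}
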
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/misc.py	Wed Jul 29 23:45:53 2020 +0300
@@ -0,0 +1,21 @@
+from configparser import ConfigParser
+profile = ConfigParser()
+
+def minmax(data):
+	'''
+	From: http://code.activestate.com/recipes/577916-fast-minmax-function/
+	Computes the minimum and maximum values in one-pass using only
+	1.5*len(data) comparisons
+	'''
+	import itertools
+	it = iter(data)
+	try:
+		lo = hi = next(it)
+	except StopIteration:
+		raise ValueError('minmax() arg is an empty sequence')
+	for x, y in itertools.zip_longest(it, it, fillvalue = lo):
+		if x > y:
+			x, y = y, x
+		lo = min(x, lo)
+		hi = max(y, hi)
+	return lo, hi
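
A quick check of the minmax recipe:

from misc import minmax

print(minmax([3, 1, 4, 1, 5, 9, 2, 6])) # (1, 9)
print(minmax(iter(range(7))))           # works on a plain iterator too: (0, 6)
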
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/regions.py	Wed Jul 29 23:45:53 2020 +0300
@@ -0,0 +1,88 @@
+#!/usr/bin/env python3
+from xml.etree import ElementTree
+from geometry import *
+
+REGION_TYPES = ['major', 'minor']
+REGION_KEY_VALUES = [x + '_region' for x in REGION_TYPES]
+SHAPE_KEYS = [x + '_shapes' for x in REGION_TYPES]
+
+def parse_nodes(root):
+	nodes = {}
+	for child in root:
+		if child.tag == 'node':
+			lat, lon = float(child.attrib['lat']), float(child.attrib['lon'])
+			nodes[child.attrib['id']] = Location(lat, lon)
+	return nodes
+
+def parse_way(way, nodes):
+	def choose_shapes(way, boundary):
+		return (way['major_shapes']
+			if boundary == 'major_region'
+			else way['minor_shapes'])
+	result = {'minor_shapes': [], 'major_shapes': []}
+	shape = []
+	for child in way:
+		if child.tag == 'nd':
+			shape.append(child.attrib['ref'])
+		elif child.tag == 'tag':
+			key = child.attrib['k']
+			if key in SHAPE_KEYS:
+				raise ValueError(str.format('tag "{}" is not allowed', key))
+			result[key] = child.attrib['v']
+			if key == 'boundary' and result['boundary'] not in REGION_KEY_VALUES:
+				return None # we're not interested in it!
+	if shape[-1] != shape[0]:
+		raise ValueError('polygon is not closed: %r' % result)
+	if 'boundary' not in result:
+		raise ValueError('polygon not tagged as a boundary: %r' % result)
+	shape = [nodes[ref] for ref in shape[:-1]]
+	choose_shapes(result, result['boundary']).append(Polygon(*shape))
+	return result
+
+def parse_boundaries(root, *, nodes):
+	for child in root:
+		if child.tag == 'way':
+			way = parse_way(child, nodes = nodes)
+			if way:
+				yield way
+
+def parse_regions(filename):
+	from katakana import transliterate as transliterate_katakana
+	tree = ElementTree.parse(filename)
+	root = tree.getroot()
+	nodes = parse_nodes(root)
+	regions = dict()
+	extra_shapes = list()
+	for way in parse_boundaries(root, nodes = nodes):
+		if 'boundary' in way and way['boundary'] != 'subregion' and 'name' in way:
+			# defines a region
+			way['via_factor'] = int(way.get('via_factor', 1))
+			if way['name'] in regions:
+				raise ValueError(str.format(
+					'Region {name} defined twice',
+					name = repr(way['name']),
+				))
+			regions[way['name']] = way
+			del way['boundary']
+			if 'external' in way:
+				way['boundary'] = 'minor_region'
+			for prefix in ['', 'short_', 'internal_']:
+				name_key = prefix + 'name'
+				if name_key in way and way[name_key] and name_key + ':ja' not in way:
+					way[name_key + ':ja'] = transliterate_katakana(way[name_key])
+		elif 'boundary' in way and 'is_in' in way:
+			# adds an extra shape to an existing region
+			extra_shapes.append(way)
+	for extra_shape in extra_shapes:
+		name = extra_shape['is_in']
+		try:
+			region = regions[name]
+		except KeyError:
+			raise ValueError(str.format(
+				'Extra shape refers to {name} which was not found: {extra_shape}',
+				name = repr(name),
+				extra_shape = repr(extra_shape),
+			))
+		for key in SHAPE_KEYS:
+			region[key].extend(extra_shape[key])
+	return regions
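
A sketch of how the parsed region table is structured, assuming föli.osm is present as in gtfsc.py's __main__; the printed fields follow what parse_way stores (tag values as strings, shapes as geometry.Polygon lists):

from regions import parse_regions

regions = parse_regions('föli.osm')
student_village = regions['Ylioppilaskylä']
print(student_village['municipality'])      # Turku
print(student_village['name:ja'])           # 学生村
print(len(student_village['major_shapes'])) # number of geometry.Polygon shapes
print(student_village['via_factor'])        # an int; defaults to 1 when the way has no via_factor tag
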
