2015-02-24 22:51:30 +01:00
|
|
|
#!/usr/bin/env python2
|
|
|
|
|
|
|
|
#
|
|
|
|
# Converts a Virtual Radar Server BasicAircraftLookup.sqb database
|
|
|
|
# into a bunch of json files suitable for use by the webmap
|
|
|
|
#
|
|
|
|
|
2016-08-27 17:12:52 +02:00
|
|
|
import sqlite3, json, sys, csv
|
2015-02-24 22:51:30 +01:00
|
|
|
from contextlib import closing
|
|
|
|
|
2016-09-10 18:26:55 +02:00
|
|
|
def readcsv(name, infile, blocks):
    """Merge aircraft records from a CSV file into *blocks* in place.

    blocks maps a hex-digit block key to {remaining-icao24-digits: {column:
    value}}. On first use (blocks empty) the 16 single-digit top-level
    blocks '0'..'F' are created. Rows are merged into any existing entry
    column by column, so later files override earlier ones.

    name   -- label used only in progress messages
    infile -- file-like object containing CSV with an 'icao24' column
    blocks -- dict, mutated in place

    Raises RuntimeError if the CSV has no 'icao24' column.
    """
    sys.stderr.write('Reading from %s\n' % name)

    # First call: create the 16 top-level blocks keyed by hex digit.
    if not blocks:
        for i in range(16):
            blocks['%01X' % i] = {}

    ac_count = 0
    reader = csv.DictReader(infile)
    # reader.fieldnames is None for a completely empty file; report that
    # the same way as a missing column instead of crashing with TypeError.
    if not reader.fieldnames or 'icao24' not in reader.fieldnames:
        raise RuntimeError('CSV should have at least an "icao24" column')
    for row in reader:
        icao24 = row['icao24']

        # Collect the non-empty data columns of this row.
        entry = {}
        for k, v in row.items():
            if k != 'icao24' and v != '':
                entry[k] = v

        if len(entry) > 0:
            # Index by first hex digit (block key) and the rest (entry key).
            bkey = icao24[0:1].upper()
            dkey = icao24[1:].upper()
            if bkey not in blocks:
                # Empty or non-hex icao24: skip the row rather than dying
                # with a KeyError part way through the file.
                sys.stderr.write('Skipping row with bad icao24: %r\n' % icao24)
                continue
            ac_count += 1
            blocks[bkey].setdefault(dkey, {}).update(entry)

    sys.stderr.write('Read %d aircraft from %s\n' % (ac_count, name))
|
|
|
|
|
2019-08-16 09:16:55 +02:00
|
|
|
def cleandb(blocks):
    """Remove '-COMPUTED-' placeholder values from every entry, then drop
    any entry that is left empty. Mutates *blocks* in place."""
    for blockdata in blocks.values():
        emptied = []
        for dkey, entry in blockdata.items():
            placeholders = [k for k, v in entry.items() if v == '-COMPUTED-']
            for k in placeholders:
                del entry[k]
            if not entry:
                emptied.append(dkey)
        # delete after iteration so we never mutate a dict being walked
        for dkey in emptied:
            del blockdata[dkey]
|
|
|
|
|
2016-09-10 18:26:55 +02:00
|
|
|
def writedb(blocks, todir, blocklimit, debug):
    """Write every block to '<todir>/<bkey>.json' as compact JSON.

    A block with more than *blocklimit* entries is split into child blocks
    keyed by one more leading hex digit; small children are merged back
    into the parent to keep the file count down, and the parent records
    the remaining child keys under a 'children' entry. Newly created
    children are queued and may be split again in turn.

    blocks     -- {bkey: {dkey: entry}}, mutated in place by splitting
    todir      -- output directory (must already exist)
    blocklimit -- maximum number of entries per written block
    debug      -- verbose per-block progress on stderr when True
    """
    block_count = 0
    sys.stderr.write('Writing blocks:')

    # Work queue of block keys still to write; splits append child keys.
    queue = sorted(blocks.keys())
    while queue:
        bkey = queue.pop(0)
        blockdata = blocks[bkey]
        if len(blockdata) > blocklimit:
            if debug:
                sys.stderr.write('Splitting block %s with %d entries.. ' % (bkey, len(blockdata)))

            # Split all entries out into children keyed by one more digit.
            children = {}
            for dkey in blockdata.keys():
                new_bkey = bkey + dkey[0]
                new_dkey = dkey[1:]
                if new_bkey not in children:
                    children[new_bkey] = {}
                children[new_bkey][new_dkey] = blockdata[dkey]

            # Look for small children we can retain in the parent, to
            # reduce the total number of files needed. Process smallest
            # first. NOTE(review): 'retained' counts moved-back children
            # (starting at 1), not entries, so the comparison against
            # blocklimit is conservative about child count, not size.
            blockdata = {}
            children = sorted(children.items(), key=lambda x: len(x[1]))
            retained = 1
            # 'children and' guards against IndexError when every child is
            # small enough to be retained and the list empties.
            while children and len(children[0][1]) + retained < blocklimit:
                # move this child back to the parent
                c_bkey, c_entries = children.pop(0)
                for c_dkey, entry in c_entries.items():
                    blockdata[c_bkey[-1] + c_dkey] = entry
                retained += 1

            if debug:
                sys.stderr.write('%d children created, %d entries retained in parent\n' % (len(children), len(blockdata)))

            # Record the surviving children (sorted by key) in the parent
            # and queue them for writing (and possible further splitting).
            children = sorted(children, key=lambda x: x[0])
            blockdata['children'] = [x[0] for x in children]
            blocks[bkey] = blockdata
            for c_bkey, c_entries in children:
                blocks[c_bkey] = c_entries
                queue.append(c_bkey)

        path = todir + '/' + bkey + '.json'
        if debug:
            sys.stderr.write('Writing %d entries to %s\n' % (len(blockdata), path))
        else:
            sys.stderr.write(' ' + bkey)
        block_count += 1
        with closing(open(path, 'w')) as f:
            # compact separators: these files are served to the webmap
            json.dump(obj=blockdata, fp=f, check_circular=False, separators=(',', ':'), sort_keys=True)

    sys.stderr.write(' done.\n')
    sys.stderr.write('Wrote %d blocks\n' % block_count)
|
2015-02-24 22:51:30 +01:00
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Command line: one or more CSV inputs followed by the output
    # directory. Later CSV files win when they conflict with earlier ones.
    if len(sys.argv) < 3:
        usage = [
            'Reads a CSV file with aircraft information and produces a directory of JSON files',
            'Syntax: %s <path to CSV> [... additional CSV files ...] <path to DB dir>' % sys.argv[0],
            'Use "-" as the CSV path to read from stdin',
            'If multiple CSV files are specified and they provide conflicting data',
            'then the data from the last-listed CSV file is used',
        ]
        for line in usage:
            sys.stderr.write(line + '\n')
        sys.exit(1)

    # Accumulate all inputs into one block map, then clean and write it.
    blocks = {}
    for filename in sys.argv[1:-1]:
        if filename == '-':
            readcsv('stdin', sys.stdin, blocks)
        else:
            with closing(open(filename, 'r')) as infile:
                readcsv(filename, infile, blocks)

    cleandb(blocks)
    writedb(blocks, sys.argv[-1], 2500, False)
    sys.exit(0)
|