First bunch of files
author Jean-Michel Nirgal Vourgère <jmv@nirgal.com>
Sat, 29 May 2010 22:18:44 +0000 (22:18 +0000)
committer Jean-Michel Nirgal Vourgère <jmv@nirgal.com>
Sat, 29 May 2010 22:18:44 +0000 (22:18 +0000)
47 files changed:
INSTALL [new file with mode: 0644]
bin/__init__.py [new file with mode: 0644]
bin/ais.py [new file with mode: 0755]
bin/area.py [new file with mode: 0644]
bin/db.py.SAMPLE [new file with mode: 0644]
bin/dj.py [new file with mode: 0755]
bin/djais/__init__.py [new file with mode: 0644]
bin/djais/basicauth.py [new file with mode: 0644]
bin/djais/models.py [new file with mode: 0644]
bin/djais/settings.py.SAMPLE [new file with mode: 0644]
bin/djais/templatetags/__init__.py [new file with mode: 0644]
bin/djais/templatetags/ais_extras.py [new file with mode: 0644]
bin/djais/urls.py [new file with mode: 0644]
bin/djais/views.py [new file with mode: 0644]
bin/earth3d.py [new file with mode: 0755]
bin/gpsdec.py [new file with mode: 0755]
bin/gpsdecoded.py [new file with mode: 0755]
bin/html_parser.py [new file with mode: 0755]
bin/make-countries.py [new file with mode: 0755]
bin/nmea.py [new file with mode: 0644]
bin/ntools.py [new file with mode: 0644]
bin/show_targets_planes.py [new file with mode: 0755]
bin/show_targets_ships.py [new file with mode: 0755]
bin/udp.py [new file with mode: 0755]
html_templates/base.html [new file with mode: 0644]
html_templates/fleet.html [new file with mode: 0644]
html_templates/fleet_users.html [new file with mode: 0644]
html_templates/fleet_vessel_add.html [new file with mode: 0644]
html_templates/fleet_vessels.html [new file with mode: 0644]
html_templates/fleets.html [new file with mode: 0644]
html_templates/index.html [new file with mode: 0644]
html_templates/logout.html [new file with mode: 0644]
html_templates/sources.html [new file with mode: 0644]
html_templates/user_change_password.html [new file with mode: 0644]
html_templates/user_delete.html [new file with mode: 0644]
html_templates/user_detail.html [new file with mode: 0644]
html_templates/user_edit.html [new file with mode: 0644]
html_templates/users.html [new file with mode: 0644]
html_templates/vessel.html [new file with mode: 0644]
html_templates/vessel_index.html [new file with mode: 0644]
html_templates/vessel_manual_input.html [new file with mode: 0644]
html_templates/vessels.html [new file with mode: 0644]
kmz_icons/boat-invis.png [new file with mode: 0644]
kmz_icons/boat-stop.png [new file with mode: 0644]
kmz_icons/boat.png [new file with mode: 0644]
kmz_icons/capital_small.png [new file with mode: 0644]
kmz_icons/plane.png [new file with mode: 0644]

diff --git a/INSTALL b/INSTALL
new file mode 100644 (file)
index 0000000..6b8eb24
--- /dev/null
+++ b/INSTALL
@@ -0,0 +1,4 @@
+create a group "ais"
+create the directory /var/lib/ais, group "ais", with mode g+ws
+adduser www-data ais
+make sure apache2 runs with umask 002, not 022
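+
+for example, on a Debian-style system (commands are a suggested sketch, adapt as needed):
+  addgroup ais
+  install -d -m 2775 -g ais /var/lib/ais
+  adduser www-data ais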
diff --git a/bin/__init__.py b/bin/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/bin/ais.py b/bin/ais.py
new file mode 100755 (executable)
index 0000000..4382bc1
--- /dev/null
@@ -0,0 +1,1861 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import sys
+import os
+import struct
+import logging
+from datetime import datetime, timedelta, date, time
+from fcntl import lockf, LOCK_EX, LOCK_UN, LOCK_SH
+import csv
+
+from ntools import *
+from db import *
+from area import *
+from earth3d import *
+
+DB_STARTDATE = datetime(2008, 6, 1)
+
+# This is the location of the filesystem database
+DBPATH='/var/lib/ais/db'
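+# Layout used by the functions below:
+#   DBPATH/bydate/YYYYMMDD/<hashed path>   per-day, append-only records
+#   DBPATH/last/<hashed path>              latest record only, overwritten in place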
+
+# see make-countries.py
+COUNTRIES_MID = {
+    201: u'Albania',
+    202: u'Andorra',
+    203: u'Austria',
+    204: u'Azores',
+    205: u'Belgium',
+    206: u'Belarus',
+    207: u'Bulgaria',
+    208: u'Vatican City State',
+    209: u'Cyprus',
+    210: u'Cyprus',
+    211: u'Germany',
+    212: u'Cyprus',
+    213: u'Georgia',
+    214: u'Moldova',
+    215: u'Malta',
+    216: u'Armenia',
+    218: u'Germany',
+    219: u'Denmark',
+    220: u'Denmark',
+    224: u'Spain',
+    225: u'Spain',
+    226: u'France',
+    227: u'France',
+    228: u'France',
+    230: u'Finland',
+    231: u'Faroe Islands',
+    232: u'United Kingdom',
+    233: u'United Kingdom',
+    234: u'United Kingdom',
+    235: u'United Kingdom',
+    236: u'Gibraltar',
+    237: u'Greece',
+    238: u'Croatia',
+    239: u'Greece',
+    240: u'Greece',
+    242: u'Morocco',
+    243: u'Hungary',
+    244: u'Netherlands',
+    245: u'Netherlands',
+    246: u'Netherlands',
+    247: u'Italy',
+    248: u'Malta',
+    249: u'Malta',
+    250: u'Ireland',
+    251: u'Iceland',
+    252: u'Liechtenstein',
+    253: u'Luxembourg',
+    254: u'Monaco',
+    255: u'Madeira',
+    256: u'Malta',
+    257: u'Norway',
+    258: u'Norway',
+    259: u'Norway',
+    261: u'Poland',
+    262: u'Montenegro',
+    263: u'Portugal',
+    264: u'Romania',
+    265: u'Sweden',
+    266: u'Sweden',
+    267: u'Slovak Republic',
+    268: u'San Marino',
+    269: u'Switzerland',
+    270: u'Czech Republic',
+    271: u'Turkey',
+    272: u'Ukraine',
+    273: u'Russian Federation',
+    274: u'The Former Yugoslav Republic of Macedonia',
+    275: u'Latvia',
+    276: u'Estonia',
+    277: u'Lithuania',
+    278: u'Slovenia',
+    279: u'Serbia',
+    301: u'Anguilla',
+    303: u'Alaska',
+    304: u'Antigua and Barbuda',
+    305: u'Antigua and Barbuda',
+    306: u'Netherlands Antilles',
+    307: u'Aruba',
+    308: u'Bahamas',
+    309: u'Bahamas',
+    310: u'Bermuda',
+    311: u'Bahamas',
+    312: u'Belize',
+    314: u'Barbados',
+    316: u'Canada',
+    319: u'Cayman Islands',
+    321: u'Costa Rica',
+    323: u'Cuba',
+    325: u'Dominica',
+    327: u'Dominican Republic',
+    329: u'Guadeloupe',
+    330: u'Grenada',
+    331: u'Greenland',
+    332: u'Guatemala',
+    334: u'Honduras',
+    336: u'Haiti',
+    338: u'United States of America',
+    339: u'Jamaica',
+    341: u'Saint Kitts and Nevis',
+    343: u'Saint Lucia',
+    345: u'Mexico',
+    347: u'Martinique',
+    348: u'Montserrat',
+    350: u'Nicaragua',
+    351: u'Panama',
+    352: u'Panama',
+    353: u'Panama',
+    354: u'Panama',
+    355: u'Panama',
+    356: u'Panama',
+    357: u'Panama',
+    358: u'Puerto Rico',
+    359: u'El Salvador',
+    361: u'Saint Pierre and Miquelon',
+    362: u'Trinidad and Tobago',
+    364: u'Turks and Caicos Islands',
+    366: u'United States of America',
+    367: u'United States of America',
+    368: u'United States of America',
+    369: u'United States of America',
+    370: u'Panama',
+    371: u'Panama',
+    372: u'Panama',
+    375: u'Saint Vincent and the Grenadines',
+    376: u'Saint Vincent and the Grenadines',
+    377: u'Saint Vincent and the Grenadines',
+    378: u'British Virgin Islands',
+    379: u'United States Virgin Islands',
+    401: u'Afghanistan',
+    403: u'Saudi Arabia',
+    405: u'Bangladesh',
+    408: u'Bahrain',
+    410: u'Bhutan',
+    412: u'China',
+    413: u'China',
+    416: u'Taiwan',
+    417: u'Sri Lanka',
+    419: u'India',
+    422: u'Iran',
+    423: u'Azerbaijani Republic',
+    425: u'Iraq',
+    428: u'Israel',
+    431: u'Japan',
+    432: u'Japan',
+    434: u'Turkmenistan',
+    436: u'Kazakhstan',
+    437: u'Uzbekistan',
+    438: u'Jordan',
+    440: u'Korea',
+    441: u'Korea',
+    443: u'Palestine',
+    445: u"Democratic People's Republic of Korea",
+    447: u'Kuwait',
+    450: u'Lebanon',
+    451: u'Kyrgyz Republic',
+    453: u'Macao',
+    455: u'Maldives',
+    457: u'Mongolia',
+    459: u'Nepal',
+    461: u'Oman',
+    463: u'Pakistan',
+    466: u'Qatar',
+    468: u'Syrian Arab Republic',
+    470: u'United Arab Emirates',
+    473: u'Yemen',
+    475: u'Yemen',
+    477: u'Hong Kong',
+    478: u'Bosnia and Herzegovina',
+    501: u'Adelie Land',
+    503: u'Australia',
+    506: u'Myanmar',
+    508: u'Brunei Darussalam',
+    510: u'Micronesia',
+    511: u'Palau',
+    512: u'New Zealand',
+    514: u'Cambodia',
+    515: u'Cambodia',
+    516: u'Christmas Island',
+    518: u'Cook Islands',
+    520: u'Fiji',
+    523: u'Cocos',
+    525: u'Indonesia',
+    529: u'Kiribati',
+    531: u"Lao People's Democratic Republic",
+    533: u'Malaysia',
+    536: u'Northern Mariana Islands',
+    538: u'Marshall Islands',
+    540: u'New Caledonia',
+    542: u'Niue',
+    544: u'Nauru',
+    546: u'French Polynesia',
+    548: u'Philippines',
+    553: u'Papua New Guinea',
+    555: u'Pitcairn Island',
+    557: u'Solomon Islands',
+    559: u'American Samoa',
+    561: u'Samoa',
+    563: u'Singapore',
+    564: u'Singapore',
+    565: u'Singapore',
+    567: u'Thailand',
+    570: u'Tonga',
+    572: u'Tuvalu',
+    574: u'Viet Nam',
+    576: u'Vanuatu',
+    578: u'Wallis and Futuna Islands',
+    601: u'South Africa',
+    603: u'Angola',
+    605: u'Algeria',
+    607: u'Saint Paul and Amsterdam Islands',
+    608: u'Ascension Island',
+    609: u'Burundi',
+    610: u'Benin',
+    611: u'Botswana',
+    612: u'Central African Republic',
+    613: u'Cameroon',
+    615: u'Congo',
+    616: u'Comoros',
+    617: u'Cape Verde',
+    618: u'Crozet Archipelago',
+    619: u"Côte d'Ivoire",
+    621: u'Djibouti',
+    622: u'Egypt',
+    624: u'Ethiopia',
+    625: u'Eritrea',
+    626: u'Gabonese Republic',
+    627: u'Ghana',
+    629: u'Gambia',
+    630: u'Guinea-Bissau',
+    631: u'Equatorial Guinea',
+    632: u'Guinea',
+    633: u'Burkina Faso',
+    634: u'Kenya',
+    635: u'Kerguelen Islands',
+    636: u'Liberia',
+    637: u'Liberia',
+    642: u"Socialist People's Libyan Arab Jamahiriya",
+    644: u'Lesotho',
+    645: u'Mauritius',
+    647: u'Madagascar',
+    649: u'Mali',
+    650: u'Mozambique',
+    654: u'Mauritania',
+    655: u'Malawi',
+    656: u'Niger',
+    657: u'Nigeria',
+    659: u'Namibia',
+    660: u'Reunion',
+    661: u'Rwanda',
+    662: u'Sudan',
+    663: u'Senegal',
+    664: u'Seychelles',
+    665: u'Saint Helena',
+    666: u'Somali Democratic Republic',
+    667: u'Sierra Leone',
+    668: u'Sao Tome and Principe',
+    669: u'Swaziland',
+    670: u'Chad',
+    671: u'Togolese Republic',
+    672: u'Tunisia',
+    674: u'Tanzania',
+    675: u'Uganda',
+    676: u'Democratic Republic of the Congo',
+    677: u'Tanzania',
+    678: u'Zambia',
+    679: u'Zimbabwe',
+    701: u'Argentine Republic',
+    710: u'Brazil',
+    720: u'Bolivia',
+    725: u'Chile',
+    730: u'Colombia',
+    735: u'Ecuador',
+    740: u'Falkland Islands',
+    745: u'Guiana',
+    750: u'Guyana',
+    755: u'Paraguay',
+    760: u'Peru',
+    765: u'Suriname',
+    770: u'Uruguay',
+    775: u'Venezuela',
+}
+
+STATUS_CODES = {
+     0:  'Under way using engine',
+     1:  'At anchor',
+     2:  'Not under command',
+     3:  'Restricted manoeuverability',
+     4:  'Constrained by her draught',
+     5:  'Moored',
+     6:  'Aground',
+     7:  'Engaged in Fishing',
+     8:  'Under way sailing',
+     9:  '9 - Reserved for future amendment of Navigational Status for HSC',
+    10:  '10 - Reserved for future amendment of Navigational Status for WIG',
+    11:  '11 - Reserved for future use',
+    12:  '12 - Reserved for future use',
+    13:  '13 - Reserved for future use',
+    14:  '14 - Reserved for future use', # Land stations
+    15:  'Not defined', # default
+}
+
+SHIP_TYPES = {
+     0: 'Not available (default)',
+     1: 'Reserved for future use',
+     2: 'Reserved for future use',
+     3: 'Reserved for future use',
+     4: 'Reserved for future use',
+     5: 'Reserved for future use',
+     6: 'Reserved for future use',
+     7: 'Reserved for future use',
+     8: 'Reserved for future use',
+     9: 'Reserved for future use',
+    10: 'Reserved for future use',
+    11: 'Reserved for future use',
+    12: 'Reserved for future use',
+    13: 'Reserved for future use',
+    14: 'Reserved for future use',
+    15: 'Reserved for future use',
+    16: 'Reserved for future use',
+    17: 'Reserved for future use',
+    18: 'Reserved for future use',
+    19: 'Reserved for future use',
+    20: 'Wing in ground (WIG), all ships of this type',
+    21: 'Wing in ground (WIG), Hazardous category A',
+    22: 'Wing in ground (WIG), Hazardous category B',
+    23: 'Wing in ground (WIG), Hazardous category C',
+    24: 'Wing in ground (WIG), Hazardous category D',
+    25: 'Wing in ground (WIG), Reserved for future use',
+    26: 'Wing in ground (WIG), Reserved for future use',
+    27: 'Wing in ground (WIG), Reserved for future use',
+    28: 'Wing in ground (WIG), Reserved for future use',
+    29: 'Wing in ground (WIG), Reserved for future use',
+    30: 'Fishing',
+    31: 'Towing',
+    32: 'Towing: length exceeds 200m or breadth exceeds 25m',
+    33: 'Dredging or underwater ops',
+    34: 'Diving ops',
+    35: 'Military ops',
+    36: 'Sailing',
+    37: 'Pleasure Craft',
+    38: 'Reserved',
+    39: 'Reserved',
+    40: 'High speed craft (HSC), all ships of this type',
+    41: 'High speed craft (HSC), Hazardous category A',
+    42: 'High speed craft (HSC), Hazardous category B',
+    43: 'High speed craft (HSC), Hazardous category C',
+    44: 'High speed craft (HSC), Hazardous category D',
+    45: 'High speed craft (HSC), Reserved for future use',
+    46: 'High speed craft (HSC), Reserved for future use',
+    47: 'High speed craft (HSC), Reserved for future use',
+    48: 'High speed craft (HSC), Reserved for future use',
+    49: 'High speed craft (HSC), No additional information',
+    50: 'Pilot Vessel',
+    51: 'Search and Rescue vessel',
+    52: 'Tug',
+    53: 'Port Tender',
+    54: 'Anti-pollution equipment',
+    55: 'Law Enforcement',
+    56: 'Spare - Local Vessel',
+    57: 'Spare - Local Vessel',
+    58: 'Medical Transport',
+    59: 'Ship according to RR Resolution No. 18',
+    60: 'Passenger, all ships of this type',
+    61: 'Passenger, Hazardous category A',
+    62: 'Passenger, Hazardous category B',
+    63: 'Passenger, Hazardous category C',
+    64: 'Passenger, Hazardous category D',
+    65: 'Passenger, Reserved for future use',
+    66: 'Passenger, Reserved for future use',
+    67: 'Passenger, Reserved for future use',
+    68: 'Passenger, Reserved for future use',
+    69: 'Passenger, No additional information',
+    70: 'Cargo', # 'Cargo, all ships of this type',
+    71: 'Cargo, Hazardous category A',
+    72: 'Cargo, Hazardous category B',
+    73: 'Cargo, Hazardous category C',
+    74: 'Cargo, Hazardous category D',
+    75: 'Cargo', # 'Cargo, Reserved for future use',
+    76: 'Cargo', # 'Cargo, Reserved for future use',
+    77: 'Cargo', # 'Cargo, Reserved for future use',
+    78: 'Cargo', # 'Cargo, Reserved for future use',
+    79: 'Cargo', # 'Cargo, No additional information',
+    80: 'Tanker', # 'Tanker, all ships of this type',
+    81: 'Tanker, Hazardous category A',
+    82: 'Tanker, Hazardous category B',
+    83: 'Tanker, Hazardous category C',
+    84: 'Tanker, Hazardous category D',
+    85: 'Tanker', # 'Tanker, Reserved for future use',
+    86: 'Tanker', # 'Tanker, Reserved for future use',
+    87: 'Tanker', # 'Tanker, Reserved for future use',
+    88: 'Tanker', # 'Tanker, Reserved for future use',
+    89: 'Tanker, No additional information',
+    90: 'Other Type, all ships of this type',
+    91: 'Other Type, Hazardous category A',
+    92: 'Other Type, Hazardous category B',
+    93: 'Other Type, Hazardous category C',
+    94: 'Other Type, Hazardous category D',
+    95: 'Other Type, Reserved for future use',
+    96: 'Other Type, Reserved for future use',
+    97: 'Other Type, Reserved for future use',
+    98: 'Other Type, Reserved for future use',
+    99: 'Other Type, no additional information',
+    100: 'Default Navaid',
+    101: 'Reference point',
+    102: 'RACON',
+    103: 'Offshore Structure',
+    104: 'Spare',
+    105: 'Light, without sectors',
+    106: 'Light, with sectors',
+    107: 'Leading Light Front',
+    108: 'Leading Light Rear',
+    109: 'Beacon, Cardinal N',
+    110: 'Beacon, Cardinal E',
+    111: 'Beacon, Cardinal S',
+    112: 'Beacon, Cardinal W',
+    113: 'Beacon, Port hand',
+    114: 'Beacon, Starboard hand',
+    115: 'Beacon, Preferred Channel port hand',
+    116: 'Beacon, Preferred Channel starboard hand',
+    117: 'Beacon, Isolated danger',
+    118: 'Beacon, Safe water',
+    119: 'Beacon, Special mark',
+    120: 'Cardinal Mark N',
+    121: 'Cardinal Mark E',
+    122: 'Cardinal Mark S',
+    123: 'Cardinal Mark W',
+    124: 'Port hand Mark',
+    125: 'Starboard hand Mark',
+    126: 'Preferred Channel Port hand',
+    127: 'Preferred Channel Starboard hand',
+    128: 'Isolated danger',
+    129: 'Safe Water',
+    130: 'Manned VTS / Special Mark',
+    131: 'Light Vessel / LANBY',
+}
+
+AIS_STATUS_NOT_AVAILABLE = 15
+AIS_ROT_HARD_LEFT = -127
+AIS_ROT_HARD_RIGHT = 127
+AIS_ROT_NOT_AVAILABLE = -128 # not like gpsd
+
+AIS_LATLON_SCALE = 600000.0
+AIS_LON_NOT_AVAILABLE = 0x6791AC0
+AIS_LAT_NOT_AVAILABLE = 0x3412140
+AIS_COG_SCALE = 10.0
+AIS_COG_NOT_AVAILABLE = 3600
+AIS_NO_HEADING = 511
+AIS_SOG_SCALE = 10.0
+AIS_SOG_NOT_AVAILABLE = 1023
+AIS_SOG_FAST_MOVER = 1022
+AIS_SOG_MAX_SPEED = 1021
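+# Scaling notes: latitude/longitude are stored in 1/10000 of a minute of arc
+# (hence the 600000.0 divisor to get decimal degrees); SOG is in tenths of a
+# knot and COG in tenths of a degree, as used by the get_*_str helpers below.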
+
+
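+# The flat-file database fans each record file out over three directory levels
+# derived from the first characters of its name, for example:
+#   hash3_pathfilename('244123456.nmea1') -> '2/24/244/244123456.nmea1'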
+def hash3_pathfilename(filename):
+    return filename[0]+'/'+filename[:2]+'/'+filename[:3]+'/'+filename
+
+
+def db_bydate_addrecord(basefilename, record, timestamp):
+    strdt = datetime.utcfromtimestamp(timestamp).strftime('%Y%m%d')
+    filename = DBPATH+'/bydate/'+strdt+'/'+hash3_pathfilename(basefilename)
+    f = open_with_mkdirs(filename, 'ab')
+    lockf(f, LOCK_EX)
+    #f.seek(0,2) # go to EOF
+    assert f.tell()%len(record) == 0
+    f.write(record)
+    f.close()
+
+
+def db_lastinfo_setrecord_ifnewer(basefilename, record, timestamp):
+    '''
+    Overwrites the last information if the new record's timestamp is newer.
+    The input record must be complete.
+    '''
+    filename = DBPATH+'/last/'+hash3_pathfilename(basefilename)
+
+    try:
+        f = open(filename, 'r+b')
+    except IOError, ioerr:
+        if ioerr.errno!=2:
+            raise
+        # File was not found? Ok, create it. FIXME: we should lock something...
+        f = open_with_mkdirs(filename, 'wb')
+        f.write(record)
+        updated = True
+    else:
+        lockf(f, LOCK_EX)
+        assert f.tell() == 0
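+        # both record formats start with an unsigned 32-bit timestamp, so the
+        # first 4 bytes are enough to compare ages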
+        oldrecord=f.read(4)
+        assert len(oldrecord) == 4
+        oldtimestamp = struct.unpack('I', oldrecord)[0]
+        f.seek(0)
+        assert f.tell() == 0
+        if timestamp>oldtimestamp:
+            f.write(record)
+            assert f.tell() == len(record), "tell="+str(f.tell())+' size='+str(len(record))
+            updated = True
+        else:
+            updated = False
+    f.close()
+    return updated
+
+
+def sql_add_nmea5(strmmsi, timestamp, imo, name, callsign, type, dim_bow, dim_stern, dim_port, dim_starboard, eta_M, eta_D, eta_h, eta_m, draught, destination, source):
+    ''' Don't call directly '''
+    sqlinfo = {}
+    sqlinfo['mmsi'] = strmmsi_to_mmsi(strmmsi)
+    sqlinfo['updated'] = datetime.utcfromtimestamp(timestamp)
+    sqlinfo['imo'] = imo or None
+    sqlinfo['name'] = name or None
+    sqlinfo['callsign'] = callsign or None
+    sqlinfo['type'] = type
+    sqlinfo['destination'] = None
+    if destination:
+        destination = destination.replace('\0', ' ').rstrip(' @\0')
+    sqlinfo['destination'] = destination or None
+    sqlinfo['source'] = source
+    sqlexec(u'''INSERT INTO vessel (mmsi, updated) SELECT %(mmsi)s, '1970-01-01T00:00:00' WHERE NOT EXISTS (SELECT * FROM vessel WHERE mmsi=%(mmsi)s)''', sqlinfo)
+    if sqlinfo['imo']:
+        sqlexec(u'UPDATE vessel SET imo = %(imo)s WHERE mmsi=%(mmsi)s AND (imo IS NULL OR updated<%(updated)s)', sqlinfo)
+    if sqlinfo['name']:
+        sqlexec(u'UPDATE vessel SET name = %(name)s WHERE mmsi=%(mmsi)s AND (name IS NULL OR updated<%(updated)s)', sqlinfo)
+    if sqlinfo['callsign']:
+        sqlexec(u'UPDATE vessel SET callsign = %(callsign)s WHERE mmsi=%(mmsi)s AND (callsign IS NULL OR updated<%(updated)s)', sqlinfo)
+    if sqlinfo['type']:
+        sqlexec(u'UPDATE vessel SET type = %(type)s WHERE mmsi=%(mmsi)s AND (type IS NULL OR updated<%(updated)s)', sqlinfo)
+    if sqlinfo['destination']:
+        sqlexec(u'UPDATE vessel SET destination = %(destination)s WHERE mmsi=%(mmsi)s AND (destination IS NULL OR updated<%(updated)s)', sqlinfo)
+    sqlexec(u'UPDATE vessel SET (updated, source) = (%(updated)s, %(source)s) WHERE mmsi=%(mmsi)s AND updated<%(updated)s', sqlinfo)
+    dbcommit()
+
+
+
+
+aivdm_record123_format = 'IBbhiiII4s'
+aivdm_record123_length = struct.calcsize(aivdm_record123_format)
+aivdm_record5_format = 'II20s7sBHHBBBBBBH20s4s'
+aivdm_record5_length = struct.calcsize(aivdm_record5_format)
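+# Field order of the packed records, matching the struct.pack() calls below:
+#   record123: timestamp, status, rot, sog, latitude, longitude, cog, heading, source
+#   record5:   timestamp, imo, name, callsign, type, dim_bow, dim_stern, dim_port,
+#              dim_starboard, eta_M, eta_D, eta_h, eta_m, draught, destination, source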
+
+
+def add_nmea1(strmmsi, timestamp, status, rot, sog, latitude, longitude, cog, heading, source):
+    '''
+    Input is raw data, unscaled
+    '''
+    record = struct.pack(aivdm_record123_format, timestamp, status, rot, sog, latitude, longitude, cog, heading, source)
+    #print repr(record)
+    filename = strmmsi+'.nmea1'
+    db_bydate_addrecord(filename, record, timestamp)
+    # No need to be smart here: either all of the information is kept, or none of it.
+    return db_lastinfo_setrecord_ifnewer(filename, record, timestamp)
+
+
+def add_nmea5_full(strmmsi, timestamp, imo, name, callsign, type, dim_bow, dim_stern, dim_port, dim_starboard, eta_M, eta_D, eta_h, eta_m, draught, destination, source):
+    '''
+    Input is raw data, unscaled
+    All fields are set, and may be updated if the record is newer
+    '''
+    record = struct.pack(aivdm_record5_format, timestamp, imo, name, callsign, type, dim_bow, dim_stern, dim_port, dim_starboard, eta_M, eta_D, eta_h, eta_m, draught, destination, source)
+    #print repr(record)
+    filename = strmmsi+'.nmea5'
+    db_bydate_addrecord(filename, record, timestamp)
+    updated = db_lastinfo_setrecord_ifnewer(filename, record, timestamp)
+    if updated:
+        sql_add_nmea5(strmmsi, timestamp, imo, name, callsign, type, dim_bow, dim_stern, dim_port, dim_starboard, eta_M, eta_D, eta_h, eta_m, draught, destination, source)
+    return updated
+
+def add_nmea5_partial(strmmsi, timestamp, imo, name, callsign, type, dim_bow, dim_stern, dim_port, dim_starboard, eta_M, eta_D, eta_h, eta_m, draught, destination, source):
+    '''
+    Input is raw data, unscaled
+    Not all fields are set. Only some of them may be updated, and only if they are newer
+    '''
+    record = struct.pack(aivdm_record5_format, timestamp, imo, name, callsign, type, dim_bow, dim_stern, dim_port, dim_starboard, eta_M, eta_D, eta_h, eta_m, draught, destination, source)
+    #print repr(record)
+    filename = strmmsi+'.nmea5'
+    db_bydate_addrecord(filename, record, timestamp)
+
+    updated = False
+    filename = DBPATH+'/last/'+hash3_pathfilename(filename)
+    try:
+        f = open(filename, 'r+b')
+    except IOError, ioerr:
+        if ioerr.errno!=2:
+            raise
+        # File was not found? Ok, create it. FIXME: we should lock something...
+        f = open_with_mkdirs(filename, 'wb')
+        lockf(f, LOCK_EX)
+        f.write(record)
+        # keep the lock
+        updated = True
+    else:
+        lockf(f, LOCK_EX)
+        oldrecord = f.read(aivdm_record5_length)
+        oldtimestamp, oldimo, oldname, oldcallsign, oldtype, olddim_bow, olddim_stern, olddim_port, olddim_starboard, oldeta_M, oldeta_D, oldeta_h, oldeta_m, olddraught, olddestination, oldsource = struct.unpack(aivdm_record5_format, oldrecord)
+        if timestamp > oldtimestamp:
+            # we have incoming recent information
+            if imo==0:
+                imo = oldimo
+            if name=='':
+                name = oldname
+            if callsign=='':
+                callsign = oldcallsign
+            if type==0:
+                type = oldtype
+            if dim_bow==0:
+                dim_bow = olddim_bow
+            if dim_stern==0:
+                dim_stern = olddim_stern
+            if dim_port==0:
+                dim_port = olddim_port
+            if dim_starboard==0:
+                dim_starboard = olddim_starboard
+            if eta_M==0 or eta_D==0 or eta_h==24 or eta_m==60 or destination=='':
+                eta_M = oldeta_M
+                eta_D = oldeta_D
+                eta_h = oldeta_h
+                eta_m = oldeta_m
+                destination = olddestination
+            if draught==0:
+                draught = olddraught
+            record = struct.pack(aivdm_record5_format, timestamp, imo, name, callsign, type, dim_bow, dim_stern, dim_port, dim_starboard, eta_M, eta_D, eta_h, eta_m, draught, destination, source)
+            f.seek(0)
+            f.write(record)
+            updated = True
+        else:
+            # we received an obsolete info, but maybe there are some new things in it
+            if oldimo==0 and imo!=0:
+                oldimo = imo
+                updated = True
+            if oldname=='' and name!='':
+                oldname = name
+                updated = True
+            if oldcallsign=='' and callsign!='':
+                oldcallsign = callsign
+                updated = True
+            if oldtype==0 and type!=0:
+                oldtype = type
+                updated = True
+            if olddim_bow==0 and dim_bow!=0:
+                olddim_bow = dim_bow
+                updated = True
+            if olddim_stern==0 and dim_stern!=0:
+                olddim_stern = dim_stern
+                updated = True
+            if olddim_port==0 and dim_port!=0:
+                olddim_port = dim_port
+                updated = True
+            if olddim_starboard==0 and dim_starboard!=0:
+                olddim_starboard = dim_starboard
+                updated = True
+            if (oldeta_M==0 or oldeta_D==0 or olddestination=='') and ((eta_M!=0 and eta_D!=0) or destination!=''):
+                oldeta_M = eta_M
+                oldeta_D = eta_D
+                oldeta_h = eta_h
+                oldeta_m = eta_m
+                olddestination = destination
+                updated = True
+            if olddraught==0 and draught!=0:
+                olddraught = draught
+                updated = True
+            if updated:
+                oldsource = source
+                record = struct.pack(aivdm_record5_format, oldtimestamp, oldimo, oldname, oldcallsign, oldtype, olddim_bow, olddim_stern, olddim_port, olddim_starboard, oldeta_M, oldeta_D, oldeta_h, oldeta_m, olddraught, olddestination, oldsource)
+            
+                f.seek(0)
+                f.write(record)
+    # keep the file locked during SQL updates
+    if updated:
+        sql_add_nmea5(strmmsi, timestamp, imo, name, callsign, type, dim_bow, dim_stern, dim_port, dim_starboard, eta_M, eta_D, eta_h, eta_m, draught, destination, source)
+    f.close()
+    return updated
+
+
+
+def strmmsi_to_mmsi(strmmsi):
+    if strmmsi.isdigit():
+        return int(strmmsi)
+    else:
+        assert strmmsi[3:5]=='MI'
+        strmmsi = strmmsi[:3]+'00'+strmmsi[5:]
+        return int('-'+strmmsi)
+
+
+def mmsi_to_strmmsi(mmsi):
+    if mmsi>=0:
+        return "%08d" % mmsi
+    strmmsi = "%08d" % -mmsi
+    assert strmmsi[3:5]=='00'
+    strmmsi = strmmsi[:3]+'MI'+strmmsi[5:]
+    return strmmsi
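+# Round-trip example (MMSI made up for illustration): a plain numeric MMSI maps
+# to itself, '244123456' <-> 244123456, while manual-input identifiers carrying
+# an 'MI' marker are stored as negative integers, '244MI0001' <-> -244000001.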
+
+
+__misources = {} # cache of manual source names
+def _get_mi_sourcename(id):
+    global __misources
+    if not __misources:
+        sqlexec(u'SELECT id, name FROM mi_source')
+        while True:
+            row = get_common_cursor().fetchone()
+            if row is None:
+                break
+            __misources[row[0]] = row[1]
+    result = __misources.get(id, None)
+    if result is None:
+        return u"Manual input #%s" % id
+    return result
+
+
+class Nmea1:
+    def __init__(self, timestamp, status=AIS_STATUS_NOT_AVAILABLE, rot=AIS_ROT_NOT_AVAILABLE, sog=AIS_SOG_NOT_AVAILABLE, latitude=AIS_LAT_NOT_AVAILABLE, longitude=AIS_LON_NOT_AVAILABLE, cog=AIS_COG_NOT_AVAILABLE, heading=AIS_NO_HEADING, source='\x00\x00\x00\x00'):
+        self.timestamp_1 = timestamp
+        self.status      = status
+        self.rot         = rot
+        self.sog         = sog
+        self.latitude    = latitude
+        self.longitude   = longitude
+        self.cog         = cog
+        self.heading     = heading
+        self.source_1    = source
+
+    from_values = __init__
+
+    def to_values(self):
+        return self.timestamp_1, self.status, self.rot, self.sog, self.latitude, self.longitude, self.cog, self.heading, self.source_1
+
+    def from_record(self, record):
+        values = struct.unpack(aivdm_record123_format, record)
+        Nmea1.__init__(self, *values)
+
+    @staticmethod
+    def new_from_record(record):
+        values = struct.unpack(aivdm_record123_format, record)
+        return Nmea1(*values)
+
+    def to_record(self):
+        return struct.pack(aivdm_record123_format, *Nmea1.to_values(self))
+        
+    def from_file(self, file):
+        record = file.read(aivdm_record123_length)
+        Nmea1.from_record(self, record)
+
+    @staticmethod
+    def new_from_file(file):
+        record = file.read(aivdm_record123_length)
+        return Nmea1.new_from_record(record)
+
+    def from_lastinfo(self, strmmsi):
+        filename_nmea1 = DBPATH+'/last/'+hash3_pathfilename(strmmsi+'.nmea1')
+        try:
+            f = file(filename_nmea1, 'rb')
+        except IOError:
+            logging.debug("file %s doesn't exist" % filename_nmea1)
+            return
+        lockf(f, LOCK_SH)
+        Nmea1.from_file(self, f)
+        f.close()
+
+    @staticmethod
+    def new_from_lastinfo(strmmsi):
+        filename_nmea1 = DBPATH+'/last/'+hash3_pathfilename(strmmsi+'.nmea1')
+        try:
+            f = file(filename_nmea1, 'rb')
+        except IOError:
+            logging.debug("file %s doesn't exist" % filename_nmea1)
+            return None
+        lockf(f, LOCK_SH)
+        record = f.read(aivdm_record123_length)
+        f.close()
+        return Nmea1.new_from_record(record)
+
+
+    def dump_to_stdout(self):
+        print datetime.utcfromtimestamp(self.timestamp_1), 
+        for i in (self.status, self.rot, self.sog, self.latitude/AIS_LATLON_SCALE, self.longitude/AIS_LATLON_SCALE, self.cog, self.heading, self.source_1):
+            print repr(i),
+        print
+    @staticmethod
+    def _clean_str(txt):
+        if txt is None:
+            return ''
+        return txt.replace('\0','').replace('@', '').strip()
+
+    def get_status(self, default='Unknown'):
+        return STATUS_CODES.get(self.status, default)
+    def get_sog_str(self, default='Unknown'):
+        if self.sog == AIS_SOG_NOT_AVAILABLE:
+            return default
+        if self.sog == AIS_SOG_FAST_MOVER:
+            return 'over 102.2 kts'
+        return '%.1f kts' % (self.sog/AIS_SOG_SCALE)
+
+    def get_rot_str(self, default='Unknown'):
+        if self.rot == AIS_ROT_NOT_AVAILABLE:
+            return default
+        if self.rot == 0:
+            return 'Not turning'
+        if self.rot < 0:
+            side = 'port'
+        else:
+            side = 'starboard'
+        rot = abs(self.rot)
+        if rot == 127:
+            r = 'To '
+        else:
+            r = '%d %% to ' % (rot*100./127)
+        return r + side
+
+    @staticmethod
+    def _decimaldegree_to_dms(f, emispheres):
+        if f>=0:
+            e = emispheres[0]
+        else:
+            f = -f
+            e = emispheres[1]
+        result = '%d°' % int(f)
+        f = (f%1)*60
+        result += '%02.05f\' ' % f
+        result += e
+        return result
+
+    def get_latitude_str(self, default='Unknown'):
+        if self.latitude==AIS_LAT_NOT_AVAILABLE:
+            return default
+        return Nmea1._decimaldegree_to_dms(self.latitude / AIS_LATLON_SCALE, 'NS')
+
+    def get_longitude_str(self, default='Unknown'):
+        if self.longitude==AIS_LON_NOT_AVAILABLE:
+            return default
+        return Nmea1._decimaldegree_to_dms(self.longitude / AIS_LATLON_SCALE, 'EW')
+
+    def get_cog_str(self, default='Unknown'):
+        if self.cog == AIS_COG_NOT_AVAILABLE:
+            return default
+        return '%.1f°' % (self.cog/10.)
+
+    def get_heading_str(self, default='Unknown'):
+        if self.heading == AIS_NO_HEADING:
+            return default
+        return '%s°' % self.heading
+
+    def get_source_1_str(self):
+        return Nmea.format_source(self.source_1)
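+
+    # Usage sketch (the MMSI is a made-up example): load and print the last
+    # known position report for one vessel.
+    #   nmea1 = Nmea1.new_from_lastinfo('244123456')
+    #   if nmea1 is not None:
+    #       nmea1.dump_to_stdout()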
+
+class Nmea5:
+    def __init__(self, timestamp, imo=0, name='', callsign='', type=0, dim_bow=0, dim_stern=0, dim_port=0, dim_starboard=0, eta_M=0, eta_D=0, eta_h=24, eta_m=60, draught=0, destination='', source=''):
+        self.timestamp_5   = timestamp
+        self.imo           = imo
+        self.name          = name         
+        self.callsign      = callsign
+        self.type          = type
+        self.dim_bow       = dim_bow
+        self.dim_stern     = dim_stern
+        self.dim_port      = dim_port
+        self.dim_starboard = dim_starboard
+        self.eta_M         = eta_M
+        self.eta_D         = eta_D
+        self.eta_h         = eta_h
+        self.eta_m         = eta_m
+        self.draught       = draught
+        self.destination   = destination
+        self.source_5      = source
+
+    from_values = __init__
+
+    def merge_from_values(self, timestamp, imo=0, name='', callsign='', type=0, dim_bow=0, dim_stern=0, dim_port=0, dim_starboard=0, eta_M=0, eta_D=0, eta_h=24, eta_m=60, draught=0, destination='', source=''):
+        updated = False
+        if self.imo==0 or imo!=0:
+            self.imo = imo
+            updated = True
+        if self.name=='' or name!='':
+            self.name = name
+            updated = True
+        if self.callsign=='' or callsign!='':
+            self.callsign = callsign
+            updated = True
+        if self.type==0 or type!=0:
+            self.type = type
+            updated = True
+        if self.dim_bow==0 or dim_bow!=0:
+            self.dim_bow = dim_bow
+            updated = True
+        if self.dim_stern==0 or dim_stern!=0:
+            self.dim_stern = dim_stern
+            updated = True
+        if self.dim_port==0 or dim_port!=0:
+            self.dim_port = dim_port
+            updated = True
+        if self.dim_starboard==0 or dim_starboard!=0:
+            self.dim_starboard = dim_starboard
+            updated = True
+        if (self.eta_M==0 and self.eta_D==0 and self.eta_h==24 and self.eta_m==60) or eta_M!=0 or eta_D!=0 or eta_h!=24 or eta_m!=60:
+            self.eta_M = eta_M
+            self.eta_D = eta_D
+            self.eta_h = eta_h
+            self.eta_m = eta_m
+            updated = True
+        if self.draught==0 or draught!=0:
+            self.draught = draught
+            updated = True
+        if self.destination=='' or destination!='':
+            self.destination = destination
+            updated = True
+        if updated:
+            self.timestamp_5 = timestamp
+            self.source_5 = source
+        return updated
+
+    def to_values(self):
+        return self.timestamp_5, self.imo, self.name, self.callsign, self.type, self.dim_bow, self.dim_stern, self.dim_port, self.dim_starboard, self.eta_M, self.eta_D, self.eta_h, self.eta_m, self.draught, self.destination, self.source_5
+
+    def from_record(self, record):
+        values = struct.unpack(aivdm_record5_format, record)
+        Nmea5.__init__(self, *values)
+
+    @staticmethod
+    def new_from_record(record):
+        values = struct.unpack(aivdm_record5_format, record)
+        return Nmea5(*values)
+
+    def to_record(self):
+        return struct.pack(aivdm_record5_format, *Nmea5.to_values(self))
+        
+    def from_file(self, file):
+        record = file.read(aivdm_record5_length)
+        Nmea5.from_record(self, record)
+
+    @staticmethod
+    def new_from_file(file):
+        record = file.read(aivdm_record5_length)
+        return Nmea5.new_from_record(record)
+
+    def from_lastinfo(self, strmmsi):
+        filename_nmea5 = DBPATH+'/last/'+hash3_pathfilename(strmmsi+'.nmea5')
+        try:
+            f = file(filename_nmea5, 'rb')
+        except IOError:
+            logging.debug("file %s doesn't exist" % filename_nmea5)
+            return
+        lockf(f, LOCK_SH)
+        Nmea5.from_file(self, f)
+        f.close()
+
+    @staticmethod
+    def new_from_lastinfo(strmmsi):
+        filename_nmea5 = DBPATH+'/last/'+hash3_pathfilename(strmmsi+'.nmea5')
+        try:
+            f = file(filename_nmea5, 'rb')
+        except IOError:
+            logging.debug("file %s doesn't exist" % filename_nmea5)
+            return None
+        lockf(f, LOCK_SH)
+        record = f.read(aivdm_record5_length)
+        f.close()
+        return Nmea5.new_from_record(record)
+
+    @staticmethod
+    def _clean_str(txt):
+        if txt is None:
+            return ''
+        return txt.replace('\0','').replace('@', '').strip()
+
+    def get_name(self, default='Unknown'):
+        result = self._clean_str(self.name)
+        if result:
+            return result
+        return default
+
+    def get_callsign(self, default='Unknown'):
+        return self._clean_str(self.callsign) or default
+
+    def get_shiptype(self, default='Unknown'):
+        return SHIP_TYPES.get(self.type, default)
+
+    def get_length(self):
+        return self.dim_bow + self.dim_stern
+
+    def get_width(self):
+        return self.dim_port + self.dim_starboard
+
+    _monthes='Jan,Feb,Mar,Apr,May,Jun,Jul,Aug,Sep,Oct,Nov,Dec'.split(',')
+    def get_eta_str(self, default='Unknown'):
+        if not self.eta_M and not self.eta_D:
+            return default
+        result = ''
+        if self.eta_M:
+            if self.eta_M<= len(Nmea5._monthes):
+                result += Nmea5._monthes[self.eta_M-1]
+            else:
+                result += '%02d' % self.eta_M
+        else:
+            result += '***'
+        result += ' '
+        if self.eta_D:
+            result += '%02d' % self.eta_D
+        else:
+            result += '**'
+        if self.eta_h != 24:
+            result += ' %02d' % self.eta_h
+            if self.eta_m == 60:
+                result += 'h'
+            else:
+                result += ':%02d' % self.eta_m
+        return result
+    
+    def get_draught_str(self, default='Unknown'):
+        if not self.draught:
+            return default
+        return '%.1f meters' % (self.draught/10.)
+
+    def get_destination(self, default='Unknown'):
+        return self._clean_str(self.destination) or default
+
+    def get_source_5_str(self):
+        return Nmea.format_source(self.source_5)
+
+class Nmea(Nmea1, Nmea5):
+    """
+    Merged nmea information, combining the nmea1 and nmea5 packets
+    """
+    def __init__(self, strmmsi):
+        self.strmmsi = strmmsi
+        Nmea1.__init__(self, timestamp=0)
+        Nmea5.__init__(self, timestamp=0)
+
+    ########################
+    # Because of multiple inheritance some functions are unavailable:
+    def _nmea_not_implemented(*args, **kargs):
+        # used to avoid conflicting inherited members
+        raise NotImplementedError
+    from_values = _nmea_not_implemented
+    to_values = _nmea_not_implemented
+    from_record = _nmea_not_implemented
+    new_from_record = _nmea_not_implemented
+    to_record = _nmea_not_implemented
+    from_file = _nmea_not_implemented
+    new_from_file = _nmea_not_implemented
+    ########################
+
+    def from_lastinfo(self, strmmsi):
+        Nmea1.from_lastinfo(self, strmmsi)
+        Nmea5.from_lastinfo(self, strmmsi)
+    
+    @staticmethod
+    def new_from_lastinfo(strmmsi):
+        # better than unimplemented, but not optimal
+        nmea = Nmea(strmmsi)
+        nmea.from_lastinfo(strmmsi)
+        return nmea
+
+
+    def get_flag(self, default=u'Unknown'):
+        if self.strmmsi.startswith('00') and self.strmmsi[3:5]!='MI':
+            ref_mmsi = self.strmmsi[2:]
+        else:
+            ref_mmsi = self.strmmsi
+        country_mid = int(ref_mmsi[0:3])
+        country_name = COUNTRIES_MID.get(country_mid, default)
+        return country_name
+
+    def get_mmsi_public(self, default='Unknown'):
+        if self.strmmsi.isdigit():
+            return self.strmmsi
+        return default
+
+    def get_title(self):
+        """
+        Returns the name of the ship if available,
+        otherwise its MMSI.
+        """
+        return self.get_name(None) or self.get_mmsi_public()
+
+    def get_last_timestamp(self):
+        if self.timestamp_1 > self.timestamp_5:
+            return self.timestamp_1
+        else:
+            return self.timestamp_5
+
+    def get_last_updated_str(self):
+        lastupdate = self.get_last_timestamp()
+        if lastupdate == 0:
+            return u'Never'
+        dt_lastupdate = datetime.utcfromtimestamp(lastupdate)
+        delta = datetime.utcnow() - dt_lastupdate
+        def nice_timedelta_str(delta):
+            strdelta = ''
+            if delta.days:
+                strdelta += str(delta.days)
+                if delta.days > 1:
+                    strdelta += ' days '
+                else:
+                    strdelta += ' day '
+            delta_s = delta.seconds
+            delta_m = delta_s / 60
+            delta_s -= delta_m * 60
+            delta_h = delta_m / 60
+            delta_m -= delta_h * 60
+
+            if delta_h:
+                strdelta += str(delta_h)
+                if delta_h > 1:
+                    strdelta += ' hours '
+                else:
+                    strdelta += ' hour '
+            if delta_m:
+                strdelta += str(delta_m)
+                if delta_m > 1:
+                    strdelta += ' minutes '
+                else:
+                    strdelta += ' minute '
+            if delta_s:
+                strdelta += str(delta_s)
+                if delta_s > 1:
+                    strdelta += ' seconds '
+                else:
+                    strdelta += ' second '
+            if not strdelta:
+                strdelta = 'less than a second '
+            strdelta += ' ago'
+            return strdelta
+        return nice_timedelta_str(delta) + ' (' + dt_lastupdate.strftime('%Y-%m-%d %H:%M:%S GMT') + ')'
+
+    @staticmethod
+    def format_source(infosrc):
+        if infosrc=='\0\0\0\0':
+            return u'(empty)'
+        elif infosrc.startswith('MI'):
+            if len(infosrc)==4:
+                return _get_mi_sourcename(struct.unpack('<2xH', infosrc)[0])
+            else:
+                return u'Manual input'
+        elif infosrc.startswith('U'):
+            return u'User input'
+        elif infosrc.startswith('NM'):
+            return u'NMEA packets from '+xml_escape(infosrc[2:])
+        elif infosrc.startswith('SP'):
+            return u"ShipPlotter user %s" % infosrc[2:]
+        elif infosrc == u'MTWW':
+            return u'MarineTraffic.com web site'
+        elif infosrc == u'MTTR':
+            return u'MarineTraffic.com track files'
+        else:
+            return infosrc
+
+    csv_headers = [
+        'mmsi',
+        'flag',
+        'name',
+        'imo',
+        'callsign',
+        'type',
+        'length',
+        'width',
+        'datetime',
+        'status',
+        'sog',
+        'latitude',
+        'longitude',
+        'cog',
+        'heading',
+        'destination',
+        'eta',
+        'draught',
+        ]
+
+    def get_dump_row(self):
+        result = []
+        def _clean(txt):
+            if txt is None:
+                return ''
+            return txt.replace('\0','').replace('@', '').strip()
+        result.append(self.strmmsi)
+        country_mid = int(self.strmmsi[:3])
+        country_name = COUNTRIES_MID.get(country_mid, u'unknown')
+        result.append(country_name.encode('utf-8'))
+        result.append(_clean(self.name))
+        result.append(str(self.imo))
+        result.append(_clean(self.callsign))
+        result.append(str(self.type) + '-' + SHIP_TYPES.get(self.type, 'unknown'))
+        d = self.dim_bow + self.dim_stern
+        if d:
+            result.append(d)
+        else:
+            result.append(None)
+        d = self.dim_port + self.dim_starboard
+        if d:
+            result.append(d)
+        else:
+            result.append(None)
+        result.append(datetime.utcfromtimestamp(self.timestamp_1).strftime('%Y-%m-%dT%H:%M:%SZ'))
+        result.append(STATUS_CODES.get(self.status, 'unknown'))
+        if self.sog!=AIS_SOG_NOT_AVAILABLE:
+            result.append(str(self.sog/AIS_SOG_SCALE))
+        else:
+            result.append(None)
+        if self.latitude != AIS_LAT_NOT_AVAILABLE:
+            result.append(str(self.latitude/AIS_LATLON_SCALE))
+        else:
+            result.append(None)
+        if self.longitude != AIS_LON_NOT_AVAILABLE:
+            result.append(str(self.longitude/AIS_LATLON_SCALE))
+        else:
+            result.append(None)
+        if self.cog != AIS_COG_NOT_AVAILABLE:
+            result.append(str(self.cog/10.))
+        else:
+            result.append(None)
+        if self.heading != AIS_NO_HEADING:
+            result.append(str(self.heading))
+        else:
+            result.append(None)
+        result.append(self.get_destination(''))
+        result.append(self.get_eta_str(''))
+        result.append(self.draught)
+        result.append(self.source_5)
+        return result
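+
+    # Usage sketch (illustrative): write one CSV row with the merged last-known
+    # data of a vessel, using the csv module imported above.
+    #   nmea = Nmea.new_from_lastinfo('244123456')
+    #   writer = csv.writer(sys.stdout)
+    #   writer.writerow(Nmea.csv_headers)
+    #   writer.writerow(nmea.get_dump_row())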
+
+
+class BankNmea1(list):
+    """
+    This class handles a .nmea1 file
+    """
+    def __init__(self, strmmsi, dt):
+        list.__init__(self)
+        self.strmmsi = strmmsi
+        if isinstance(dt, date):
+            dt = dt.strftime('%Y%m%d')
+        self.date = dt
+
+    def get_filename(self):
+        return os.path.join(DBPATH, 'bydate', self.date, hash3_pathfilename(self.strmmsi+'.nmea1'))
+
+    def __load_from_file(self, file):
+        '''
+        Adds all records from the opened file to this bank.
+        The file must be locked before the call.
+        '''
+        while True:
+            record = file.read(aivdm_record123_length)
+            if not record:
+                break
+            self.append(Nmea1.new_from_record(record))
+
+    def _write_in_file(self, file):
+        '''
+        Writes all records from this bank to the opened file.
+        The file must be locked before the call,
+        and should be truncated after the call.
+        '''
+        for nmea1 in self:
+            file.write(nmea1.to_record())
+
+    def __load(self):
+        try:
+            file = open(self.get_filename(), 'rb')
+            lockf(file, LOCK_SH)
+        except IOError, ioerr:
+            if ioerr.errno==2: # No file
+                return
+            raise
+        self.__load_from_file(file)
+        file.close()
+        
+    def __iter__(self):
+        """
+        Each call reloads the file
+        """
+        self.__load()
+        self.sort_by_date_reverse()
+        return list.__iter__(self)
+
+    def packday(self, remove_manual_input=False):
+        #print "MMSI", strmmsi
+
+        self = BankNmea1(self.strmmsi, self.date)
+        filename = self.get_filename()
+        try:
+            file = open(filename, 'r+b') # read/write binary
+        except IOError, ioerr:
+            if ioerr.errno!=2: # No file
+                raise
+            return self # no data
+        lockf(file, LOCK_EX)
+        self.__load_from_file(file)
+        self.sort_by_date()
+
+        file_has_changed = False
+        file_must_be_unlinked = False
+
+        #print "PACKING..."
+        file_has_changed = self.remove_duplicate_timestamp() or file_has_changed
+
+        if remove_manual_input:
+            #print "REMOVING MANUAL INPUT..."
+            file_has_changed = self.remove_manual_input() or file_has_changed
+
+        if file_has_changed:
+            file.seek(0)
+            self._write_in_file(file)
+            file.truncate()
+            if file.tell() == 0:
+                file_must_be_unlinked = True
+
+        file.close()
+        
+        if file_must_be_unlinked:
+            # FIXME we release the lock before unlinking
+            # another process might encounter an empty file (not handled)
+            logging.warning('file was truncated to size 0. unlinking')
+            os.unlink(filename) # we have the lock (!)
+
+    def dump_to_stdout(self):
+        for nmea1 in self:
+            nmea1.dump_to_stdout()
+
+    def sort_by_date(self):
+        self.sort(lambda n1, n2: n1.timestamp_1 - n2.timestamp_1)
+
+    def sort_by_date_reverse(self):
+        self.sort(lambda n1, n2: n2.timestamp_1 - n1.timestamp_1)
+
+    def remove_duplicate_timestamp(self):
+        file_has_changed = False
+        if len(self)<=1:
+            return file_has_changed
+        last_timestamp = self[0].timestamp_1
+        i = 1
+        while i<len(self):
+            if self[i].timestamp_1 == last_timestamp:
+                del self[i]
+                file_has_changed = True
+            else:
+                last_timestamp = self[i].timestamp_1
+                i += 1
+        return file_has_changed
+        
+    def remove_manual_input(self):
+        file_has_changed = False
+        i = 0
+        while i<len(self):
+            if self[i].source_1[:2]=='MI':
+                del self[i]
+                file_has_changed = True
+            else:
+                i += 1
+        return file_has_changed
+
+class Nmea1Feeder:
+    """
+    Yields all nmea1 packets between two given datetimes
+    in REVERSE order (recent information first)
+    """
+    def __init__(self, strmmsi, datetime_end, datetime_begin=None, max_count=0):
+        self.strmmsi = strmmsi
+        assert datetime_end is not None
+        self.datetime_end = datetime_end
+        self.datetime_begin = datetime_begin or DB_STARTDATE
+        self.max_count = max_count
+
+    def __iter__(self):
+        dt_end = self.datetime_end
+        d_end = dt_end.date()
+        ts_end = datetime_to_timestamp(dt_end)
+        if self.datetime_begin:
+            dt_begin = self.datetime_begin
+            d_begin = dt_begin.date()
+            ts_begin = datetime_to_timestamp(dt_begin)
+        else:
+            dt_begin = None
+            d_begin = None
+            ts_begin = None
+
+        d = d_end
+        count = 0
+        while True:
+            if d_begin is not None and d < d_begin:
+                return
+            bank = BankNmea1(self.strmmsi, d)
+            for nmea1 in bank:
+                if ts_begin is not None and nmea1.timestamp_1 < ts_begin:
+                    return
+                if nmea1.timestamp_1 > ts_end:
+                    continue
+                
+                yield nmea1
+               
+                count += 1
+                if self.max_count and count >= self.max_count:
+                    return
+            d += timedelta(-1)
+
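+# Usage sketch (MMSI and time window are illustrative):
+#   feeder = Nmea1Feeder('244123456', datetime.utcnow(),
+#                        datetime.utcnow() - timedelta(days=1), max_count=100)
+#   for nmea1 in feeder:
+#       nmea1.dump_to_stdout()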
+
+class BankNmea5(list):
+    """
+    This class handles a .nmea5 file
+    """
+    def __init__(self, strmmsi, dt):
+        list.__init__(self)
+        self.strmmsi = strmmsi
+        if isinstance(dt, date):
+            try:
+                dt = dt.strftime('%Y%m%d')
+            except ValueError:
+                logging.critical('dt=%s', dt)
+                raise
+        self.date = dt
+
+    def get_filename(self):
+        return os.path.join(DBPATH, 'bydate', self.date, hash3_pathfilename(self.strmmsi+'.nmea5'))
+
+    def __load_from_file(self, file):
+        '''
+        Adds all records from the opened file to this bank.
+        The file must be locked before the call.
+        '''
+        while True:
+            record = file.read(aivdm_record5_length)
+            if not record:
+                break
+            self.append(Nmea5.new_from_record(record))
+
+    def _write_in_file(self, file):
+        '''
+        Writes all records from this bank to the opened file.
+        The file must be locked before the call,
+        and should be truncated after the call.
+        '''
+        for nmea5 in self:
+            file.write(nmea5.to_record())
+
+    def __load(self):
+        try:
+            file = open(self.get_filename(), 'rb')
+            lockf(file, LOCK_SH)
+        except IOError, ioerr:
+            if ioerr.errno==2: # No file
+                return
+            raise
+        self.__load_from_file(file)
+        file.close()
+        
+    def __iter__(self):
+        """
+        Each call reloads the file
+        """
+        self.__load()
+        self.sort_by_date_reverse()
+        return list.__iter__(self)
+
+    def sort_by_date(self):
+        self.sort(lambda n1, n2: n1.timestamp_5 - n2.timestamp_5)
+
+    def sort_by_date_reverse(self):
+        self.sort(lambda n1, n2: n2.timestamp_5 - n1.timestamp_5)
+
+class Nmea5Feeder:
+    """
+    Yields all nmea5 packets between two given datetimes
+    in REVERSE order (recent information first)
+    """
+    def __init__(self, strmmsi, datetime_end, datetime_begin=None, max_count=0):
+        self.strmmsi = strmmsi
+        assert datetime_end is not None
+        self.datetime_end = datetime_end
+        self.datetime_begin = datetime_begin or DB_STARTDATE
+        self.max_count = max_count
+
+    def __iter__(self):
+        dt_end = self.datetime_end
+        d_end = dt_end.date()
+        ts_end = datetime_to_timestamp(dt_end)
+        if self.datetime_begin:
+            dt_begin = self.datetime_begin
+            d_begin = dt_begin.date()
+            ts_begin = datetime_to_timestamp(dt_begin)
+        else:
+            dt_begin = None
+            d_begin = None
+            ts_begin = None
+
+        d = d_end
+        count = 0
+        while True:
+            if d_begin is not None and d < d_begin:
+                return
+            bank = BankNmea5(self.strmmsi, d)
+            for nmea5 in bank:
+                if ts_begin is not None and nmea5.timestamp_5 < ts_begin:
+                    return
+                if nmea5.timestamp_5 > ts_end:
+                    continue
+
+                yield nmea5
+               
+                count += 1
+                if self.max_count and count >= self.max_count:
+                    return
+            d += timedelta(-1)
+
+
+class NmeaFeeder:
+    def __init__(self, strmmsi, datetime_end, datetime_begin=None, filters=[], granularity=1, max_count=None):
+        if granularity<=0:
+            logging.warning('Granularity=%d generates duplicate entries', granularity)
+        self.strmmsi = strmmsi
+        assert datetime_end is not None
+        self.datetime_end = datetime_end
+        self.datetime_begin = datetime_begin or DB_STARTDATE
+        self.filters = filters
+        self.granularity = granularity
+        self.max_count = max_count
+
+    def __iter__(self):
+        nmea = Nmea(self.strmmsi)
+        if self.datetime_begin:
+            nmea5_datetime_begin = self.datetime_begin - timedelta(30) # go back up to 30 days to get a good nmea5 packet
+        else:
+            nmea5_datetime_begin = None
+        nmea5_iterator = Nmea5Feeder(self.strmmsi, self.datetime_end, nmea5_datetime_begin).__iter__()
+        nmea5 = Nmea5(sys.maxint)
+
+        count = 0
+        lasttimestamp = sys.maxint
+        for nmea1 in Nmea1Feeder(self.strmmsi, self.datetime_end, self.datetime_begin):
+            Nmea1.from_values(nmea, *nmea1.to_values())
+            
+            # try to get an older nmea5 packet
+            nmea5_updated = False
+            while nmea5 is not None and nmea5.timestamp_5 > nmea1.timestamp_1:
+                try:
+                    nmea5 = nmea5_iterator.next()
+                    nmea5_updated = True
+                except StopIteration:
+                    nmea5 = None
+            
+            if nmea5_updated and nmea5 is not None:
+                Nmea5.merge_from_values(nmea, *nmea5.to_values())
+
+            filtered_out = False
+            for is_ok in self.filters:
+                if not is_ok(nmea):
+                    filtered_out = True
+                    break
+            if filtered_out:
+                continue
+
+            if nmea.timestamp_1 <= lasttimestamp - self.granularity:
+                yield nmea
+                count += 1
+                if self.max_count and count >= self.max_count:
+                    return
+                lasttimestamp = nmea.timestamp_1
+
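+# Usage sketch (parameters illustrative): iterate the merged nmea1+nmea5 track of
+# a vessel over the last week, at most one point per 600 seconds, skipping fixes
+# without a usable position.
+#   feeder = NmeaFeeder('244123456', datetime.utcnow(),
+#                       datetime.utcnow() - timedelta(days=7),
+#                       filters=[filter_knownposition], granularity=600)
+#   for nmea in feeder:
+#       print nmea.get_title(), nmea.get_latitude_str(), nmea.get_longitude_str()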
+
+def all_mmsi_generator():
+    for dirname, dirs, fnames in os.walk(os.path.join(DBPATH, 'last')):
+        for fname in fnames:
+            if fname[-6:]=='.nmea1':
+                yield fname[:-6]
+
+
+def load_fleet_to_uset(fleetname):
+    result = []
+    sqlexec(u"SELECT mmsi FROM fleet_vessel WHERE fleet=%(fleetname)s", {'fleetname': fleetname})
+    c = get_common_cursor()
+    while True:
+        row=c.fetchone()
+        if not row:
+            break
+        mmsi = row[0]
+        result.append(mmsi_to_strmmsi(mmsi))
+    logging.debug('fleet=%s', result)
+    return result
+
+
+def filter_area(nmea, area):
+    if nmea.latitude == AIS_LAT_NOT_AVAILABLE or nmea.longitude == AIS_LON_NOT_AVAILABLE:
+        return False
+    if not area.contains((nmea.latitude/AIS_LATLON_SCALE, nmea.longitude/AIS_LATLON_SCALE)):
+        return False
+    return True
+
+def filter_knownposition(nmea):
+    # latitude=0 and longitude=0 are also filtered out, although that should not be necessary
+    return nmea.latitude != AIS_LAT_NOT_AVAILABLE and nmea.longitude != AIS_LON_NOT_AVAILABLE and nmea.latitude != 0 and nmea.longitude != 0
+
+
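+# filter_speedcheck() keeps its per-vessel state in the module-level globals
+# below, so it must be fed the positions of a single vessel at a time,
+# newest position first.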
+_filter_positioncheck_last_mmsi = None
+def filter_speedcheck(nmea, max_mps):
+    """
+    max_mps is the maximum allowed speed, in nautical miles per second
+    """
+    global _filter_positioncheck_last_mmsi
+    global _filter_positioncheck_last_time
+    global _filter_positioncheck_last_time_failed
+    global _filter_positioncheck_last_lat
+    global _filter_positioncheck_last_lon
+    global _filter_positioncheck_error_count
+    if nmea.strmmsi != _filter_positioncheck_last_mmsi:
+        _filter_positioncheck_last_time = None
+        _filter_positioncheck_last_mmsi = nmea.strmmsi
+        _filter_positioncheck_error_count = 0
+    if _filter_positioncheck_last_time is not None:
+        seconds = _filter_positioncheck_last_time - nmea.timestamp_1
+        distance = dist3_latlong_ais((_filter_positioncheck_last_lat, _filter_positioncheck_last_lon), (nmea.latitude, nmea.longitude))
+        if seconds:
+            speed = distance/seconds
+            if speed > max_mps:
+                if _filter_positioncheck_error_count < 10:
+                    logging.debug("Ignoring point: distance = %s, time = %s, speed = %s kt, source = %s", distance, seconds, distance/seconds*3600, repr(nmea.source_1))
+                    if _filter_positioncheck_error_count == 0 or _filter_positioncheck_last_time_failed != nmea.timestamp_1:
+                        _filter_positioncheck_error_count += 1
+                        _filter_positioncheck_last_time_failed = nmea.timestamp_1
+                    return False
+                else:
+                    logging.warning("Discontinous position accepted after too many failures: %.2f nm in %s s (%.0f kt), source = %s", distance, seconds, distance/seconds*3600, repr(nmea.source_1))
+            _filter_positioncheck_error_count = 0
+    _filter_positioncheck_last_time = nmea.timestamp_1
+    _filter_positioncheck_last_lat = nmea.latitude
+    _filter_positioncheck_last_lon = nmea.longitude
+    return True
+
+
+if __name__ == '__main__':
+    from optparse import OptionParser, OptionGroup
+
+    parser = OptionParser(usage='%prog [options] { mmsi | @fleet }+ | all')
+
+    parser.add_option('-d', '--debug',
+        action='store_true', dest='debug', default=False,
+        help="debug mode")
+
+    parser.add_option('-e', '--end',
+        action='store', dest='sdt_end', metavar="'YYYYMMDD HHMMSS'",
+        help="End data processing on that GMT date time. Default is now. If a date is provided without time, time default to 235959.")
+    parser.add_option('-s', '--start',
+        action='store', dest='sdt_start', metavar="'YYYYMMDD HHMMSS'",
+        help="Start data processing on that date. Using that option enables multiple output of the same boat. Disabled by default. If a date is provided without time, time default to 000000. If other options enable multiple output, default to 1 day before --end date/time.")
+    parser.add_option('-g', '--granularity',
+        action='store', type='int', dest='granularity', metavar='SECONDS',
+        help="Dump only one position every granularity seconds. Using that option enables multiple output of the same boat. If other options enable multiple output, defaults to 600 (10 minutes)")
+    parser.add_option('--max',
+        action='store', type='int', dest='max_count', metavar='NUMBER',
+        help="Dump a maximum of NUMBER positions every granularity seconds. Using that option enables multiple output of the same boat.")
+
+    parser.add_option('--filter-knownposition',
+        action='store_true', dest='filter_knownposition', default=False,
+        help="Eliminate unknown positions from results.")
+
+    parser.add_option('--filter-speedcheck',
+        action='store', type='int', dest='speedcheck', default=200, metavar='KNOTS',
+        help="Eliminate erroneaous positions from results, based on impossible speed.")
+
+    parser.add_option('--filter-type',
+        action='append', type='int', dest='type_list', metavar="TYPE",
+        help="process a specific ship type.")
+    parser.add_option('--help-types',
+        action='store_true', dest='help_types', default=False,
+        help="display list of available types")
+
+    parser.add_option('--filter-area',
+        action='store', type='str', dest='area_file', metavar="FILE.KML",
+        help="only process a specific area as defined in a kml polygon file.")
+
+    parser.add_option('--filter-destination',
+        action='store', type='str', dest='filter_destination', metavar="DESTINATION",
+        help="Only print ships with that destination.")
+
+    parser.add_option('--no-headers',
+        action='store_false', dest='csv_headers', default=True,
+        help="skip CSV headers")
+    #
+
+    expert_group = OptionGroup(parser, "Expert Options",
+        "You normaly don't need any of these")
+
+    expert_group.add_option('--db',
+        action='store', dest='db', default=DBPATH,
+        help="path to filesystem database. Default=%default")
+
+    expert_group.add_option('--debug-sql',
+        action='store_true', dest='debug_sql', default=False,
+        help="print all sql queries to stdout before running them")
+
+    expert_group.add_option('--action',
+        choices=('dump', 'removemanual', 'mmsidump', 'nirgaldebug', 'fixdestination'), default='dump',
+        help='Possible values are:\n'
+            'dump: dump values in csv format. This is the default.\n'
+            'removemanual: Delete Manual Input entries from the database.\n'
+            'mmsidump: Dump the selected mmsi.\n'
+            'fixdestination: Fill in the destination column of the sql vessel table from the nmea logs when it is missing.')
+    parser.add_option_group(expert_group)
+
+    (options, args) = parser.parse_args()
+
+
+    if options.help_types:
+        print "Known ship types:"
+        keys = SHIP_TYPES.keys()
+        keys.sort()
+        for k in keys:
+            print k, SHIP_TYPES[k]
+        sys.exit(0)
+
+    DBPATH = options.db
+
+    if options.debug:
+        loglevel = logging.DEBUG
+    else:
+        loglevel = logging.INFO
+    logging.basicConfig(level=loglevel, format='%(asctime)s %(levelname)s %(message)s')
+
+    if options.debug_sql:
+        sql_setdebug(True)
+
+    #
+    # Ships selections
+    #
+
+    if len(args)==0:
+        print >> sys.stderr, "No ship to process"
+        sys.exit(1)
+
+    target_mmsi_iterator = []
+    all_targets = False
+    for arg in args:
+        if arg=='all':
+            all_targets = True
+        elif arg.startswith('@'):
+            target_mmsi_iterator += load_fleet_to_uset(arg[1:])
+        else:
+            target_mmsi_iterator.append(arg)
+    if all_targets:
+        if target_mmsi_iterator:
+            logging.warning('Selecting all ships, ignoring other arguments')
+        target_mmsi_iterator = all_mmsi_generator()
+
+    #
+    # Dates selections
+    #
+
+    if options.sdt_end:
+        # remove non digit characters
+        options.sdt_end = "".join([ c for c in options.sdt_end if c.isdigit()])
+        if len(options.sdt_end)==14:
+            dt_end = datetime.strptime(options.sdt_end, '%Y%m%d%H%M%S')
+        elif len(options.sdt_end)==8:
+            dt_end = datetime.strptime(options.sdt_end, '%Y%m%d')
+            dt_end = datetime.combine(dt_end.date(), time(23,59,59))
+        else:
+            print >> sys.stderr, "Invalid format for --end option"
+            sys.exit(1)
+    else:
+        dt_end = datetime.utcnow()
+    logging.debug('--end is %s', dt_end)
+
+    if options.sdt_start or options.granularity is not None or options.max_count:
+        # time period is enabled
+        if options.sdt_start:
+            options.sdt_start = "".join([ c for c in options.sdt_start if c.isdigit()])
+            if len(options.sdt_start)==14:
+                dt_start = datetime.strptime(options.sdt_start, '%Y%m%d%H%M%S')
+            elif len(options.sdt_start)==8:
+                dt_start = datetime.strptime(options.sdt_start, '%Y%m%d')
+            else:
+                print >> sys.stderr, "Invalid format for --start option"
+                sys.exit(1)
+        else:
+            dt_start = dt_end - timedelta(1)
+        if options.granularity is None:
+            options.granularity = 600
+    else:
+        dt_start = None
+        options.max_count = 1
+        if options.granularity is None:
+            options.granularity = 600
+    logging.debug('--start is %s', dt_start)
+
+    #
+    # Filters
+    #
+
+    filters=[]
+    
+    if options.filter_knownposition:
+        filters.append(filter_knownposition)
+
+    if options.speedcheck != 0:
+        maxmps = options.speedcheck / 3600. # from knots to NM per second
+        filters.append(lambda nmea: filter_speedcheck(nmea, maxmps))
+
+    if options.area_file:
+        area = load_area_from_kml_polygon(options.area_file)
+        filters.append(lambda nmea: filter_area(nmea, area))
+    
+    if options.type_list:
+        def filter_type(nmea):
+            return nmea.type in options.type_list
+        filters.append(filter_type)
+
+    if options.filter_destination:
+        filters.append(lambda nmea: nmea.destination.startswith(options.filter_destination))
+
+    #
+    # Processing
+    #
+
+    if options.action=='dump':
+        output = csv.writer(sys.stdout)
+        if options.csv_headers:
+            output.writerow(Nmea.csv_headers)
+        for mmsi in target_mmsi_iterator:
+            logging.debug('Considering %s', repr(mmsi))
+            assert dt_end is not None
+            for nmea in NmeaFeeder(mmsi, dt_end, dt_start, filters, granularity=options.granularity, max_count=options.max_count):
+                output.writerow(nmea.get_dump_row())
+
+    elif options.action=='removemanual':
+        if filters:
+            print >> sys.stderr, "removemanual action doesn't support filters"
+            sys.exit(1)
+
+        # build the list of days to process, from --start (default: 1 day back) to --end
+        d_start = (dt_start or (dt_end - timedelta(1))).date()
+        dates = [d_start + timedelta(i) for i in range((dt_end.date() - d_start).days + 1)]
+        for dt in dates:
+            logging.info("Processing date %s", dt)
+            for mmsi in target_mmsi_iterator:
+                BankNmea1(mmsi, dt).packday(remove_manual_input=True)
+    
+    elif options.action=='mmsidump':
+        for strmmsi in target_mmsi_iterator :
+            print strmmsi
+    elif options.action=='fixdestination':
+        for mmsi in target_mmsi_iterator:
+            for nmea in NmeaFeeder(mmsi, dt_end, dt_start, filters, granularity=options.granularity, max_count=options.max_count):
+                destination = nmea.destination.rstrip(' @\0')
+                if destination:
+                    sqlexec(u'UPDATE vessel SET destination = %(destination)s WHERE mmsi=%(mmsi)s AND destination IS NULL', {'mmsi':strmmsi_to_mmsi(mmsi), 'destination':destination})
+                    logging.info('%s -> %s', mmsi, repr(destination))
+                    dbcommit()
+                    break # go to next mmsi
+
diff --git a/bin/area.py b/bin/area.py
new file mode 100644 (file)
index 0000000..92a1a77
--- /dev/null
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8
+
+__all__ = [ 'Area', 'load_area_from_kml_polygon' ]
+
+import sys
+
+class Area:
+    """
+    That class defines an area (on the Earth)
+    It provides testing whether a point is inside or not
+    """
+    def __init__(self, points=[]):
+        self.points = []
+        for p in points:
+            self.addpoint(p)
+    def addpoint(self, point):
+        self.points.append(point)
+        if len(self.points)==1:
+            self.min = point
+            self.max = point
+            return
+        if point[0] < self.min[0]:
+            self.min = (point[0], self.min[1])
+        elif point[0] > self.max[0]:
+            self.max = (point[0], self.max[1])
+        if point[1] < self.min[1]:
+            self.min = (self.min[0], point[1])
+        elif point[1] > self.max[1]:
+            self.max = (self.max[0], point[1])
+
+    def check(self):
+        for point in self.points:
+            if not self.contains(point):
+                return False
+        return True
+
+    def contains(self, point):
+        if not self.points:
+            return False
+        # first test the bounding box
+        #if point[0] < self.min[0] \
+        # or point[0] > self.max[0] \
+        # or point[1] < self.min[1] \
+        # or point[1] > self.max[1] :
+        #    return False
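+        # convex polygon test: walking the vertices counter-clockwise, the point
+        # must lie on the left side of every edge (cross product >= 0)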
+        for i in range(len(self.points)):
+            p1 = self.points[i]
+            x1, y1 = p1
+            p2 = self.points[(i+1)%len(self.points)]
+            x2, y2 = p2
+            xa = point[0] - x1
+            ya = point[1] - y1
+            xb = x2 - x1
+            yb = y2 - y1
+            if xa * yb < xb * ya:
+                return False
+        return True
+
+def load_area_from_kml_polygon(filename):
+    file = open(filename)
+    coordinates_lines = [ line for line in file.readlines() if '</coordinates>' in line ]
+    if len(coordinates_lines) != 1:
+        print >> sys.stderr, 'There should be exactly one line with coordinates in', filename
+        sys.exit(1)
+    coordinates = coordinates_lines[0].replace('</coordinates>', '').replace('\n', '').replace('\r', '')
+    coordinates = [ xyz for xyz in coordinates.split(' ') if xyz ]
+    if coordinates[0] != coordinates[-1]:
+        print >> sys.stderr, 'First and last coordinates of', filename, 'should be the same'
+        print >> sys.stderr, coordinates[0]
+        print >> sys.stderr, coordinates[-1]
+        sys.exit(1)
+    assert len(coordinates)>3, 'polygon should have 3 edges minimum'
+    
+    area = Area()
+    for xyz in coordinates[0:-1]:
+        x,y,z = xyz.split(',')
+        area.addpoint((float(y),float(x)))
+    assert area.check(), 'Polygon should be counter-clockwise and convex.'
+    return area
+
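+# A minimal usage sketch (the file name and coordinates below are only examples):
+#     area = load_area_from_kml_polygon('/tmp/zone.kml')
+#     area.contains((43.2, 6.1))    # (latitude, longitude) in decimal degrees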
+
+#if __name__ == '__main__':
+# counter clock-wise : Positive
+#pelagos = Area([
+#    (42.91, 12.5),
+#    (45.3612930132714, 10.01843703552244),
+#    (43.6,5.5),
+#    (40.57,8.6)
+#    ])
+#for p in [
+#    (42,9),
+#    (41,5),
+#    (40,12),
+#    (45,13),
+#    (45,7),
+#    ]:
+#    print "testing", p
+#    if pelagos.contains(p):
+#        print "inside"
+#    else:
+#        print"outside"
+
+
diff --git a/bin/db.py.SAMPLE b/bin/db.py.SAMPLE
new file mode 100644 (file)
index 0000000..4df57ac
--- /dev/null
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+
+import psycopg2, psycopg2.extensions
+
+DUMP_SQL_QUERIES = False
+
+__db = None
+def get_common_db():
+    global __db
+    if not __db:
+        psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
+        __db=psycopg2.connect("dbname=ais host=localhost user=FIXME password=FIXME")
+        __db.set_client_encoding('UTF8')
+    return __db
+
+__cursor = None
+def get_common_cursor():
+    global __cursor
+    if not __cursor:
+        __cursor = get_common_db().cursor()
+    return __cursor
+
+def sql_setdebug(b):
+    global DUMP_SQL_QUERIES
+    DUMP_SQL_QUERIES = b
+
+def sqlexec(sql, *args, **kargs):
+    cursor = get_common_cursor()
+    if DUMP_SQL_QUERIES:
+        print cursor.mogrify(sql.encode('utf8'), *args, **kargs)
+    cursor.execute(sql, *args, **kargs)
+
+def dbcommit():
+    get_common_db().commit()
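+
+# A minimal usage sketch (the query values are only examples):
+#     sqlexec(u"SELECT name FROM vessel WHERE mmsi=%(mmsi)s", {'mmsi': 227001230})
+#     row = get_common_cursor().fetchone()
+#     dbcommit()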
diff --git a/bin/dj.py b/bin/dj.py
new file mode 100755 (executable)
index 0000000..73dfdaf
--- /dev/null
+++ b/bin/dj.py
@@ -0,0 +1,11 @@
+#!/usr/bin/python
+from django.core.management import execute_manager
+try:
+    from djais import settings
+except ImportError:
+    import sys
+    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
+    sys.exit(1)
+
+if __name__ == "__main__":
+    execute_manager(settings)
diff --git a/bin/djais/__init__.py b/bin/djais/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/bin/djais/basicauth.py b/bin/djais/basicauth.py
new file mode 100644 (file)
index 0000000..beeebf4
--- /dev/null
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+#
+# Example usage:
+#
+# def auth(username, password):
+#     return (username, password) == ('me', 'secret')
+#     # FIX: should return a User object rather than a boolean
+#
+# @http_authenticate(auth, 'myrealm')
+# def myview(request):
+#     return HttpResponse("Hello world!")
+
+from django.http import *
+import base64
+
+
+class HttpResponseAuthenticate(HttpResponse):
+    """ Http response that trigger a basic http authentification on the client.
+        parameter: http realm (string)"""
+    def __init__(self, *args, **kwargs):
+        realm = 'DOL'
+        if 'realm' in kwargs:
+            realm =  kwargs['realm']
+            del kwargs['realm']
+        HttpResponse.__init__(self, *args, **kwargs)
+        self.status_code = 401
+        self.mimetype = "text/html; charset=utf-8"
+        if '"' in realm:
+            raise Exception("Invalid realm \""+realm+"\" violates RFC1945")
+        self['WWW-Authenticate'] = 'Basic realm="'+realm+'"'
+
+
+class http_authenticate:
+    """ Decorator that check authorization.
+        Parameters:
+            passwd_checker(username, password): function that must return the authenticated User object, or a false value if the credentials are rejected.
+            realm: string with the realm. See rfc1945.
+    """
+    def __init__(self, passwd_checker, realm):
+        self.passwd_checker = passwd_checker
+        self.realm = realm
+
+    def __call__(self, func):
+        def _wrapper(*args, **kwargs):
+            request = args[0]
+            if 'HTTP_AUTHORIZATION' not in request.META:
+                username, password = "", ""
+                user = self.passwd_checker(username, password)
+                if not user:
+                    return HttpResponseAuthenticate("Password required", realm=self.realm)
+            else:
+                auth = request.META['HTTP_AUTHORIZATION']
+                assert auth.startswith('Basic '), "Invalid authentication scheme"
+                # split on the first ':' only: the password itself may contain colons
+                username, password = base64.decodestring(auth[len('Basic '):]).split(':', 1)
+                user = self.passwd_checker(username, password)
+                if not user:
+                    return HttpResponseAuthenticate("Invalid username/password", realm=self.realm)
+            request.user = user
+            return func(*args, **kwargs)
+
+        _wrapper.__name__ = func.__name__
+        return _wrapper
diff --git a/bin/djais/models.py b/bin/djais/models.py
new file mode 100644 (file)
index 0000000..0dea2e7
--- /dev/null
@@ -0,0 +1,118 @@
+# -*- coding: utf-8 -*-
+from django.db import models
+from django.contrib.auth.models import get_hexdigest
+from ais import Nmea, mmsi_to_strmmsi
+
+class User(models.Model):
+    id = models.AutoField(primary_key=True)
+    login = models.CharField(max_length=16, unique=True)
+    password_hash = models.CharField(max_length=75)
+    name = models.CharField(max_length=50)
+    email = models.EmailField()
+    father = models.ForeignKey('User')
+    creation_datetime = models.DateTimeField(auto_now_add=True)
+    class Meta:
+        db_table = u'user'
+        ordering = ('id',)
+
+    def __unicode__(self):
+        return self.login
+
+    def set_password(self, raw_password):
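+        # stores the password as 'algo$salt$hash' (salted sha1), the same layout
+        # as Django's contrib.auth password field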
+        import random
+        algo = 'sha1'
+        salt = get_hexdigest(algo, str(random.random()), str(random.random()))[:5]
+        hsh = get_hexdigest(algo, salt, raw_password)
+        self.password_hash = '%s$%s$%s' % (algo, salt, hsh)
+
+    def check_password(self, raw_password):
+        algo, salt, hsh = self.password_hash.split('$')
+        return hsh == get_hexdigest(algo, salt, raw_password)
+
+    def get_and_delete_messages(self):
+        return None
+
+    def is_admin_by(self, user_id):
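+        # a user is administered by user_id if user_id is itself or one of its
+        # ancestors in the 'father' chain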
+        if self.id == user_id:
+            return True
+        if self.father_id is None:
+            return False
+        return self.father.is_admin_by(user_id)
+
+class Vessel(models.Model):
+    mmsi = models.IntegerField(primary_key=True)
+    name = models.CharField(max_length=20)
+    imo = models.IntegerField()
+    callsign = models.CharField(max_length=7)
+    type = models.IntegerField()
+    destination = models.CharField(max_length=20)
+    updated = models.DateTimeField()
+    source = models.CharField(max_length=8)
+    class Meta:
+        db_table = u'vessel'
+    def __unicode__(self):
+        return unicode(self.mmsi) # FIXME
+    def get_last_nmea(self):
+        strmmsi = mmsi_to_strmmsi(self.mmsi)
+        return Nmea.new_from_lastinfo(strmmsi)
+
+class Fleet(models.Model):
+    name = models.CharField(max_length=50, primary_key=True)
+    vessel = models.ManyToManyField(Vessel, through='FleetVessel')
+    description = models.TextField()
+    class Meta:
+        db_table = u'fleet'
+    def __unicode__(self):
+        return self.name
+    def vessel_count(self):
+        return FleetVessel.objects.filter(fleet=self.name).count()
+    def user_count(self):
+        return FleetUser.objects.filter(fleet=self.name).count()
+
+class FleetUser(models.Model):
+    id = models.AutoField(primary_key=True)
+    fleet = models.ForeignKey(Fleet, db_column='fleet', to_field='name')
+    user = models.ForeignKey(User)
+    class Meta:
+        db_table = u'fleet_user'
+
+class FleetVessel(models.Model):
+    id = models.AutoField(primary_key=True)
+    fleet = models.ForeignKey(Fleet, db_column='fleet', to_field='name')
+    vessel = models.ForeignKey(Vessel, db_column='mmsi', to_field='mmsi')
+    class Meta:
+        db_table = u'fleet_vessel'
+    
+## manual input source
+#class MiSource(models.Model):
+#    id = models.IntegerField(primary_key=True)
+#    userid = models.IntegerField()
+#    name = models.TextField(unique=True)
+#    class Meta:
+#        db_table = u'mi_source'
+#
+## manual input vessel
+#class MiVessel(models.Model):
+#    mmsi_txt = models.TextField(primary_key=True) # This field type is a guess.
+#    class Meta:
+#        db_table = u'mi_vessel'
+
+
+# Plane plotter
+#class Ppuser(models.Model):
+#    usr = models.TextField(primary_key=True) # This field type is a guess.
+#    lat = models.FloatField()
+#    lon = models.FloatField()
+#    class Meta:
+#        db_table = u'ppuser'
+#
+#class Plane(models.Model):
+#    flight = models.CharField(max_length=8)
+#    reg = models.CharField(max_length=8)
+#    ads = models.CharField(max_length=8)
+#    type = models.CharField(max_length=4)
+#    usr = models.TextField() # This field type is a guess.
+#    updated = models.DateTimeField()
+#    class Meta:
+#        db_table = u'plane'
+
diff --git a/bin/djais/settings.py.SAMPLE b/bin/djais/settings.py.SAMPLE
new file mode 100644 (file)
index 0000000..ef7e4ff
--- /dev/null
@@ -0,0 +1,82 @@
+# Django settings for ais project.
+
+DEBUG = True
+TEMPLATE_DEBUG = DEBUG
+
+ADMINS = (
+    # ('Your Name', 'your_email@domain.com'),
+)
+
+MANAGERS = ADMINS
+
+DATABASE_ENGINE = 'postgresql_psycopg2'           # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
+DATABASE_NAME = 'ais'             # Or path to database file if using sqlite3.
+DATABASE_USER = 'FIXME'             # Not used with sqlite3.
+DATABASE_PASSWORD = 'FIXME'         # Not used with sqlite3.
+DATABASE_HOST = 'localhost'             # Set to empty string for localhost. Not used with sqlite3.
+DATABASE_PORT = ''             # Set to empty string for default. Not used with sqlite3.
+
+# Local time zone for this installation. Choices can be found here:
+# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
+# although not all choices may be available on all operating systems.
+# If running in a Windows environment this must be set to the same as your
+# system time zone.
+TIME_ZONE = 'Europe/Paris'
+
+# Language code for this installation. All choices can be found here:
+# http://www.i18nguy.com/unicode/language-identifiers.html
+LANGUAGE_CODE = 'en-gb'
+
+SITE_ID = 1
+
+# If you set this to False, Django will make some optimizations so as not
+# to load the internationalization machinery.
+USE_I18N = True
+
+# Absolute path to the directory that holds media.
+# Example: "/home/media/media.lawrence.com/"
+MEDIA_ROOT = ''
+
+# URL that handles the media served from MEDIA_ROOT. Make sure to use a
+# trailing slash if there is a path component (optional in other cases).
+# Examples: "http://media.lawrence.com", "http://example.com/media/"
+MEDIA_URL = ''
+
+# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
+# trailing slash.
+# Examples: "http://foo.com/media/", "/media/".
+ADMIN_MEDIA_PREFIX = '/media/'
+
+# Make this unique, and don't share it with anybody.
+SECRET_KEY = 'FIXME'
+
+# List of callables that know how to import templates from various sources.
+TEMPLATE_LOADERS = (
+    'django.template.loaders.filesystem.load_template_source',
+    'django.template.loaders.app_directories.load_template_source',
+#     'django.template.loaders.eggs.load_template_source',
+)
+
+MIDDLEWARE_CLASSES = (
+#    'django.middleware.common.CommonMiddleware',
+#    'django.contrib.sessions.middleware.SessionMiddleware',
+#    'django.contrib.auth.middleware.AuthenticationMiddleware',
+)
+
+#ROOT_URLCONF = 'ais2.djais.urls'
+ROOT_URLCONF = 'djais.urls'
+
+TEMPLATE_DIRS = (
+    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
+    # Always use forward slashes, even on Windows.
+    # Don't forget to use absolute paths, not relative paths.
+    '/home/nirgal/kod/ais/html_templates',
+)
+
+INSTALLED_APPS = (
+#    'django.contrib.auth',
+#    'django.contrib.contenttypes',
+#    'django.contrib.sessions',
+#    'django.contrib.sites',
+    'djais',
+)
diff --git a/bin/djais/templatetags/__init__.py b/bin/djais/templatetags/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/bin/djais/templatetags/ais_extras.py b/bin/djais/templatetags/ais_extras.py
new file mode 100644 (file)
index 0000000..0b59f30
--- /dev/null
@@ -0,0 +1,9 @@
+# -*- coding: utf-8 -*-
+from django import template
+from ais import mmsi_to_strmmsi
+
+register = template.Library()
+
+@register.filter
+def sqlmmsi_to_strmmsi(txt):
+    return unicode(mmsi_to_strmmsi(txt))
diff --git a/bin/djais/urls.py b/bin/djais/urls.py
new file mode 100644 (file)
index 0000000..7749632
--- /dev/null
@@ -0,0 +1,40 @@
+from django.conf.urls.defaults import *
+import djais
+
+# Uncomment the next two lines to enable the admin:
+# from django.contrib import admin
+# admin.autodiscover()
+
+urlpatterns = patterns('',
+    (r'^$', 'djais.views.index'),
+    (r'^fleet/$', 'djais.views.fleets'),
+    (r'^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/$', 'djais.views.fleet'),
+    (r'^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/list$', 'djais.views.fleet_vessels'),
+    (r'^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/add$', 'djais.views.fleet_vessel_add'),
+    (r'^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/add_vessel$', 'djais.views.fleet_vessel_add2'),
+    (r'^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/users$', 'djais.views.fleet_users'),
+    (r'^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/lastpos$', 'djais.views.fleet_lastpos'),
+    (r'^vessel/$', 'djais.views.vessel_search'),
+    (r'^vessel/(?P<strmmsi>[0-9A-Z]{9})/$', 'djais.views.vessel'),
+    (r'^vessel/(?P<strmmsi>[0-9A-Z]{9})/manual_input$', 'djais.views.vessel_manual_input'),
+    (r'^vessel/(?P<strmmsi>[0-9A-Z]{9})/track$', 'djais.views.vessel_track'),
+    (r'^vessel/(?P<strmmsi>[0-9A-Z]{9})/animation$', 'djais.views.vessel_animation'),
+    (r'^user/$', 'djais.views.users'),
+    (r'^user/(?P<login>[a-zA-Z0-9_]+)/$', 'djais.views.user_detail'),
+    (r'^user/(?P<login>[a-zA-Z0-9_]+)/edit$', 'djais.views.user_edit'),
+    (r'^user/add$', 'djais.views.user_edit', {'login':None} ),
+    (r'^user/(?P<login>[a-zA-Z0-9_]+)/change_password$', 'djais.views.user_change_password'),
+    (r'^user/(?P<login>[a-zA-Z0-9_]+)/delete$', 'djais.views.user_delete'),
+    (r'^source/$', 'djais.views.sources'),
+    (r'^logout$', 'djais.views.logout'),
+
+    # Example:
+    # (r'^ais2/', include('ais2.foo.urls')),
+
+    # Uncomment the admin/doc line below and add 'django.contrib.admindocs' 
+    # to INSTALLED_APPS to enable admin documentation:
+    # (r'^admin/doc/', include('django.contrib.admindocs.urls')),
+
+    # Uncomment the next line to enable the admin:
+    # (r'^admin/(.*)', admin.site.root),
+)
diff --git a/bin/djais/views.py b/bin/djais/views.py
new file mode 100644 (file)
index 0000000..ddc539a
--- /dev/null
@@ -0,0 +1,688 @@
+# -*- coding: utf-8 -*-
+
+import os
+from datetime import *
+from time import time as ctime
+import crack
+import struct
+import rrdtool
+from django.http import *
+from django.template import loader, RequestContext
+from django import forms
+from django.shortcuts import render_to_response, get_object_or_404
+from django.db import IntegrityError
+
+from decoratedstr import remove_decoration
+
+from basicauth import http_authenticate
+from models import *
+from show_targets_ships import *
+from ais import Nmea, NmeaFeeder, strmmsi_to_mmsi, SHIP_TYPES, STATUS_CODES, AIS_STATUS_NOT_AVAILABLE, AIS_ROT_NOT_AVAILABLE, AIS_LATLON_SCALE, AIS_LON_NOT_AVAILABLE, AIS_LAT_NOT_AVAILABLE, AIS_COG_SCALE, AIS_COG_NOT_AVAILABLE, AIS_NO_HEADING, AIS_SOG_SCALE, AIS_SOG_NOT_AVAILABLE, AIS_SOG_MAX_SPEED, add_nmea1, add_nmea5_partial, load_fleet_to_uset
+from ntools import datetime_to_timestamp, clean_ais_charset
+
+STATS_DIR = '/var/lib/ais/stats'
+
+def auth(username, raw_password):
+    try:
+        user = User.objects.get(login=username)
+    except User.DoesNotExist:
+        return None
+    if not user.check_password(raw_password):
+        return None
+    return user
+
+
+@http_authenticate(auth, 'ais')
+def index(request):
+    return render_to_response('index.html', {}, RequestContext(request))
+
+
+class VesselSearchForm(forms.Form):
+    mmsi = forms.CharField(max_length=9, required=False)
+    name = forms.CharField(max_length=20, required=False)
+    imo = forms.IntegerField(required=False)
+    callsign = forms.CharField(max_length=7, required=False)
+    destination = forms.CharField(max_length=20, required=False)
+    def clean(self):
+        cleaned_data = self.cleaned_data
+        for value in cleaned_data.values():
+            if value:
+                return cleaned_data
+        raise forms.ValidationError("You must enter at least one criteria")
+
+
+
+@http_authenticate(auth, 'ais')
+def vessel_search(request):
+    if request.method == 'POST' or request.META['QUERY_STRING']:
+        form = VesselSearchForm(request.REQUEST)
+        if form.is_valid():
+            data = form.cleaned_data
+            vessels = Vessel.objects
+            if data['mmsi']:
+                vessels = vessels.filter(mmsi=strmmsi_to_mmsi(data['mmsi']))
+            if data['name']:
+                vessels = vessels.filter(name__contains=data['name'].upper())
+            if data['imo']:
+                vessels = vessels.filter(imo=data['imo'])
+            if data['callsign']:
+                vessels = vessels.filter(callsign__contains=data['callsign'].upper())
+            if data['destination']:
+                vessels = vessels.filter(destination__contains=data['destination'].upper())
+            return render_to_response('vessels.html', {'vessels': vessels}, RequestContext(request))
+    else: # GET
+        form = VesselSearchForm()
+
+    return render_to_response('vessel_index.html', {'form': form}, RequestContext(request))
+
+@http_authenticate(auth, 'ais')
+def vessel(request, strmmsi):
+    os.chdir('/home/nirgal/ais.nirgal.com/ais/') # FIXME
+    mmsi = strmmsi_to_mmsi(strmmsi)
+    vessel = get_object_or_404(Vessel, pk=mmsi)
+    nmea = Nmea.new_from_lastinfo(strmmsi)
+    #if not nmea.timestamp_1 and not nmea.timestamp_5:
+    #    raise Http404
+    return render_to_response('vessel.html', {'nmea': nmea}, RequestContext(request))
+
+
+class VesselManualInputForm(forms.Form):
+    timestamp = forms.DateTimeField(label=u'When', help_text=u'When the observation was made, in GMT. Use the YYYY-MM-DD HH:MM:SS format')
+    imo = forms.IntegerField(required=False, min_value=1000000, max_value=9999999)
+    name = forms.CharField(max_length=20, required=False)
+    callsign = forms.CharField(max_length=7, required=False)
+    type = forms.TypedChoiceField(required=False, choices = [ kv for kv in SHIP_TYPES.iteritems() if 'reserved' not in kv[1].lower()], coerce=int, empty_value=0, initial=0)
+    status = forms.TypedChoiceField(required=False, choices = [ kv for kv in STATUS_CODES.iteritems() if 'reserved' not in kv[1].lower()], coerce=int, empty_value=AIS_STATUS_NOT_AVAILABLE, initial=AIS_STATUS_NOT_AVAILABLE)
+    sog = forms.FloatField(label='Speed', help_text='Over ground, in knots', required=False, min_value=0, max_value=AIS_SOG_MAX_SPEED/AIS_SOG_SCALE)
+    latitude = forms.CharField(required=False)
+    longitude = forms.CharField(required=False)
+    cog = forms.FloatField(label='Course', help_text='Over ground', required=False, min_value=0.0, max_value=359.9)
+    heading = forms.IntegerField(required=False, min_value=0, max_value=359)
+
+    @staticmethod
+    def _clean_ais_charset(ustr):
+        ustr = remove_decoration(ustr) # benign cleaning, but can increase size (œ->oe)
+        ustr = ustr.upper() # benign cleaning
+        str = clean_ais_charset(ustr.encode('ascii', 'replace'))
+        if unicode(str) != ustr:
+            raise forms.ValidationError('Invalid character: AIS alphabet is @ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^- !"#$%&\'()*+,-./0123456789:;<=>?')
+        return str
+
+    def clean_timestamp(self):
+        data = self.cleaned_data['timestamp']
+        if data is None:
+            return None
+        if data < datetime.utcnow() - timedelta(365):
+            raise forms.ValidationError('Date is too far in the past.')
+        if data > datetime.utcnow():
+            raise forms.ValidationError('Date must not be in the future. This form is only for observed results.')
+        return datetime_to_timestamp(data)
+
+    def clean_imo(self):
+        data = self.cleaned_data['imo']
+        if data is None:
+            return 0
+        return data
+
+    def clean_name(self):
+        name = self.cleaned_data['name']
+        if name is None:
+            return ''
+        name = VesselManualInputForm._clean_ais_charset(name)
+        if len(name)>20:
+            raise forms.ValidationError('Ensure this value has at most 20 characters (it has %s).' % len(name))
+        return name
+
+    def clean_callsign(self):
+        callsign = self.cleaned_data['callsign']
+        if callsign is None:
+            return ''
+        callsign = VesselManualInputForm._clean_ais_charset(callsign)
+        if len(callsign)>7:
+            raise forms.ValidationError('Ensure this value has at most 7 characters (it has %s).' % len(callsign))
+        return callsign
+
+    def clean_sog(self):
+        sog = self.cleaned_data['sog']
+        if sog is None:
+            return AIS_SOG_NOT_AVAILABLE
+        return int(sog*AIS_SOG_SCALE)
+
+    def clean_latitude(self):
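+        # accepts degrees/minutes/seconds notation such as 43°12'N, 43°12'30"N
+        # or 43°N, and returns an integer in AIS units (degrees * AIS_LATLON_SCALE)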
+        data = self.cleaned_data['latitude']
+        data = data.replace(u"''", u'"') # commong mistake
+        data = data.replace(u' ', u'') # remove spaces
+        sides = u'SN'
+        if not data:
+            return AIS_LAT_NOT_AVAILABLE
+        tmp, side = data[:-1], data[-1]
+        if side == sides[0]:
+            side = -1
+        elif side == sides[1]:
+            side = 1
+        else:
+            raise forms.ValidationError(u'Last character must be either %s or %s.' % (sides[0], sides[1]))
+        spl = tmp.split(u'°')
+        if len(spl) == 1:
+            raise forms.ValidationError(u'You need to use the ° character.')
+        d, tmp = spl
+        try:
+            d = float(d)
+        except ValueError:
+            raise forms.ValidationError(u'Degrees must be a number. It\'s %s.' % d)
+        spl = tmp.split(u"'", 1)
+        if len(spl) == 1:
+            # no ' sign: ok only if there is nothing but the side after °
+            # we don't accept seconds if there is no minutes:
+            # It might be an entry mistake
+            tmp = spl[0]
+            if len(tmp) == 0:
+                m = s = 0
+            else:
+                raise forms.ValidationError(u'You must use the \' character between ° and %s.' % data[-1])
+        else:
+            m, tmp = spl
+            try:
+                m = float(m)
+            except ValueError:
+                raise forms.ValidationError(u'Minutes must be a number. It\'s %s.' % m)
+            if len(tmp) == 0:
+                s = 0
+            else:
+                if tmp[-1] != '"':
+                    raise forms.ValidationError(u'You must use the " character between seconds and %s.' % data[-1])
+                s = tmp[:-1]
+                try:
+                    s = float(s)
+                except ValueError:
+                    raise forms.ValidationError(u'Seconds must be a number. It\'s %s.' % s)
+        data = side * ( d + m / 60 + s / 3600)
+
+        if data < -90 or data > 90:
+            raise forms.ValidationError(u'%s is not in the -90..90 range' % data)
+        return int(data * AIS_LATLON_SCALE)
+
+    def clean_longitude(self):
+        data = self.cleaned_data['longitude']
+        data = data.replace(u"''", u'"') # commong mistake
+        data = data.replace(u' ', u'') # remove spaces
+        sides = u'WE'
+        if not data:
+            return AIS_LON_NOT_AVAILABLE
+        tmp, side = data[:-1], data[-1]
+        if side == sides[0]:
+            side = -1
+        elif side == sides[1]:
+            side = 1
+        else:
+            raise forms.ValidationError(u'Last character must be either %s or %s.' % (sides[0], sides[1]))
+        spl = tmp.split(u'°')
+        if len(spl) == 1:
+            raise forms.ValidationError(u'You need to use the ° character.')
+        d, tmp = spl
+        try:
+            d = float(d)
+        except ValueError:
+            raise forms.ValidationError(u'Degrees must be a number. It\'s %s.' % d)
+        spl = tmp.split(u"'", 1)
+        if len(spl) == 1:
+            # no ' sign: ok only if there is nothing but the side after °
+            # we don't accept seconds if there is no minutes:
+            # It might be an entry mistake
+            tmp = spl[0]
+            if len(tmp) == 0:
+                m = s = 0
+            else:
+                raise forms.ValidationError(u'You must use the \' character between ° and %s.' % data[-1])
+        else:
+            m, tmp = spl
+            try:
+                m = float(m)
+            except ValueError:
+                raise forms.ValidationError(u'Minutes must be a number. It\'s %s.' % m)
+            if len(tmp) == 0:
+                s = 0
+            else:
+                if tmp[-1] != '"':
+                    raise forms.ValidationError(u'You must use the " character between seconds and %s.' % data[-1])
+                s = tmp[:-1]
+                try:
+                    s = float(s)
+                except ValueError:
+                    raise forms.ValidationError(u'Seconds must be a number. It\'s %s.' % s)
+        data = side * ( d + m / 60 + s / 3600)
+
+        if data < -180 or data > 180:
+            raise forms.ValidationError(u'%s is not in the -180..180 range' % data)
+        return int(data * AIS_LATLON_SCALE)
+
+    def clean_cog(self):
+        data = self.cleaned_data['cog']
+        if data is None:
+            return AIS_COG_NOT_AVAILABLE
+        return int(data * AIS_COG_SCALE)
+    
+    def clean_heading(self):
+        #raise forms.ValidationError(u'clean_heading called')
+        data = self.cleaned_data['heading']
+        if data is None:
+            return AIS_NO_HEADING
+        return data
+
+    def clean(self):
+        cleaned_data = self.cleaned_data
+        if (cleaned_data.get('latitude', AIS_LAT_NOT_AVAILABLE) == AIS_LAT_NOT_AVAILABLE ) ^ ( cleaned_data.get('longitude', AIS_LON_NOT_AVAILABLE) == AIS_LON_NOT_AVAILABLE):
+            raise forms.ValidationError('It makes no sense to enter just a latitude or a longitude. Enter both or none.')
+        if cleaned_data.get('latitude', AIS_LAT_NOT_AVAILABLE) == AIS_LAT_NOT_AVAILABLE:
+            if cleaned_data.get('status', AIS_STATUS_NOT_AVAILABLE) != AIS_STATUS_NOT_AVAILABLE:
+                raise forms.ValidationError('It makes no sense to enter a status without coordinates. Please enter latitude and longitude too.')
+            if cleaned_data.get('sog', AIS_SOG_NOT_AVAILABLE) != AIS_SOG_NOT_AVAILABLE:
+                raise forms.ValidationError('It makes no sense to enter a speed without coordinates. Please enter latitude and longitude too.')
+            if cleaned_data.get('cog', AIS_COG_NOT_AVAILABLE) != AIS_COG_NOT_AVAILABLE:
+                raise forms.ValidationError('It makes no sense to enter a course without coordinates. Please enter latitude and longitude too.')
+            if cleaned_data.get('heading', AIS_NO_HEADING) != AIS_NO_HEADING:
+                raise forms.ValidationError('It makes no sense to enter a heading without coordinates. Please enter latitude and longitude too.')
+
+        if cleaned_data.get('timestamp', None) \
+        and cleaned_data.get('imo', 0) == 0 \
+        and cleaned_data.get('name', '') == '' \
+        and cleaned_data.get('callsign', '') == '' \
+        and cleaned_data.get('type', 0) == 0 \
+        and cleaned_data.get('status', AIS_STATUS_NOT_AVAILABLE) == AIS_STATUS_NOT_AVAILABLE \
+        and cleaned_data.get('sog', AIS_SOG_NOT_AVAILABLE) == AIS_SOG_NOT_AVAILABLE \
+        and cleaned_data.get('latitude', AIS_LAT_NOT_AVAILABLE) == AIS_LAT_NOT_AVAILABLE \
+        and cleaned_data.get('longitude', AIS_LON_NOT_AVAILABLE) == AIS_LON_NOT_AVAILABLE \
+        and cleaned_data.get('cog', AIS_COG_NOT_AVAILABLE) == AIS_COG_NOT_AVAILABLE \
+        and cleaned_data.get('heading', AIS_NO_HEADING) == AIS_NO_HEADING:
+            raise forms.ValidationError("You must enter some data, beside when.")
+        return cleaned_data
+
+@http_authenticate(auth, 'ais')
+def vessel_manual_input(request, strmmsi):
+    strmmsi = strmmsi.encode('utf-8')
+    nmea = Nmea.new_from_lastinfo(strmmsi)
+    if request.method == 'POST' or request.META['QUERY_STRING']:
+        form = VesselManualInputForm(request.REQUEST)
+        if form.is_valid():
+            data = form.cleaned_data
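+            # tag the record with a source of 'U' followed by the user id packed
+            # in 3 little-endian bytes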
+            source = 'U' +  struct.pack('<I', request.user.id)[0:3]
+            result = u''
+            if data['imo'] != 0 \
+            or data['name'] != '' \
+            or data['callsign'] != '' \
+            or data['type'] != 0:
+                toto = (strmmsi, data['timestamp'], data['imo'], data['name'], data['callsign'], data['type'], 0,0,0,0, 0,0,24,60, 0, '', source)
+                result += 'UPDATING NMEA 5: '+repr(toto)+'<br>'
+                add_nmea5_partial(*toto)
+            if data['status'] != AIS_STATUS_NOT_AVAILABLE \
+            or data['sog'] != AIS_SOG_NOT_AVAILABLE \
+            or data['latitude'] != AIS_LAT_NOT_AVAILABLE \
+            or data['longitude'] != AIS_LON_NOT_AVAILABLE \
+            or data['cog'] != AIS_COG_NOT_AVAILABLE \
+            or data['heading'] != AIS_NO_HEADING:
+                
+                toto = (strmmsi, data['timestamp'], data['status'], AIS_ROT_NOT_AVAILABLE, data['sog'], data['latitude'], data['longitude'], data['cog'], data['heading'], source)
+                result += 'UPDATING NMEA 1: '+repr(toto)+'<br>'
+                add_nmea1(*toto)
+            return HttpResponse('Not fully implemented: '+repr(data) + '<br>' + result)
+    else: # GET
+        form = VesselManualInputForm()
+    return render_to_response('vessel_manual_input.html', {'form': form, 'nmea': nmea}, RequestContext(request))
+
+@http_authenticate(auth, 'ais')
+def vessel_track(request, strmmsi):
+    ndays = request.REQUEST.get('ndays', 90)
+    try:
+        ndays = int(ndays)
+    except ValueError:
+        ndays = 90
+    grain = request.REQUEST.get('grain', 3600)
+    try:
+        grain = int(grain)
+    except ValueError:
+        grain = 3600
+    os.chdir('/home/nirgal/ais.nirgal.com/ais/') # FIXME
+    date_end = datetime.utcnow()
+    date_start = date_end - timedelta(ndays)
+    nmea_iterator = NmeaFeeder(strmmsi, date_end, date_start, granularity=grain)
+    value = kml_to_kmz(format_boat_track(nmea_iterator))
+    response = HttpResponse(value, mimetype="application/vnd.google-earth.kml")
+    response['Content-Disposition'] = 'attachment; filename=%s.kmz' % strmmsi
+    return response
+
+
+@http_authenticate(auth, 'ais')
+def vessel_animation(request, strmmsi):
+    ndays = request.REQUEST.get('ndays', 90)
+    try:
+        ndays = int(ndays)
+    except ValueError:
+        ndays = 90
+    grain = request.REQUEST.get('grain', 3600)
+    try:
+        grain = int(grain)
+    except ValueError:
+        grain = 3600
+    os.chdir('/home/nirgal/ais.nirgal.com/ais/') # FIXME
+    date_end = datetime.utcnow()
+    date_start = date_end - timedelta(ndays)
+    nmea_iterator = NmeaFeeder(strmmsi, date_end, date_start, granularity=grain)
+    value = kml_to_kmz(format_boat_intime(nmea_iterator))
+    response = HttpResponse(value, mimetype="application/vnd.google-earth.kml")
+    response['Content-Disposition'] = 'attachment; filename=%s.kmz' % strmmsi
+    return response
+
+
+@http_authenticate(auth, 'ais')
+def fleets(request):
+    fleetusers = request.user.fleetuser_set.all()
+    return render_to_response('fleets.html', {'fleetusers':fleetusers}, RequestContext(request))
+
+
+@http_authenticate(auth, 'ais')
+def fleet(request, fleetname):
+    fleet = get_object_or_404(Fleet, pk=fleetname)
+    if not FleetUser.objects.filter(fleet=fleetname, user=request.user.id).all():
+        return HttpResponseForbidden('<h1>Forbidden</h1>')
+    return render_to_response('fleet.html', {'fleet':fleet}, RequestContext(request))
+
+
+@http_authenticate(auth, 'ais')
+def fleet_vessels(request, fleetname):
+    fleet = get_object_or_404(Fleet, pk=fleetname)
+    if not FleetUser.objects.filter(fleet=fleetname, user=request.user.id).all():
+        return HttpResponseForbidden('<h1>Forbidden</h1>')
+    vessels = fleet.vessel.all()
+    return render_to_response('fleet_vessels.html', {'fleet':fleet, 'vessels': vessels}, RequestContext(request))
+
+
+@http_authenticate(auth, 'ais')
+def fleet_vessel_add(request, fleetname):
+    fleet = get_object_or_404(Fleet, pk=fleetname)
+    if not FleetUser.objects.filter(fleet=fleetname, user=request.user.id).all():
+        return HttpResponseForbidden('<h1>Forbidden</h1>')
+    strmmsi = request.REQUEST['mmsi']
+    mmsi = strmmsi_to_mmsi(strmmsi)
+    try:
+        vessel = Vessel.objects.get(pk=mmsi)
+    except Vessel.DoesNotExist:
+        return HttpResponse('No such vessel')
+    try:
+        FleetVessel(fleet=fleet, vessel=vessel).save()
+    except IntegrityError:
+        return HttpResponse('Integrity error: Is the ship already in that fleet?')
+    return HttpResponse('Done')
+
+
+class FleetAddVessel(forms.Form):
+    mmsi = forms.CharField(help_text=u'Enter one MMSI per line', required=False, widget=forms.Textarea)
+    #name = forms.CharField(max_length=20, required=False)
+    #imo = forms.IntegerField(required=False)
+    #callsign = forms.CharField(max_length=7, required=False)
+    #destination = forms.CharField(max_length=20, required=False)
+    def clean(self):
+        cleaned_data = self.cleaned_data
+        for value in cleaned_data.values():
+            if value:
+                return cleaned_data
+        raise forms.ValidationError("You must enter at least one criteria")
+
+@http_authenticate(auth, 'ais')
+def fleet_vessel_add2(request, fleetname):
+    fleet = get_object_or_404(Fleet, pk=fleetname)
+    if not FleetUser.objects.filter(fleet=fleetname, user=request.user.id).all():
+        return HttpResponseForbidden('<h1>Forbidden</h1>')
+    if request.method == 'POST' or request.META['QUERY_STRING']:
+        form = FleetAddVessel(request.REQUEST)
+        if form.is_valid():
+            data = form.cleaned_data
+            result = []
+            a_strmmsi = request.REQUEST['mmsi']
+            if a_strmmsi:
+                for strmmsi in a_strmmsi.split('\n'):
+                    strmmsi = strmmsi.strip('\r')
+                    if not strmmsi:
+                        continue
+                    try:
+                        sqlmmsi = strmmsi_to_mmsi(strmmsi)
+                    except AssertionError:
+                        result.append('Invalid mmsi %s' % strmmsi)
+                        continue
+                    try:
+                        vessel = Vessel.objects.get(pk=sqlmmsi)
+                    except Vessel.DoesNotExist:
+                        result.append('No vessel with MMSI '+strmmsi)
+                        continue
+                    try:
+                        fv = FleetVessel.objects.get(fleet=fleet, vessel=vessel)
+                        result.append('Vessel with MMSI %s is already in that fleet' % strmmsi)
+                    except FleetVessel.DoesNotExist:
+                        FleetVessel(fleet=fleet, vessel=vessel).save()
+                        result.append('Vessel with MMSI %s added' % strmmsi)
+
+            return HttpResponse('<br>'.join(result))
+    else: # GET
+        form = FleetAddVessel()
+
+    return render_to_response('fleet_vessel_add.html', {'form': form, 'fleet': fleet}, RequestContext(request))
+
+
+@http_authenticate(auth, 'ais')
+def fleet_users(request, fleetname):
+    fleet = get_object_or_404(Fleet, pk=fleetname)
+    if not FleetUser.objects.filter(fleet=fleetname, user=request.user.id).all():
+        return HttpResponseForbidden('<h1>Forbidden</h1>')
+
+    message = u''
+    if request.method == 'POST' or request.META['QUERY_STRING']:
+        action = request.REQUEST['action']
+        userlogin = request.REQUEST['user']
+        try:
+            user = User.objects.get(login=userlogin)
+        except User.DoesNotExist:
+            message = u'User %s does not exist.' % userlogin
+        else:
+            if action == u'add':
+                try:
+                    fu = FleetUser.objects.get(fleet=fleet, user=user)
+                    message = u'User %s already has access.' % user.login
+                except FleetUser.DoesNotExist:
+                    FleetUser(fleet=fleet, user=user).save()
+                    message = u'Granted access to user %s.' % user.login
+            elif action == u'revoke':
+                try:
+                    fu = FleetUser.objects.get(fleet=fleet, user=user)
+                    fu.delete()
+                    message = u'Revoked access to user %s.' % user.login
+                except FleetUser.DoesNotExist:
+                    message = u'User %s didn\'t have access.' % user.login
+            else:
+                message = u'Unknown action %s' % action
+
+    fleetusers = fleet.fleetuser_set.all()
+    otherusers = User.objects.exclude(id__in=[fu.user.id for fu in fleetusers]).order_by('name')
+    return render_to_response('fleet_users.html', {'fleet':fleet, 'fleetusers': fleetusers, 'otherusers': otherusers, 'message': message }, RequestContext(request))
+
+
+@http_authenticate(auth, 'ais')
+def fleet_lastpos(request, fleetname):
+    fleet = get_object_or_404(Fleet, pk=fleetname)
+    if not FleetUser.objects.filter(fleet=fleetname, user=request.user.id).all():
+        return HttpResponseForbidden('<h1>Forbidden</h1>')
+    fleet_uset = load_fleet_to_uset(fleetname)
+    # = set([mmsi_to_strmmsi(vessel.mmsi) for vessel in fleet.vessel.all()])
+    os.chdir('/home/nirgal/ais.nirgal.com/ais/') # FIXME
+    value = kml_to_kmz(format_fleet(fleet_uset, document_name=fleetname+' fleet').encode('utf-8'))
+    response = HttpResponse(value, mimetype="application/vnd.google-earth.kml")
+    response['Content-Disposition'] = 'attachment; filename=%s.kmz' % fleetname
+    return response
+
+
+@http_authenticate(auth, 'ais')
+def users(request):
+    users = User.objects.all()
+    for user in users:
+        user.admin_ok = user.is_admin_by(request.user.id)
+    return render_to_response('users.html', {'users':users}, RequestContext(request))
+
+
+class UserEditForm(forms.Form):
+    login = forms.RegexField(regex=r'^[a-zA-Z0-9_]+$', max_length=16,
+        error_message ='Login must only contain letters, numbers and underscores')
+    name = forms.CharField(max_length=50)
+    email = forms.EmailField()
+    def __init__(self, *args, **kargs):
+        forms.Form.__init__(self, *args, **kargs)
+        self.old_login = kargs['initial']['login']
+    def clean_login(self):
+        new_login = self.cleaned_data['login']
+        if new_login != self.old_login:
+            if  User.objects.filter(login=new_login).count():
+                raise forms.ValidationError("Sorry that login is already in use. Try another one.")
+        return new_login
+
+@http_authenticate(auth, 'ais')
+def user_detail(request, login):
+    user = get_object_or_404(User, login=login)
+    user.admin_ok = user.is_admin_by(request.user.id)
+    return render_to_response('user_detail.html', {'auser': user}, RequestContext(request))
+
+@http_authenticate(auth, 'ais')
+def user_edit(request, login):
+    initial = {}
+    if login:
+        user = get_object_or_404(User, login=login)
+        if not user.is_admin_by(request.user.id):
+            return HttpResponseForbidden('403 Forbidden')
+    else:
+        user = User()
+        user.father_id = request.user.id;
+    initial['login'] = user.login
+    initial['name'] = user.name
+    initial['email'] = user.email
+    if request.method == 'POST':
+        form = UserEditForm(request.POST, initial=initial)
+        if form.is_valid():
+            user.login = form.cleaned_data['login']
+            user.name = form.cleaned_data['name']
+            user.email = form.cleaned_data['email']
+            user.save()
+            return HttpResponseRedirect('/user/')
+    else: # GET
+        form = UserEditForm(initial=initial)
+
+    return render_to_response('user_edit.html', {'form':form, 'auser': user}, RequestContext(request))
+
+
+class ChangePasswordForm(forms.Form):
+    new_password = forms.CharField(max_length=16, widget=forms.PasswordInput())
+    new_password_check = forms.CharField(max_length=16, widget=forms.PasswordInput())
+    def clean_generic_password(self, field_name):
+        password = self.cleaned_data[field_name]
+        try:
+            crack.FascistCheck(password)
+        except ValueError, err:
+            raise forms.ValidationError(err.message)
+        return password
+
+    def clean_new_password(self):
+        return self.clean_generic_password('new_password')
+    def clean_new_password_check(self):
+        return self.clean_generic_password('new_password_check')
+    def clean(self):
+        cleaned_data = self.cleaned_data
+        pass1 = cleaned_data.get('new_password')
+        pass2 = cleaned_data.get('new_password_check')
+        if pass1 != pass2:
+            self._errors['new_password_check'] = forms.util.ErrorList(['The two passwords must match'])
+            del cleaned_data['new_password_check']
+        return cleaned_data
+
+
+@http_authenticate(auth, 'ais')
+def user_change_password(request, login):
+    user = get_object_or_404(User, login=login)
+    if not user.is_admin_by(request.user.id):
+        return HttpResponseForbidden('403 Forbidden')
+    if request.method == 'POST':
+        form = ChangePasswordForm(request.POST)
+        if form.is_valid():
+            user.set_password(form.cleaned_data['new_password'])
+            user.save()
+            return HttpResponseRedirect('/user/')
+    else: # GET
+        form = ChangePasswordForm()
+    return render_to_response('user_change_password.html', {'form':form, 'auser':user}, RequestContext(request))
+
+
+@http_authenticate(auth, 'ais')
+def user_delete(request, login):
+    user = get_object_or_404(User, login=login)
+    if not user.is_admin_by(request.user.id):
+        return HttpResponseForbidden('403 Forbidden')
+    if request.REQUEST.get('confirm', None):
+        user.delete()
+        return HttpResponseRedirect('/user/')
+    return render_to_response('user_delete.html', {'form':None, 'auser':user}, RequestContext(request))
+
+
+def logout(request):
+    # TODO
+    return HttpResponse('Not implemented')
+    #response = render_to_response('logout.html', {}, RequestContext(request))
+    #return response
+
+@http_authenticate(auth, 'ais')
+def sources(request):
+    os.chdir('/home/nirgal/ais.nirgal.com/ais/') # FIXME
+    png_stat_dir = '/home/nirgal/ais.nirgal.com/www/stats'
+    sources = ( 'NMMT', 'NMKT', 'NMRW', 'NMNZ', 'NMEZ', 'NMAS' )
+    now = int(ctime())
+    periods = ({
+        'name_tiny': '2h',
+        'name_long': '2 hours',
+        'seconds': 2*60*60
+        }, {
+        'name_tiny': '6h',
+        'name_long': '6 hours',
+        'seconds': 6*60*60,
+        }, {
+        'name_tiny': '2d',
+        'name_long': '2 days',
+        'seconds': 2*24*60*60
+        #}, {
+        #'name_tiny': '7d',
+        #'name_long': '1 week',
+        #'seconds': 7*24*60*60
+        })
+        
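+    # For each source and period, (re)generate two PNG graphs with rrdtool:
+    # DEF pulls the averaged series out of <source>.rrd, CDEF converts bytes/s
+    # to bits/s with the RPN expression 'bytes,8,*', LINE draws each curve and
+    # --lazy skips regeneration when the existing PNG is still up to date.
+    # (STATS_DIR is expected to be defined elsewhere in this module.)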
+    for source in sources:
+        for period in periods:
+            args = os.path.join(STATS_DIR, source+'-'+period['name_tiny']+'-bytes.png'), \
+                '--lazy', \
+                '-l', '0', \
+                '--title', source+' - Bandwidth usage - '+period['name_long'], \
+                '--start', '%d' % (now-period['seconds']), \
+                '--end', '%d' % now, \
+                '--vertical-label', 'bps', \
+                'DEF:bytes=%s:bytes:AVERAGE' % os.path.join(STATS_DIR, source+'.rrd'), \
+                'DEF:rawbytes=%s:rawbytes:AVERAGE' % os.path.join(STATS_DIR, source+'.rrd'), \
+                'CDEF:bits=bytes,8,*', \
+                'CDEF:rawbits=rawbytes,8,*', \
+                'LINE:bits#00FF00:IP payload', \
+                'LINE:rawbits#FF0000:IP with headers'
+            rrdtool.graph(*args)
+            args = os.path.join(STATS_DIR, source+'-'+period['name_tiny']+'-counts.png'), \
+                '--lazy', \
+                '-l', '0', \
+                '--title', source+' - Packet\'izer stats - '+period['name_long'], \
+                '--start', '%d' % (now-period['seconds']), \
+                '--end', '%d' % now, \
+                '--vertical-label', 'Hz', \
+                'DEF:packets=%s:packets:AVERAGE' % os.path.join(STATS_DIR, source+'.rrd'), \
+                'DEF:lines=%s:lines:AVERAGE' % os.path.join(STATS_DIR, source+'.rrd'), \
+                'LINE:packets#FF0000:input packets', \
+                'LINE:lines#00FF00:AIVDM lines'
+            rrdtool.graph(*args)
+
+    return render_to_response('sources.html', {'sources':sources, 'periods': periods}, RequestContext(request))
diff --git a/bin/earth3d.py b/bin/earth3d.py
new file mode 100755 (executable)
index 0000000..367bccc
--- /dev/null
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import math
+
+# earth mean radius: 6371 km
+# nautical mile mean size: 1.8523 km
+EARTH_RADIUS_NM = 6371./1.8523
+AIS_ANGLE_TO_RADIAN = 1/600000.0*math.pi/180
+
+def latlon_to_xyz_rad(lat, lon):
+    """
+    Transform a latitude/longitude in radians
+    into an x,y,z position in nautical miles
+    """
+    coslat = math.cos(lat) # computed only once
+    x = EARTH_RADIUS_NM * math.cos(lon) * coslat
+    y = EARTH_RADIUS_NM * math.sin(lon) * coslat
+    z = EARTH_RADIUS_NM * math.sin(lat)
+    return x, y, z
+
+def latlon_to_xyz_deg(lat, lon):
+    """
+    Transform a latitude/longitude in decimal degrees
+    into an x,y,z position in nautical miles
+    """
+    return latlon_to_xyz_rad(lat*math.pi/180, lon*math.pi/180)
+
+def latlon_to_xyz_ais(lat, lon):
+    """
+    Transform a latitude/longitude in AIS format (1/10,000th of a minute)
+    into an x,y,z position in nautical miles
+    """
+    return latlon_to_xyz_rad(lat*AIS_ANGLE_TO_RADIAN, lon*AIS_ANGLE_TO_RADIAN)
+
+def dist3_xyz(pos1, pos2):
+    # return the chord distance between two xyz points (straight line through the Earth)
+    dx = pos1[0] - pos2[0]
+    dy = pos1[1] - pos2[1]
+    dz = pos1[2] - pos2[2]
+    return math.sqrt(dx*dx+dy*dy+dz*dz)
+
+def dist3_latlong_ais(pos1, pos2):
+    return dist3_xyz(latlon_to_xyz_ais(*pos1), latlon_to_xyz_ais(*pos2))
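+# Note: dist3_* return the chord, not the distance along the surface; if needed,
+# the great-circle arc can be recovered as
+#   2 * EARTH_RADIUS_NM * math.asin(chord / (2 * EARTH_RADIUS_NM))
+# (a sketch, not used elsewhere in this module).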
+
+if __name__ == '__main__':
+    eq_af = latlon_to_xyz_deg(0,0) # equator, africa
+    print eq_af
+    eq_in = latlon_to_xyz_deg(0,90) # equator indian ocean
+    print eq_in
+    north = latlon_to_xyz_deg(90, 0) # north pole
+    print north
+    south = latlon_to_xyz_deg(-90, 0) # south pole
+    print south
+    print "distance poles = ", dist3_xyz(north, south)
+    paris = latlon_to_xyz_deg(48.+51./60, 2.+21./60)
+    lemans = latlon_to_xyz_deg(48.+1./60, 0.+11./60)
+    print "distance paris/lemans = ", dist3_xyz(paris, lemans), "NM"
diff --git a/bin/gpsdec.py b/bin/gpsdec.py
new file mode 100755 (executable)
index 0000000..3919a9f
--- /dev/null
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+
+import sys
+from ctypes import *
+
+class GPSD_AIS_T_5(Structure):
+    _fields_= [
+        ('type_', c_uint),
+        ('repeat', c_uint),
+        ('mmsi', c_uint),
+        ('ais_version', c_uint),
+        ('imo', c_uint),
+        ('callsign', c_char * 21),
+        ('buffer', c_char*1000)
+        ]
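+    # Note: only the first fields of gpsd's AIS type 5 message structure are
+    # mapped here; 'buffer' is padding so the library can safely write past the
+    # fields we actually read (layout assumed from the gpsd sources of the era).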
+        
+gpsd = cdll.LoadLibrary('libgps.so')
+try:
+    gpsd.aivdm_decode
+except AttributeError:
+    print >> sys.stderr, "Shared library gps doesn't export aivdm_decode, trying gpsd lib"
+    gpsd = cdll.LoadLibrary('libgpsd.so')
+    gpsd.aivdm_decode
+gpsd.gpsd_report_set_level(1)
+
+sentences = """
+!AIVDM,1,1,,B,402Un8iua=Gss2RJohGenF100@0C,0*32
+!AIVDM,1,1,,B,142LU8@P002PotTH2QFP0?v:0@3m,0*5E
+!AIVDM,1,1,,B,15E<O:001V2Go`@HOwT2MR0>084U,0*1E
+!AIVDM,1,1,,B,19tHTr0P032PquhH1cpqd?vL0D0:,0*5E
+!AIVDM,1,1,,B,142LTS0P002PuDHH0nw2Cc8F0`9j,0*2C
+!AIVDM,1,1,,B,402Un8iua=P0s2RJp>GenF100<0j,0*7A
+!AIVDM,2,1,4,B,542LTS02=K45@<<sD004hEJ0d4l4p0000000001@000005=o0>BDm0CPEC00,0*25
+!AIVDM,2,2,4,B,00000000000,2*23
+!AIVDM,1,1,,B,142LU8@P002PotTH2QFP0?v:0<08,0*74
+""".split('\n')
+
+ais_context = c_buffer('\000' * (4+4+91*4+92+2048+21+4+1024))
+ais_data = GPSD_AIS_T_5()
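+# Both buffers are only passed by reference to aivdm_decode below; the context
+# size is a rough guess meant to cover gpsd's internal AIVDM decoder state.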
+for sentence in sentences:
+    if not gpsd.aivdm_decode(sentence, len(sentence)+1, byref(ais_context), byref(ais_data)):
+        continue # incomplete packet
+    if ais_data.type_ != 5:
+        continue
+    print ais_data.mmsi
diff --git a/bin/gpsdecoded.py b/bin/gpsdecoded.py
new file mode 100755 (executable)
index 0000000..6309cd8
--- /dev/null
@@ -0,0 +1,125 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import sys, struct
+from ais import *
+
+from datetime import datetime
+from ntools import datetime_to_timestamp
+
+
+BLACKLIST_MMSI_SYNC = set( (2717205, 2717213, 2717216, 2391300, 2393200, ) )
+
+last_known_timestamp = None
+last_known_timestamp_from = None
+def gpsdecode_to_db(line, olddata, source):
+    global last_known_timestamp
+    global last_known_timestamp_from
+    try:
+        type, line = line.split('|', 1)
+    except ValueError, err:
+        print str(err)
+        print repr(line)
+        return False
+    
+    if type in ('1','2','3'):
+        try:
+            ri, mmsi, status, rot, sog, accuracy, longitude, latitude, cog, heading, utc_second, maneuver, raim, radio = line.split('|')
+        except ValueError, err:
+            print str(err)
+            print repr(line)
+            return False
+        #print ri, mmsi, status, rot, sog, accuracy, longitude, latitude, cog, heading, utc_second, maneuver, raim, radio
+        if olddata:
+            if not last_known_timestamp:
+                print "No sync. Ignoring packet"
+                return False
+            timestamp = last_known_timestamp
+        else:
+            timestamp = datetime_to_timestamp(datetime.utcnow()) # FIXME
+        add_nmea1(mmsi, timestamp, int(status), int(rot), int(sog), int(latitude), int(longitude), int(cog), int(heading), source)
+        return True
+
+    elif type == '4':
+        ri, mmsi, dt, accuracy, longitude, latitude, epfd, raim, radio = line.split('|')
+        # print ri, mmsi, dt, accuracy, longitude, latitude, epfd, raim, radio
+        try:
+            timestamp = datetime_to_timestamp(datetime.strptime(dt, '%Y:%m:%dT%H:%M:%SZ'))
+            if olddata:
+                if last_known_timestamp is None:
+                    last_known_timestamp = timestamp
+                    last_known_timestamp_from = mmsi
+                    print "Synchronisation done"
+                else:
+                    if int(mmsi) in BLACKLIST_MMSI_SYNC:
+                        #print 'Ignoring blacklisted base', mmsi, 'sync', datetime.utcfromtimestamp(timestamp), 'Last known sync stays', datetime.utcfromtimestamp(last_known_timestamp), 'from', last_known_timestamp_from
+                        pass
+                    else:
+                        if timestamp<last_known_timestamp:
+                            print "Ignoring base", mmsi, "sync", datetime.utcfromtimestamp(timestamp), "too old, previous sync was", datetime.utcfromtimestamp(last_known_timestamp), 'from', last_known_timestamp_from
+                        elif timestamp > last_known_timestamp+6*60*60: # 6 hours
+                            print "Ignoring base", mmsi, "sync", datetime.utcfromtimestamp(timestamp), "too far away, previous sync was", datetime.utcfromtimestamp(last_known_timestamp), 'from', last_known_timestamp_from
+                        else:
+                            last_known_timestamp = timestamp
+                            last_known_timestamp_from = mmsi
+        except:
+            pass
+        return False
+
+    elif type == '5':
+        #print line
+        ri, mmsi, imo, ais_version, callsign, name, type, dim_bow, dim_stern, dim_port, dim_starboard, epfd, eta, draught, destination, dte = line.split('|')
+        #print ri, mmsi, imo, ais_version, callsign, name, type, dim_bow, dim_stern, dim_port, dim_starboard, epfd, eta, draught, destination, dte
+        if olddata:
+            if not last_known_timestamp:
+                print "No sync. Ignoring packet"
+                return False
+            timestamp = last_known_timestamp
+        else:
+            timestamp = int(datetime_to_timestamp(datetime.utcnow()))
+        eta1,eta2 = eta.split('T')
+        eta_M, eta_D = eta1.split('-')
+        eta_h, eta_m = eta2[:-1].split(':')
+        #print eta_M, eta_D, eta_h, eta_m
+        add_nmea5_full(mmsi, timestamp, int(imo), name, callsign, int(type), int(dim_bow), int(dim_stern), int(dim_port), int(dim_starboard), int(eta_M), int(eta_D), int(eta_h), int(eta_m), int(draught), destination, source)
+        return False
+
+    else:
+        return False
+
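+# Hypothetical example of the pipe-separated records produced by gpsdecode that
+# this parser expects (field order taken from the unpacking above, values made up):
+#   1|0|244010000|0|0|102|1|2469152|31554583|911|511|23|0|0|33577
+# i.e. type|ri|mmsi|status|rot|sog|accuracy|longitude|latitude|cog|heading|
+#      utc_second|maneuver|raim|radio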
+
+if __name__ == '__main__':
+    from optparse import OptionParser
+
+    parser = OptionParser('%prog [options] sourcename')
+    parser.add_option('--olddata', help="specify that the input is old data. Timestamp will be re-generated from NMEA type 4 messages (Base station time broadcast).", action='store_true', dest='olddata', default=False)
+    parser.add_option('--db', help="path to filesystem database. Default=%default", action='store', dest='db', default=DBPATH)
+    parser.add_option('--max-count', help="maximum number of lines read", action='store', type='int', dest='max_count')
+    (options, args) = parser.parse_args()
+
+    DBPATH = options.db
+
+    if len(args) != 1 or len(args[0])>4:
+        print >> sys.stderr, 'You must give a 4-letter source name'
+        sys.exit(1)
+    source = args[0]
+    source += ' '*(4-len(source)) # pad with spaces
+
+    while (True):
+        line = sys.stdin.readline()
+        if not line:
+            break
+        while len(line) and line[-1:] in '\r\n':
+            line = line[:-1] # strip trailing \r or \n
+        if not line:
+            continue
+        if line.startswith('gpsdecode: '):
+            continue
+        try:
+            gpsdecode_to_db(line, olddata=options.olddata, source=source)
+        except Exception, e:
+            print >> sys.stderr, repr(e)
+            print >> sys.stderr, 'Unhandled exception while parsing '+repr(line)
+        except:
+            print >> sys.stderr, 'Unhandled raise while parsing '+repr(line)
+
diff --git a/bin/html_parser.py b/bin/html_parser.py
new file mode 100755 (executable)
index 0000000..40c2875
--- /dev/null
@@ -0,0 +1,490 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+
+import sys, htmlentities
+from optparse import OptionParser
+
+VERBOSE_PARSER = False
+
+TI_EMPTY    = 1 # there's no content in these tags, i.e. assume <tagname ... />
+taginfo = {
+    u'meta': TI_EMPTY,
+    u'link': TI_EMPTY,
+    u'br':  TI_EMPTY,
+    u'img':  TI_EMPTY,
+    u'hr':  TI_EMPTY,
+}
+
+class Node:
+    class Flags:
+        ROOT    = 1 # this is the root node. There can be only one root
+        CLOSING = 2 # this is a closing tag such as </b>. These tags from the lexer are discarded by the parser
+        CLOSED  = 4 # this is closed. Uncleaned output will only have a closing tag if that flag is present.
+
+    def __init__(self):
+        self.father = None
+        self.children = []
+        self.flags = 0
+
+class Tag(Node):
+    def __init__(self):
+        Node.__init__(self)
+        self.name = u''
+        self.attributes = {}
+
+    def get_tag_info(self):
+        """
+        Returns TI_ flags based on the name of the tag
+        """
+        return taginfo.get(self.name, 0)
+
+    def __unicode__(self):
+        #assert self.name != u''
+        result = u'<'
+        if self.flags & Node.Flags.CLOSING:
+            result += u'/'
+        result += self.name
+        for k,v in self.attributes.iteritems():
+            #result += u' (('+k+u'))'
+            result += u' '+k
+            if v:
+                result += u'="'+v.replace(u'\\', u'\\\\').replace(u'"', u'\\"')+'"'
+        result += u'>'
+        return result
+
+    def __repr__(self):
+        #return 'Tag'+unicode(self).encode('utf8')
+        return unicode(self).encode('utf8')
+
+class Leaf(Node):
+    # TODO: rename this to CDATA or whatever
+    def __init__(self, text):
+        Node.__init__(self)
+        self.text = htmlentities.resolve(text)
+    def __unicode__(self):
+        return self.text # FIXME escape ?
+    def __repr__(self):
+        #return 'Leaf<'+repr(self.text.encode('utf8'))+'>'
+        return repr(self.text.encode('utf8'))
+
+
+def html_lexer(page):
+    """
+    This iterator yields Nodes with father/children unset
+    """
+    buf = page # buffer
+    pos = 0 # everything before that position has already been parsed
+    l = len(buf) # constant length
+    state = 0
+
+    def buffind(token):
+        r = buf.find(token, pos)
+        if r==-1:
+            return None
+        return r
+
+    def get_next_tag():
+        state = 'INIT'
+        state_white_skiping = False
+        p = pos # will start with skipping '<'
+        tag = Tag()
+        while True:
+            p += 1
+            if p>=l: # EOS
+                return None, p # what about last?
+            c = buf[p]
+           
+            if state_white_skiping:
+                if ord(c)<=32:
+                    continue
+                else:
+                    state_white_skiping = False
+                
+            if state == 'INIT':
+                if c == u'/':
+                    tag.flags += Node.Flags.CLOSING
+                    continue
+                elif c == u'>':
+                    return tag, p+1
+                else:
+                    state = 'NAME'
+                    tag.name += c.lower()
+                    continue
+            elif state == 'NAME':
+                if ord(c)<=32 or c==u'/':
+                    state = 'ATT_NAME'
+                    att_name = u''
+                    state_white_skiping = True
+                    continue
+                elif c == u'>':
+                    return tag, p+1
+                else:
+                    tag.name += c.lower()
+                    continue
+            elif state == 'ATT_NAME':
+                if ord(c)<=32:
+                    state = 'ATT_EQUALS'
+                    state_white_skiping = True
+                    continue
+                elif c == u'=':
+                    state = 'ATT_VALUE'
+                    state_white_skiping = True
+                    att_value = u''
+                    continue
+                elif c == u'>':
+                    if att_name != u'':
+                        tag.attributes[att_name] = u''
+                    return tag, p+1
+                else:   
+                    att_name += c.lower()
+                    continue
+            elif state == 'ATT_EQUALS':
+                if ord(c)<=32:
+                    continue
+                elif c == u'=':
+                    state = 'ATT_VALUE'
+                    state_white_skiping = True
+                    att_value = u''
+                    continue
+                elif c == u'>':
+                    if att_name != u'':
+                        tag.attributes[att_name] = u''
+                    return tag, p+1
+                else:
+                    if att_name != u'':
+                        tag.attributes[att_name] = u''
+                    state = 'ATT_NAME'
+                    att_name = c.lower()
+                    state_white_skiping = True
+                    continue
+            elif state == 'ATT_VALUE':
+                if att_value == u'': # first char
+                    if c == u'"' or c == u"'":
+                        att_value_escape = c
+                        state = 'ATT_VALUE_QUOTED'
+                        continue
+                if ord(c)<=32:
+                    tag.attributes[att_name] = att_value
+                    state = 'ATT_NAME'
+                    state_white_skiping = True
+                    att_name = u''
+                    continue
+                elif c == u'>':
+                    tag.attributes[att_name] = att_value
+                    return tag, p+1
+                else:
+                    att_value += c
+                    continue
+            elif state == 'ATT_VALUE_QUOTED':
+                if c == att_value_escape:
+                    tag.attributes[att_name] = att_value
+                    state = 'ATT_NAME'
+                    state_white_skiping = True
+                    att_name = u''
+                    continue
+                else:
+                    att_value += c
+                    continue
+
+    while True:
+        # get next tag position
+        # TODO: check it's a real tag and not a fragment that should be added to that leaf node
+        pt1 = buffind(u'<')
+        if pt1 != pos:
+            yield Leaf(buf[pos:pt1])
+            if pt1 is None:
+                return
+        pos = pt1
+        
+        tag, pos = get_next_tag()
+        yield tag
+
+
+def html_parse(page):
+    """
+    This function fetches the nodes from the lexer and assembles them into a node tree
+    """
+    root = Tag()
+    root.flags = Node.Flags.ROOT
+    father = root
+    for node in html_lexer(page):
+        if isinstance(node, Leaf):
+            node.father = father
+            father.children.append(node)
+        elif node.flags & Node.Flags.CLOSING:
+            # change current father
+            newfather = father
+            while True:
+                # TODO: optimize with Node.Flags.ROOT
+                if newfather is None:
+                    #TODO: log.debug()
+                    if VERBOSE_PARSER:
+                        print >> sys.stderr, 'Closing tag', node, 'does not match any opening tag. Discarding.'
+                    break
+                if newfather.name == node.name:
+                    newfather.flags |= Node.Flags.CLOSED
+                    if VERBOSE_PARSER:
+                        if newfather != father:
+                            print >> sys.stderr, 'Closing tag', node, 'has auto-closed other nodes',
+                            deb = father
+                            while deb != newfather:
+                                print >> sys.stderr, deb,
+                                deb = deb.father
+                            print >> sys.stderr
+                    father = newfather.father
+                    break
+                newfather = newfather.father
+        else:
+            node.father = father
+            father.children.append(node)
+            #print 'node=',node,'info=',node.get_tag_info()
+            if not node.get_tag_info() & TI_EMPTY:
+                father = node
+        #print 'node=',node,'father=',father
+    return root
+
+
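+# Minimal usage sketch (hypothetical snippet):
+#   root = html_parse(u'<table><tr><td>A</td><td>B</td></tr></table>')
+#   split_table_r_to_leaf(root)    ->  [[[u'A', u'B']]]
+#   get_merged_leaf_content(root)  ->  u'AB'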
+def print_idented_tree(node, identation_level=-1):
+    if not node.flags & Node.Flags.ROOT:
+        print '   '*identation_level+repr(node)
+    for c in node.children:
+        print_idented_tree(c, identation_level+1)
+    if isinstance(node, Tag) and (node.flags&Node.Flags.CLOSED):
+        print '   '*identation_level+'</'+node.name.encode('utf8')+'>'
+
+def print_lexer_tree(p):
+    identing = 0
+    for item in html_lexer(p):
+        if isinstance(item, Tag) and item.flags & Node.Flags.CLOSING:
+            identing -= 1
+        print '   '*identing,
+        if isinstance(item, Tag) and not item.flags & Node.Flags.CLOSING:
+            identing += 1
+        print repr(item)
+
+
+def get_elem(root, tagname):
+    """
+    Returns all the elements whose name matches,
+    but does not descend into the children of those matches
+    """
+    if isinstance(root, Leaf):
+        return []
+    if root.name == tagname:
+        return [ root ]
+    results = []
+    for node in root.children:
+        match = get_elem(node, tagname)
+        if match:
+            results += match
+    return results
+        
+
+def split_table(table):
+    """
+    Returns table content as a list (rows) of lists (columns)
+    """
+    ctr = []
+    for tr in get_elem(table, u'tr'):
+        ctd = []
+        for td in get_elem(tr, u'td'):
+            ctd += [ td ]
+        ctr.append(ctd)
+    return ctr
+
+def split_table_r_to_leaf(root):
+    """
+    Recursively splits tables as described in split_table
+    Only returns leaf text, or lists for sub-tables
+    """
+    result = []
+    tables = get_elem(root, u'table')
+    if len(tables)==0:
+        return get_merged_leaf_content(root)
+    for table in tables:
+        rrow = []
+        for row in split_table(table):
+            rcol = []
+            for col in row:
+                subr = split_table_r_to_leaf(col)
+                rcol.append(subr)
+            rrow.append(rcol)
+        result.append(rrow)
+    return result
+        
+
+def get_merged_leaf_content(root):
+    """
+    Returns all the leaf content aggregated into a string
+    """
+    if isinstance(root, Leaf):
+        return root.text
+
+    result = u''
+    for node in root.children:
+        result += get_merged_leaf_content(node)
+    return result
+
+
+get_inner_text = get_merged_leaf_content
+
+
+def ets_decode_html(event_html):
+    def ets_cleanup(target):
+        if isinstance(target, unicode):
+            return target.replace(u'\xa0',u'').strip()
+        else:
+            return [ ets_cleanup(i) for i in target ]
+
+    def ets_print(prefix, target):
+        has_sublists = False
+        for i in target:
+            if isinstance(i, list):
+                has_sublists = True
+        if isinstance(target, unicode):
+            if target:
+                print prefix, repr(target.encode('utf8'))
+            return
+        if not has_sublists:
+            if len(target) == 1 and target[0]==u'':
+                return
+            print prefix, [ i.encode('utf8') for i in target ]
+            return
+        for i,sub in enumerate(target):
+            ets_print(prefix+u'-'+unicode(i), sub)
+
+    def list_pack(root):
+        if isinstance(root, unicode):
+            return root
+        result = []
+        for i in root:
+            i = list_pack(i)
+            if i and i!=u'':
+                result.append(i)
+        if len(result)==1:
+            result = result[0]
+        return result
+
+    assert isinstance(event_html, unicode)
+    root = html_parse(event_html)
+    lists = ets_cleanup(split_table_r_to_leaf(root))
+    main = lists[0][1][1]
+    
+    result = {}
+    
+    header = main[0][0][0][0][0]
+    #ets_print(u'header', header)
+    result[u'GSM No'] = header[1]
+    result[u'Unit name'] = header[2][1:] # skip '-'
+    result[u'SIMICC'] = header[4]
+
+    #ets_print(u'block1', main[0])
+    for row in main[0]:
+        if len(row)>1:
+            result[row[0]] = row[2]
+    del main[0]
+
+    # FIXME: move into the main loop below
+    if main[0][0][0] == u'Cell No.':
+        #ets_print('Cells',  main[0])
+        cells = []
+        for row in main[0][1:]:
+            if len(row)<2:
+                continue
+            cell = {}
+            for i, key in enumerate(main[0][0]):
+                if not key or not i: # skip "Cell No." column
+                    continue
+                value = row[i]
+                if i==8:
+                    if value: # In some rare cases, the RSSI is invalid; see event 9547789
+                        value = value[0][0][0] # RSSI
+                cell[key] = value
+            cells.append(cell)
+        result[u'cells'] = cells
+        del main[0]
+
+    for block in main:
+        blockname = list_pack(block[0])
+        assert isinstance(blockname, unicode)
+        packed_block = list_pack(block)
+
+        if blockname == u'Decoding of message':
+            assert packed_block[1] == [ u'Parameter', u'Value', u'Para', u'UC', u'Description' ]
+            decoding = []
+            for row in packed_block[2:]:
+                if not isinstance(row, list):
+                    continue # not interested in incomplete information such as 'IncomingData ID' with no data!
+                if row[0] == 'IncomingData ID':
+                    result[u'IncomingData ID'] = row[1]
+                    continue
+                if len(row) != len(packed_block[1]):
+                    continue # discard lines that have incorrect length, such as RSSI request to propos
+                line = {}
+                for i, key in enumerate(packed_block[1]):
+                    line[key] = row[i]
+                decoding.append(line)
+            result['decoding'] = decoding
+
+        elif blockname == u'Message contains debug information':
+            debug={}
+            for k,v in packed_block[1:]:
+                debug[k] = v
+            result['debug'] = debug
+        elif blockname == u'Positions related':
+            assert packed_block[1] == [ u'ID', u'TimeStamp', u'PosType', u'LAT', u'LON', u'Status' ]
+            positions_related=[]
+            for row in packed_block[2:]:
+                line = {}
+                for i, key in enumerate(packed_block[1]):
+                    line[key] = row[i]
+                positions_related.append(line)
+            result['positions_related'] = positions_related
+        elif blockname == u'Outgoing requests':
+            assert not list_pack(block[1])
+            table_header = block[2]
+            assert list_pack(table_header) == [ u'Time', u'Outg.ID', u'ParaNo', u'Parameter', u'Value', u'Status', u'User', u'UC' ]
+            
+            result_table = []
+            for row in block[3:]:
+                if row == [ u'' ]:
+                    continue # ignore empty lines
+                result_line = {}
+                for i, key in enumerate(table_header):
+                    if not key:
+                        continue # ignore empty columns
+                    result_line[key] = row[i]
+                result_table.append(result_line)
+
+            result['outgoing_requests'] = result_table
+
+        else:
+            ets_print(u'unsupported block - ]', packed_block)
+
+    return result
+
+if __name__ == "__main__":
+    parser = OptionParser()
+    parser.add_option("--dump-lexer", help="Debug: Dump indented lexer output", action='store_true', dest='lexer_dump', default=False)
+    parser.add_option("--dump-parser", help="Debug: Dump indented parser output", action='store_true', dest='parser_dump', default=False)
+    parser.add_option("--verbose-parser", help="Debug: Verbose parser errors", action='store_true', dest='verbose_parser', default=False)
+    (options, args) = parser.parse_args()
+
+    try:
+        filename = args[0]
+    except IndexError:
+        print >> sys.stderr, 'Need a filename'
+        sys.exit(-1)
+
+    VERBOSE_PARSER = options.verbose_parser
+    p = unicode(file(filename).read(), 'utf-8')
+   
+    if options.lexer_dump:
+        print_lexer_tree(p)
+        sys.exit(0)
+
+    if options.parser_dump:
+        root = html_parse(p)
+        print_idented_tree(root)
+        sys.exit(0)
+
diff --git a/bin/make-countries.py b/bin/make-countries.py
new file mode 100755 (executable)
index 0000000..7a92415
--- /dev/null
@@ -0,0 +1,275 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Country file generator
+#
+# taken from http://www.itu.int/cgi-bin/htsh/glad/cga_mids.sh?lng=E
+#
+s='''
+201 Albania (Republic of)
+202 Andorra (Principality of)
+203 Austria
+204 Azores
+205 Belgium
+206 Belarus (Republic of)
+207 Bulgaria (Republic of)
+208 Vatican City State
+209, 210    Cyprus (Republic of)
+211 Germany (Federal Republic of)
+212 Cyprus (Republic of)
+213 Georgia
+214 Moldova (Republic of)
+215 Malta
+216 Armenia (Republic of)
+218 Germany (Federal Republic of)
+219, 220    Denmark
+224, 225    Spain
+226, 227, 228   France
+230 Finland
+231 Faroe Islands
+232, 233, 234, 235  United Kingdom
+236 Gibraltar
+237 Greece
+238 Croatia (Republic of)
+239, 240    Greece
+242 Morocco (Kingdom of)
+243 Hungary (Republic of)
+244, 245, 246   Netherlands (Kingdom of the)
+247 Italy
+248, 249    Malta
+250 Ireland
+251 Iceland
+252 Liechtenstein (Principality of)
+253 Luxembourg
+254 Monaco (Principality of)
+255 Madeira
+256 Malta
+257, 258, 259   Norway
+261 Poland (Republic of)
+262 Montenegro
+263 Portugal
+264 Romania
+265, 266    Sweden
+267 Slovak Republic
+268 San Marino (Republic of)
+269 Switzerland (Confederation of)
+270 Czech Republic
+271 Turkey
+272 Ukraine
+273 Russian Federation
+274 The Former Yugoslav Republic of Macedonia
+275 Latvia (Republic of)
+276 Estonia (Republic of)
+277 Lithuania (Republic of)
+278 Slovenia (Republic of)
+279 Serbia (Republic of)
+301 Anguilla
+303 Alaska (State of)
+304, 305    Antigua and Barbuda
+306 Netherlands Antilles
+307 Aruba
+308, 309    Bahamas (Commonwealth of the)
+310 Bermuda
+311 Bahamas (Commonwealth of the)
+312 Belize
+314 Barbados
+316 Canada
+319 Cayman Islands
+321 Costa Rica
+323 Cuba
+325 Dominica (Commonwealth of)
+327 Dominican Republic
+329 Guadeloupe (French Department of)
+330 Grenada
+331 Greenland
+332 Guatemala (Republic of)
+334 Honduras (Republic of)
+336 Haiti (Republic of)
+338 United States of America
+339 Jamaica
+341 Saint Kitts and Nevis (Federation of)
+343 Saint Lucia
+345 Mexico
+347 Martinique (French Department of)
+348 Montserrat
+350 Nicaragua
+351, 352, 353, 354, 355, 356, 357  Panama (Republic of)
+358 Puerto Rico
+359 El Salvador (Republic of)
+361 Saint Pierre and Miquelon (Territorial Collectivity of)
+362 Trinidad and Tobago
+364 Turks and Caicos Islands
+366, 367, 368, 369  United States of America
+370, 371, 372   Panama (Republic of)
+375, 376, 377   Saint Vincent and the Grenadines
+378 British Virgin Islands
+379 United States Virgin Islands
+401 Afghanistan
+403 Saudi Arabia (Kingdom of)
+405 Bangladesh (People's Republic of)
+408 Bahrain (Kingdom of)
+410 Bhutan (Kingdom of)
+412, 413    China (People's Republic of)
+416 Taiwan (Province of China)
+417 Sri Lanka (Democratic Socialist Republic of)
+419 India (Republic of)
+422 Iran (Islamic Republic of)
+423 Azerbaijani Republic
+425 Iraq (Republic of)
+428 Israel (State of)
+431, 432    Japan
+434 Turkmenistan
+436 Kazakhstan (Republic of)
+437 Uzbekistan (Republic of)
+438 Jordan (Hashemite Kingdom of)
+440, 441    Korea (Republic of)
+443 Palestine (In accordance with Resolution 99 Rev. Antalya, 2006)
+445 Democratic People's Republic of Korea
+447 Kuwait (State of)
+450 Lebanon
+451 Kyrgyz Republic
+453 Macao (Special Administrative Region of China)
+455 Maldives (Republic of)
+457 Mongolia
+459 Nepal (Federal Democratic Republic of)
+461 Oman (Sultanate of)
+463 Pakistan (Islamic Republic of)
+466 Qatar (State of)
+468 Syrian Arab Republic
+470 United Arab Emirates
+473, 475    Yemen (Republic of)
+477 Hong Kong (Special Administrative Region of China)
+478 Bosnia and Herzegovina
+501 Adelie Land
+503 Australia
+506 Myanmar (Union of)
+508 Brunei Darussalam
+510 Micronesia (Federated States of)
+511 Palau (Republic of)
+512 New Zealand
+514, 515    Cambodia (Kingdom of)
+516 Christmas Island (Indian Ocean)
+518 Cook Islands
+520 Fiji (Republic of)
+523 Cocos (Keeling) Islands
+525 Indonesia (Republic of)
+529 Kiribati (Republic of)
+531 Lao People's Democratic Republic
+533 Malaysia
+536 Northern Mariana Islands (Commonwealth of the)
+538 Marshall Islands (Republic of the)
+540 New Caledonia
+542 Niue
+544 Nauru (Republic of)
+546 French Polynesia
+548 Philippines (Republic of the)
+553 Papua New Guinea
+555 Pitcairn Island
+557 Solomon Islands
+559 American Samoa
+561 Samoa (Independent State of)
+563, 564, 565   Singapore (Republic of)
+567 Thailand
+570 Tonga (Kingdom of)
+572 Tuvalu
+574 Viet Nam (Socialist Republic of)
+576 Vanuatu (Republic of)
+578 Wallis and Futuna Islands
+601 South Africa (Republic of)
+603 Angola (Republic of)
+605 Algeria (People's Democratic Republic of)
+607 Saint Paul and Amsterdam Islands
+608 Ascension Island
+609 Burundi (Republic of)
+610 Benin (Republic of)
+611 Botswana (Republic of)
+612 Central African Republic
+613 Cameroon (Republic of)
+615 Congo (Republic of the)
+616 Comoros (Union of the)
+617 Cape Verde (Republic of)
+618 Crozet Archipelago
+619 Côte d'Ivoire (Republic of)
+621 Djibouti (Republic of)
+622 Egypt (Arab Republic of)
+624 Ethiopia (Federal Democratic Republic of)
+625 Eritrea
+626 Gabonese Republic
+627 Ghana
+629 Gambia (Republic of the)
+630 Guinea-Bissau (Republic of)
+631 Equatorial Guinea (Republic of)
+632 Guinea (Republic of)
+633 Burkina Faso
+634 Kenya (Republic of)
+635 Kerguelen Islands
+636, 637    Liberia (Republic of)
+642 Socialist People's Libyan Arab Jamahiriya
+644 Lesotho (Kingdom of)
+645 Mauritius (Republic of)
+647 Madagascar (Republic of)
+649 Mali (Republic of)
+650 Mozambique (Republic of)
+654 Mauritania (Islamic Republic of)
+655 Malawi
+656 Niger (Republic of the)
+657 Nigeria (Federal Republic of)
+659 Namibia (Republic of)
+660 Reunion (French Department of)
+661 Rwanda (Republic of)
+662 Sudan (Republic of the)
+663 Senegal (Republic of)
+664 Seychelles (Republic of)
+665 Saint Helena
+666 Somali Democratic Republic
+667 Sierra Leone
+668 Sao Tome and Principe (Democratic Republic of)
+669 Swaziland (Kingdom of)
+670 Chad (Republic of)
+671 Togolese Republic
+672 Tunisia
+674 Tanzania (United Republic of)
+675 Uganda (Republic of)
+676 Democratic Republic of the Congo
+677 Tanzania (United Republic of)
+678 Zambia (Republic of)
+679 Zimbabwe (Republic of)
+701 Argentine Republic
+710 Brazil (Federative Republic of)
+720 Bolivia (Plurinational State of)
+725 Chile
+730 Colombia (Republic of)
+735 Ecuador
+740 Falkland Islands (Malvinas)
+745 Guiana (French Department of)
+750 Guyana
+755 Paraguay (Republic of)
+760 Peru
+765 Suriname (Republic of)
+770 Uruguay (Eastern Republic of)
+775 Venezuela (Bolivarian Republic of)
+'''
+
+CLEANUP = True
+
+for line in s.split('\n'):
+    if not line:
+        continue
+    if CLEANUP:
+        pos = line.find('(')
+        if pos != -1:
+            line = line[:pos]
+    codes = []
+    while True:
+        codes.append(line[:3])
+        line = line[3:]
+        if line[0:2]==', ':
+            line = line[2:]
+        else:
+            break
+    line = line.strip()
+    for code in codes:
+        #print code+':', repr(line)+','
+        print code+'\t'+line
+
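+# Example output (tab separated), with CLEANUP enabled:
+#   209     Cyprus
+#   210     Cyprus
+#   211     Germany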
+
diff --git a/bin/nmea.py b/bin/nmea.py
new file mode 100644 (file)
index 0000000..84581b1
--- /dev/null
@@ -0,0 +1,397 @@
+#MESSAGE_TYPES = {
+#     1: 'Position Report Class A',
+#     2: 'Position Report Class A (Assigned schedule)',
+#     3: 'Position Report Class A (Response to interrogation)',
+#     4: 'Base Station Report',
+#     5: 'Ship and Voyage data',
+#     6: 'Addressed Binary Message',
+#     7: 'Binary Acknowledge',
+#     8: 'Binary Broadcast Message',
+#     9: 'Standard SAR Aircraft Position Report',
+#    10: 'UTC and Date Inquiry',
+#    11: 'UTC and Date Response',
+#    12: 'Addressed Safety Related Message',
+#    13: 'Safety Related Acknowledge',
+#    14: 'Safety Related Broadcast Message',
+#    15: 'Interrogation',
+#    16: 'Assigned Mode Command',
+#    17: 'GNSS Binary Broadcast Message',
+#    18: 'GNSS Binary Broadcast Message',
+#    19: 'Extended Class B Equipment Position Report',
+#    20: 'Data Link Management',
+#    21: 'Aids-to-navigation Report',
+#    22: 'Channel Management',
+#    23: 'Group Assignment Command',
+#    24: 'Class B CS Static Data Report',
+#    25: 'Binary Message, Single Slot',
+#    26: 'Binary Message, Multiple Slot',
+#}
+
+
+#def log_stderr(txt):
+#    print >> sys.stderr, txt
+#
+#
+#def ais_packet_reader(source, debug_discarded=False):
+#    """
+#    Reads raw packets from a source,
+#    Check the CRC discarding invalid ones
+#    Assemble fragments, taking care of channels A/B multiplexing
+#    Yield armored packets
+#    """
+#   
+#    if not debug_discarded:
+#        log = lambda(x): None
+#    else:
+#        log = log_stderr
+#    payload_fragments = {} # channel -> payload
+#    payload_last_fragid = {} # channel -> last_frag_id
+#
+#    for line in source:
+#        while True:
+#            if len(line)==0:
+#                continue # skip empty lines
+#            if line[-1] in '\r\n\0':
+#                line = line[:-1]
+#            else:
+#                break
+#   
+#        #print "Read", line
+#        if line[-3] != '*':
+#            log('ERROR: no checksum found in line: '+repr(line))
+#            continue
+#        data,checksum = line.split('*', 1)
+#
+#        crc = 0
+#        for c in data[1:]:
+#            crc ^= ord(c)
+#        crc = '%02X'%crc
+#        if crc != checksum:
+#            log('ERROR: checksum failure in line: '+repr(line)+'. Recomputed CRC is '+crc)
+#            continue
+#
+#        tokens = data.split(',')
+#        if tokens[0]!='!AIVDM':
+#            log('ERROR: Ignoring line not starting with !AIVDM: '+repr(line))
+#            continue
+#        try:
+#            aivdmid, frag_count, frag_id, seq_id, channel, armored_payload, nfillbits = tokens
+#        except ValueError:
+#            log('ERROR: Ignoring line without 6 commas: '+repr(line))
+#            continue
+#
+#        try:
+#            frag_count = int(frag_count)
+#        except ValueError:
+#            log('ERROR: fragment count is not a number: '+repr(line))
+#            continue
+#            
+#        try:
+#            frag_id = int(frag_id)
+#        except ValueError:
+#            log('ERROR: fragment id is not a number: '+repr(line))
+#            continue
+#            
+#        if channel not in 'AB':
+#            log('WARNING: Invalid AIS channel.')
+#            continue
+#
+#        if not payload_fragments.has_key(channel):
+#            payload_fragments[channel] = ''
+#            payload_last_fragid[channel] = 0
+#
+#        if frag_id != payload_last_fragid[channel]+1:
+#            log('ERROR: Lost AIS data fragments. Ignoring previous payload for channel '+channel)
+#            payload_fragments[channel] = ''
+#            payload_last_fragid[channel] = 0
+#
+#        payload_fragments[channel] += armored_payload
+#        payload_last_fragid[channel] = frag_id
+#
+#        if frag_id == frag_count:
+#            yield payload_fragments[channel]
+#            payload_fragments[channel] = ''
+#            payload_last_fragid[channel] = 0
+#    # see http://meeting.helcom.fi/c/document_library/get_file?folderId=75444&name=DLFE-30368.pdf
+#
+#
+#    
+#def decode(armored_payload):
+#    #print 'decoding', armored_payload
+#
+#    # decode payload
+#    payload_lenght = len(armored_payload)*6
+#    #print "\tpayload length", payload_lenght, "bits"
+#    payload = ''
+#    nexchar = None
+#    next_offset = 0
+#    for c in armored_payload:
+#        i = ord(c) - 48
+#        if i>=40:
+#            i -= 8
+#        if next_offset == 0:
+#            nextchar = (i<<2)
+#            next_offset = 6
+#        elif next_offset == 6:
+#            payload += chr(nextchar+(i>>4))
+#            nextchar = (i&0xf) << 4
+#            next_offset = 4
+#        elif next_offset == 4:
+#            payload += chr(nextchar+(i>>2))
+#            nextchar = (i&0x3) << 6
+#            next_offset = 2
+#        elif next_offset == 2:
+#            payload += chr(nextchar+(i))
+#            nextchar = None
+#            next_offset = 0
+#        #print 'Added raw "%s": %02X'% (c, i)
+#        #print 'unarmoared_payload:',
+#        #for c in payload:
+#        #    print '%02X'%ord(c),
+#        #print
+#        #print 'next_offset: ', next_offset
+#        #if nextchar is not None:
+#        #    print 'nextchar: %02X'% nextchar
+#    if nextchar is not None:
+#        payload += chr(nextchar)
+#    
+#
+#    def getbit(offset):
+#        return (ord(payload[offset/8])>>(7-offset%8)) & 1
+#
+#    def getbits(bitoffset, cnt, signed=False):
+#        res = 0L
+#        negative = False
+#        while cnt:
+#            bit = getbit(bitoffset)
+#            if signed:
+#                negative = bit==1
+#                signed = False
+#                continue
+#            res = res<<1 | bit
+#            cnt -= 1
+#            bitoffset += 1
+#        if negative:
+#            return -res
+#        return res
+#    
+#    def getbits_binstr(bitoffset, cnt):
+#        result = ''
+#        while cnt:
+#            if getbit(bitoffset):
+#                result += '1'
+#            else:
+#                result += '0'
+#            cnt -= 1
+#        return result
+#
+#
+#    def ais_6_to_8bits(data):
+#        result = ''
+#        while data:
+#            i = data & 0x3f
+#            data >>= 6
+#            if i<32:
+#                i+=64
+#            result = chr(i)+result
+#        return result
+#
+#    def ais_strip(txt):
+#        # remove '@' at the end of text
+#        while len(txt) and txt[-1] in '@ ':
+#            txt = txt[:-1]
+#        return txt
+#            
+#
+#    decoded = {}
+#    #decoded['raw_armored'] = armored_payload
+#    #decoded['raw_unarmored'] = ' '.join([ '%02x'%ord(c) for c in payload])
+#    message_type = getbits(0,6)
+#    decoded['message_type'] = str(message_type)+'-'+MESSAGE_TYPES.get(message_type, 'Unknown') 
+#    if message_type in (1,2,3):
+#        if payload_lenght>37:
+#            decoded['mmsi'] = getbits(8,30)
+#        if payload_lenght>41:
+#            status = getbits(38,4)
+#            decoded['status'] = str(status)+'-'+STATUS_CODES.get(status, 'Unknown')
+#        if payload_lenght>49:
+#            rateofturn = getbits(42, 8, True)
+#            if rateofturn != -128:
+#                decoded['rate_of_turn'] = rateofturn
+#        if payload_lenght>59:
+#            sog = getbits(50, 10) # speed over ground
+#            if sog != 1023:
+#                decoded['speed'] =sog/10.
+#        if payload_lenght>115:
+#            position_accuracy = getbit(60)
+#            longitude = getbits(61,28, True)
+#            latitude = getbits(89, 27, True)
+#            if longitude != 0x6791AC0 and latitude != 0x6791AC0:
+#                latitude = latitude/10000.
+#                longitude = longitude/10000.
+#                decoded['latitude'] = latitude/60.
+#                decoded['longitude'] = longitude/60.
+#        if payload_lenght>127:
+#            cog = getbits(116,12)
+#            decoded['course'] = cog/10.
+#        if payload_lenght>136:
+#            heading = getbits(128,9)
+#            if heading!=511:
+#                decoded['heading'] = heading
+#        if payload_lenght>142:
+#            seconds = getbits(136,6)
+#            decoded['seconds'] = seconds
+#    elif message_type == 4:
+#        if payload_lenght>37:
+#            decoded['mmsi'] = getbits(8,30)
+#        if payload_lenght>77:
+#            dy = getbits(38,14)
+#            dm = getbits(52,4)
+#            dd = getbits(56,5)
+#            th = getbits(61,5)
+#            tm = getbits(66,6)
+#            ts = getbits(71,6)
+#            fulldate = ''
+#            if dy==0:
+#                fulldate+='????'
+#            else:
+#                fulldate+='%04d'%+dy
+#            if dm==0:
+#                fulldate+='-??'
+#            else:
+#                fulldate+='-%02d'%dm
+#            if dd==0:
+#                fulldate+='-??'
+#            else:
+#                fulldate+='-%02d'%dd
+#            if th==24:
+#                fulldate+=' ??'
+#            else:
+#                fulldate+=' %02d'%th
+#            if tm==60:
+#                fulldate+=':??'
+#            else:
+#                fulldate+=':%02d'%tm
+#            if ts==60:
+#                fulldate+=':??'
+#            else:
+#                fulldate+=':%02d'%ts
+#            decoded['datetime'] = fulldate
+#    elif message_type == 5:
+#        if payload_lenght>37:
+#            decoded['mmsi'] = getbits(8,30)
+#        if payload_lenght>39:
+#            ais_version = getbits(38,2)
+#            if ais_version != 0:
+#                log_stderr("ERROR: unknown AIS version "+str(ais_version))
+#        if payload_lenght>69:
+#            imo = getbits(40,30)
+#            decoded['imo'] = imo
+#        if payload_lenght>111:
+#            callsign = getbits(70,42)
+#            decoded['callsign'] = ais_strip(ais_6_to_8bits(callsign))
+#        if payload_lenght>231:
+#            vesselname = getbits(112,120)
+#            decoded['vesselname'] = ais_strip(ais_6_to_8bits(vesselname))
+#        if payload_lenght>239:
+#            vesseltype = getbits(232,8)
+#            decoded['vesseltype'] = str(vesseltype)+'-'+SHIP_TYPES.get(vesseltype, 'Unknown')
+#        if payload_lenght>257:
+#            dim_bow = getbits(240,9)
+#            decoded['dim_bow'] = dim_bow
+#            decoded['length'] = dim_bow + getbits(249,9)
+#        if payload_lenght>269:
+#            dim_port = getbits(258,6)
+#            decoded['dim_port'] = dim_port
+#            decoded['width'] = dim_port + getbits(264,6)
+#        if payload_lenght>293:
+#            decoded['raw_eta'] = getbits_binstr(274, 20)
+#            eta_month = getbits(274,4)
+#            eta_day = getbits(278,5)
+#            eta_hour = getbits(283,5)
+#            eta_min = getbits(288,6)
+#            if True: #eta_day!=0 and eta_month!=0:
+#                decoded['eta'] = '%02d-%02d'%(eta_month,eta_day)
+#                if eta_min<60 and eta_hour<24:
+#                    decoded['eta'] += ' %02d:%02d'%(eta_hour,eta_min)
+#                else:
+#                    decoded['eta'] += ' ??:??'
+#        if payload_lenght>301:
+#            decoded['draught'] = getbits(293,9)/10.
+#        if payload_lenght>421:
+#            decoded['destination'] = ais_strip(ais_6_to_8bits(getbits(302,120)))
+#    return decoded
+
+#def print_nice_message(message):
+#    if message.get('message_type', '  ')[:2] in ('1-', '2-', '3-'):
+#        #print '\t', message
+#        #print '*'*10, 'Position', '*'*10
+#        print "%09d"%message.get('mmsi', 0),
+#        print message.get('status', ''),
+#        print message.get('rate_of_turn', ''),
+#        print message.get('speed', ''),
+#        print message.get('latitude', ''),
+#        print message.get('longitude', ''),
+#        print str(message.get('course', ''))+'°',
+#        print str(message.get('heading', ''))+'°',
+#        print str(message.get('seconds', ''))+'s',
+#        print
+#    elif message.get('message_type', '  ')[:2] == '4-':
+#        print "%09d"%message.get('mmsi', 0),
+#        print "Base station",
+#        print message.get('datetime', '')
+#    elif message.get('message_type', '  ')[:2] == '5-':
+#        #print '*'*10, 'Info', '*'*10
+#        print "%09d"%message.get('mmsi', 0),
+#        print '(%d)'%message.get('imo', 0),
+#        print '(%s)'%message.get('callsign', ''),
+#        print message.get('vesselname', ''),
+#        print message.get('vesseltype', ''),
+#        print '->',
+#        print message.get('eta', ''),
+#        print message.get('destination', ''),
+#        print message.get('length', ''),
+#        print message.get('width', ''),
+#        if 'draught' in message:
+#            print '%.1f' % message['draught']
+#        print message.get('dim_bow', ''),
+#        print message.get('dim_port', ''),
+#        print
+#    else:
+#        print "********* Message ", message.get('message_type', '  ')
+#
+#
+#
+#if __name__ == '__main__':
+#    parser = OptionParser()
+#    parser.add_option('--in', '-i', help="input AIS data file. Default to stdin not implemented yet.", action='store', type='str', dest='filename')
+#    parser.add_option('-u', '--udp-port', help="listen on UDP port for AIS data. Example 4158.", action='store', type='int', dest='udp')
+#    parser.add_option('--max-count', help="maximum number of datagram treated", action='store', type='int', dest='max_count')
+#    #parser.add_option('--debug-fragments', help="prints fragment errors on stderr.", action='store_true', dest='debug_fragments', default=False)
+#    #parser.add_option('--debug-dumpraw', help="prints incoming packets as they come in.", action='store_true', dest='debug_dumppackets', default=False)
+#    (options, args) = parser.parse_args()
+#    
+#    if options.filename and options.udp:
+#        print >> sys.stderr, "Can't have both file and udp as input"
+#        sys.exit(1)
+#    if options.filename:
+#        source = logliner(options.filename)
+#    elif options.udp:
+#        source = udpliner(options.udp)
+#    else:
+#        print >> sys.stderr, "Must have either file or udp as input"
+#        sys.exit(1)
+#    if options.debug_dumppackets:
+#        source = dumpsource(source)
+#   
+#            
+#    count = 0
+#
+#    for data in ais_packet_reader(source, options.debug_fragments):
+#        data = decode(data)
+#        #print_nice_message(data)
+#        count += 1
+#        if count==options.max_count:
+#            break
+#    
diff --git a/bin/ntools.py b/bin/ntools.py
new file mode 100644 (file)
index 0000000..cee166b
--- /dev/null
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+
+import sys, os
+from socket import *
+import calendar
+
+def datetime_to_timestamp(dt):
+    return calendar.timegm(dt.utctimetuple())
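+# Interprets dt as UTC, e.g.:
+#   datetime_to_timestamp(datetime(2010, 5, 29, 22, 18, 44)) == 1275171524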
+
+def clean_ais_charset(txt):
+    assert isinstance(txt, str)
+    result = ''
+    for c in txt:
+        oc = ord(c)
+        if oc<32 or oc >95:
+            result += ''
+        else:
+            result += c
+    return result
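+# Keeps only the 6-bit AIS character set (ASCII 32..95: digits, upper case and
+# basic punctuation); e.g. clean_ais_charset('MV Nordkapp') -> 'MV N'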
+
+def clean_alnum(txt):
+    assert isinstance(txt, str)
+    result = ''
+    for c in txt:
+        if ( c>='0' and c<='9' ) or ( c>='A' and c<='Z' ):
+            result += c
+    return result
+
+def clean_alnum_unicode(txt):
+    assert isinstance(txt, unicode)
+    return unicode(clean_alnum(txt.encode('ascii', 'replace')))
+    
+    
+def open_with_mkdirs(filename, mode):
+    try:
+        return file(filename, mode)
+    except IOError, ioerr:
+        # FIXME: only do this when the error says the directory doesn't exist ...
+        #print 'Creating directory', os.path.dirname(filename)
+        os.makedirs(os.path.dirname(filename))
+        return file(filename, mode)
+
+
+# udp source
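+# Binds an IPv6 UDP socket on all interfaces and yields individual stripped
+# NMEA lines; a single datagram may carry several lines.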
+def udpliner(port):
+    s = socket(AF_INET6,SOCK_DGRAM)
+    s.bind(('', port))
+    while True:
+        data,addr = s.recvfrom(1024)
+        for line in data.split('\n'):
+            while True:
+                if len(line)==0:
+                    break
+                if line[0] in '\r\n\0':
+                    line = line[1:]
+                    continue
+                if line[-1] in '\r\n\0':
+                    line = line[:-1]
+                    continue
+                break
+            if len(line):
+                yield line
+    s.close() # never reached
+
+
+# log file source
+def logliner(filename):
+    for line in file(filename).readlines():
+        yield line
+
+
+# debug/display wrapper source
+def dumpsource(source):
+    for line in source:
+        while line and line[-1] in '\r\n\0':
+            line = line[:-1]
+        print "INPUT", line
+        yield line
+
+def xml_escape(txt):
+    return txt.replace(u'&', u'&amp;').replace(u'<', u'&lt;')
+
+def alarm():
+    os.system('touch /home/nirgal/kod/ais/alarm &')
+
diff --git a/bin/show_targets_planes.py b/bin/show_targets_planes.py
new file mode 100755 (executable)
index 0000000..075c938
--- /dev/null
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import sys
+from optparse import OptionParser
+from datetime import datetime, timedelta
+
+from db import *
+from ais import *
+
+cursor = get_common_cursor()
+
+def go():
+    print '<?xml version="1.0" encoding=\"UTF-8\"?>'
+    print '<kml xmlns="http://earth.google.com/kml/2.1">'
+    print '<Document>'
+    print '<name>BTF spotters</name>'
+    
+    def print_style(name,icon,heading=None):
+        stylename = name
+        if heading is not None:
+            stylename+='-'+str(heading)
+        print '<Style id="%s">' % stylename
+        print '  <LabelStyle>'
+        print '    <scale>0.7</scale>'
+        print '  </LabelStyle>'
+        print '  <IconStyle>'
+        print '    <Icon>'
+        print '      <href>%s</href>' % icon
+        print '    </Icon>'
+        if heading is not None:
+            print '    <heading>%d</heading>' % heading
+        print '    <scale>0.5</scale>'
+        print '    <hotSpot x="0.5" y="0.5" xunits="fraction" yunits="fraction"/>'
+        print '  </IconStyle>'
+        print '</Style>'
+    
+    print_style('plane', 'plane.png')
+   
+    sql = u'''
+    SELECT reg, ads, type, flight, plane.usr, updated, lat, lon
+        FROM plane
+        JOIN ppuser
+            ON (plane.usr = ppuser.usr)
+        WHERE reg != '        '
+        ORDER BY reg
+    '''
+    sqlexec(sql)
+
+    for reg, ads, type, flight, usr, updated, latitude, longitude  in cursor.fetchall():
+        if not latitude or not longitude:
+            continue
+        def _xml(txt):
+            if txt:
+                return txt.encode('utf8').replace('<', '&lt;')
+            else:
+                return ''
+
+        print '<Placemark>'
+        print '<name>',
+        if reg:
+            print _xml(reg),
+        else:
+            print 'ADS', ads,
+        print '</name>'
+
+        print '<description><![CDATA['
+        print 'Last seen: %s<br/>' % str(updated)
+        print 'ADS:', ads, '<br/>'
+        print 'Plane type:', type, '<br/>'
+        print 'Flight:', flight, '<br/>'
+        print "Warning: the position is a wild guess! It's computed by averaging the positions broadcast by other planes and received by the same ground station.<br/>"
+        print ']]>'
+        print '</description>'
+
+        style = 'plane'
+        print '<styleUrl>#%s</styleUrl>' % style
+
+        print '<Point>'
+        print '<altitudeMode>relativeToGround</altitudeMode>'
+        print '<coordinates>'+str(longitude)+','+str(latitude)+',0</coordinates>'
+        print '</Point>'
+        print '</Placemark>'
+    
+    print '</Document>'
+    print '</kml>'
+
+if __name__ == "__main__":
+    parser = OptionParser()
+    options, args = parser.parse_args()
+
+    go()
diff --git a/bin/show_targets_ships.py b/bin/show_targets_ships.py
new file mode 100755 (executable)
index 0000000..52472ff
--- /dev/null
@@ -0,0 +1,661 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+__all__ = [ 'format_fleet', 'format_boat_intime', 'format_boat_track', 'STYLE', 'KML_DISPLAYOPT_NONAMES', 'KML_DISPLAYOPT_HISTORICAL', 'KML_DISPLAYOPT_SOURCES', 'kml_to_kmz' ]
+
+import sys, logging, struct, zipfile
+from StringIO import StringIO # TODO: use python 2.6 io.BufferedWriter(sys.stdout)
+from datetime import datetime, timedelta
+import copy
+
+from ais import *
+from area import Area, load_area_from_kml_polygon
+from ntools import datetime_to_timestamp
+
+
+KML_DISPLAYOPT_NONAMES = 1 # don't print ship name
+KML_DISPLAYOPT_HISTORICAL = 2 # never show ship track as lost
+KML_DISPLAYOPT_SOURCES = 4 # display sources
+
+
+LOST_PERIOD = timedelta(1)
+
+KML_HEADER =u'''\
+<?xml version="1.0" encoding="UTF-8"?>
+<kml xmlns="http://www.opengis.net/kml/2.2"
+    xmlns:gx="http://www.google.com/kml/ext/2.2">
+<Document>
+'''
+
+KML_FOOTER =u'''\
+</Document>
+</kml>
+'''
+
+   
+
+class Style:
+    def __init__(self):
+        self.label_size = 0.7
+        self.icon_size = 0.5 # 0.2
+        self.used_icons = set()
+
+    def _format_style(self, stylename, icon, heading=None, color=None):
+        """
+        color format is google styled: aabbggrr
+        example ffea00ff for purple
+        """
+        result = u''
+        if heading is not None:
+            stylename+='-'+str(heading)
+        result += '<Style id="%s">\n' % stylename
+        result += '  <LabelStyle>\n'
+        result += '    <scale>%f</scale>\n' % self.label_size
+        result += '  </LabelStyle>\n'
+        result += '  <IconStyle>\n'
+        result += '    <Icon>\n'
+        result += '      <href>%s</href>\n' % icon
+        result += '    </Icon>\n'
+        if heading is not None:
+            result += '    <heading>%d</heading>\n' % heading
+        if color is not None:
+            result += '    <color>%s</color>\n' % color
+        result += '    <scale>%f</scale>\n' % self.icon_size
+        result += '    <hotSpot x="0.5" y="0.5" xunits="fraction" yunits="fraction"/>\n'
+        result += '  </IconStyle>\n'
+        result += '</Style>\n'
+        self.used_icons.add(icon)
+        return result
+    
+    def make_header(self):
+        raise NotImplementedError # abstract class
+    
+    def get_style_name(self, nmea):
+        raise NotImplementedError # abstract class
+
+
+class FishersStyle(Style):
+    def make_header(self):
+        white = None
+        green = 'ff86fd5f' # '5f-fd-86'
+        yellow = 'ff86eeff' #'ff-ee-86'
+        red = 'ff5865fc' #'fc-65-58'
+        result = u''
+        result += self._format_style('landstation', 'capital_small.png')
+
+        result += self._format_style('base-stop', 'boat-stop.png', color=white)
+        result += self._format_style('fisher-stop', 'boat-stop.png', color=red)
+        result += self._format_style('tug-stop', 'boat-stop.png', color=green)
+        result += self._format_style('auth-stop', 'boat-stop.png', color=yellow)
+        
+        for heading in [ None ] + range(0, 360, 10):
+            result += self._format_style('base', 'boat.png', color=white, heading=heading)
+            result += self._format_style('fisher', 'boat.png', color=red, heading=heading)
+            result += self._format_style('tug', 'boat.png', color=green, heading=heading)
+            result += self._format_style('auth', 'boat.png', color=yellow, heading=heading)
+            result += self._format_style('base-lost', 'boat-invis.png', color=white, heading=heading)
+            result += self._format_style('fisher-lost', 'boat-invis.png', color=red, heading=heading)
+            result += self._format_style('tug-lost', 'boat-invis.png', color=green, heading=heading)
+            result += self._format_style('auth-lost', 'boat-invis.png', color=yellow, heading=heading)
+        
+        return result
+
+    def get_style_name(self, nmea, is_lost):
+        if (nmea.strmmsi.startswith('00') and not nmea.strmmsi.startswith('000')):
+            return 'landstation'
+        
+        if nmea.type==30: # Fishing vessel
+            stylename = 'fisher'
+        elif nmea.type in (31, 32, 52): # Towing or Tug
+            stylename = 'tug'
+        elif nmea.type in (35, 53, 55): # Authority
+            stylename = 'auth'
+        else:
+            stylename = 'base'
+
+        if nmea.status in (1, 5, 6) or nmea.sog<0.5*AIS_SOG_SCALE:
+            stylename += '-stop'
+        else:
+            if is_lost:
+                stylename+='-lost'
+            
+            if nmea.cog != AIS_COG_NOT_AVAILABLE:
+                course = int(nmea.cog/10.) # cog is in tenths of a degree
+                course = (course+5)/10*10 % 360 # round to the nearest 10°
+                stylename += '-%d' % course
+            elif nmea.heading != AIS_NO_HEADING:
+                course = (nmea.heading+5)/10*10 % 360 # round to the nearest 10°
+                stylename += '-%d' % course
+        return stylename
+
+
+class PelagosStyle(Style):
+    def make_header(self):
+        white = None
+        green = 'ff86fd5f' # '5f-fd-86'
+        yellow = 'ff86eeff' #'ff-ee-86'
+        pink = 'ffff00ea' #'ea-00-ff'
+        red = 'ff5865fc' #'fc-65-58'
+
+        result = u''
+        result += self._format_style('landstation', 'capital_small.png')
+
+        result += self._format_style('base-stop', 'boat-stop.png', color=white)
+        result += self._format_style('cargo-stop', 'boat-stop.png', color=green)
+        result += self._format_style('tanker-stop', 'boat-stop.png', color=yellow)
+        result += self._format_style('hsc-stop', 'boat-stop.png', color=pink)
+        result += self._format_style('hazarda-stop', 'boat-stop.png', color=red)
+
+        for heading in [ None ] + range(0, 360, 10):
+            result += self._format_style('base', 'boat.png', color=white, heading=heading)
+            result += self._format_style('cargo', 'boat.png', color=green, heading=heading)
+            result += self._format_style('tanker', 'boat.png', color=yellow, heading=heading)
+            result += self._format_style('hsc', 'boat.png', color=pink, heading=heading)
+            result += self._format_style('hazarda', 'boat.png', color=red, heading=heading)
+
+            result += self._format_style('base-lost', 'boat-invis.png', color=white, heading=heading)
+            result += self._format_style('cargo-lost', 'boat-invis.png', color=green, heading=heading)
+            result += self._format_style('tanker-lost', 'boat-invis.png', color=yellow, heading=heading)
+            result += self._format_style('hsc-lost', 'boat-invis.png', color=pink, heading=heading)
+            result += self._format_style('hazarda-lost', 'boat-invis.png', color=red, heading=heading)
+        
+        return result
+
+    def get_style_name(self, nmea, is_lost):
+        if (nmea.strmmsi.startswith('00') and not nmea.strmmsi.startswith('000')):
+            return 'landstation'
+        
+        if nmea.type in (41, 61, 71, 81): # major hazard materials
+            stylename = 'hazarda'
+        elif nmea.type >= 70 and nmea.type <= 79:
+            stylename = 'cargo'
+        elif nmea.type >= 80 and nmea.type <= 89:
+            stylename = 'tanker'
+        elif nmea.type >= 40 and nmea.type <= 49:
+            stylename = 'hsc'
+        else:
+            stylename = 'base'
+
+        if nmea.status in (1, 5, 6) or nmea.sog<0.5*AIS_SOG_SCALE:
+            stylename += '-stop'
+        else:
+            if is_lost:
+                stylename+='-lost'
+            
+            if nmea.cog != AIS_COG_NOT_AVAILABLE:
+                course = int(nmea.cog/10.) # cog is in tenths of a degree
+                course = (course+5)/10*10 % 360 # round to the nearest 10°
+                stylename += '-%d' % course
+            elif nmea.heading != AIS_NO_HEADING:
+                course = (nmea.heading+5)/10*10 % 360 # round to the nearest 10°
+                stylename += '-%d' % course
+        return stylename
+
+STYLE = FishersStyle()
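+# Module-level default style; __main__ below replaces it with PelagosStyle() when --style pelagos is given.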
+
+
+def format_boat_data(nmea, timeinfo=None, display_options=0):
+    '''
+    timeinfo: None to generate a GoogleEarth 4 file, with no timing information
+              True to generate a GoogleEarth 5 file, with the start time taken from nmea
+              a datetime or timestamp instance to generate a GoogleEarth 5 file, with the start time taken from nmea and this value as the end date
+    '''
+    mmsi = nmea.strmmsi
+    timestamp_1, status, rot, sog, latitude, longitude, cog, heading, source_1 = Nmea1.to_values(nmea)
+    timestamp_5, imo, name, callsign, type, dim_bow, dim_stern, dim_port, dim_starboard, eta_M, eta_D, eta_h, eta_m, draught, destination, source_5 = Nmea5.to_values(nmea)
+
+    if latitude==AIS_LAT_NOT_AVAILABLE or longitude==AIS_LON_NOT_AVAILABLE:
+        return u''
+
+    result = u''
+
+    if timeinfo is not None and timeinfo != True:
+        if not isinstance(timeinfo, datetime):
+            timeinfo = datetime.utcfromtimestamp(timeinfo)
+            
+    result += u'<Placemark>\n'
+
+    if not (display_options & KML_DISPLAYOPT_NONAMES):
+        result += u'<name>' + xml_escape(nmea.get_title()) + u'</name>\n'
+
+    result += u'<description><![CDATA[\n'
+    if display_options & KML_DISPLAYOPT_NONAMES:
+        result += u'Vessel name: ' + xml_escape(nmea.get_name()) + u'<br>\n'
+        
+    dt_1 = datetime.utcfromtimestamp(timestamp_1)
+    if display_options & KML_DISPLAYOPT_HISTORICAL:
+        result += u'%s GMT<br>\n' % dt_1.strftime('%Y-%m-%d %H:%M:%S')
+        is_lost = None
+    else:
+        if timeinfo is None:
+            is_lost = dt_1 < datetime.utcnow()-LOST_PERIOD
+            if is_lost:
+                result += u'Track <b>lost</b> since %s GMT<br>\n' % dt_1.strftime('%Y-%m-%d %H:%M:%S')
+            else:
+                result += u'Last seen %s GMT<br>\n' % dt_1.strftime('%Y-%m-%d %H:%M:%S')
+        else: # timeinfo is not None
+            if timeinfo == True:
+                is_lost = None
+            else:
+                is_lost = timeinfo > dt_1 + LOST_PERIOD
+
+    if not mmsi.isdigit():
+        result += u'NO MMSI<br>\n'
+        is_land_station = False
+    else:
+        result += u'MMSI: %s ' % mmsi
+        ref_mmsi = str(mmsi) # FIXME not needed
+        is_land_station = ref_mmsi.startswith('00') and not ref_mmsi.startswith('000')
+        if is_land_station:
+            ref_mmsi = ref_mmsi[2:]
+        result += u'('+COUNTRIES_MID.get(int(ref_mmsi[:3]), u'fake')+u')<br>\n'
+    if not is_land_station :
+        if imo:
+            #result += u'IMO<a href="http://www.xvas.it/SPECIAL/VTship.php?imo=%(imo)s&amp;mode=CK">%(imo)s</a><br>\n' % { 'imo': imo }
+            result += u'IMO: %s<br>\n' % imo
+        else:
+            result += u'no known IMO<br>\n'
+    callsign = nmea.get_callsign(default=None)
+    if callsign is not None:
+        result += u'Callsign: %s<br>\n' % xml_escape(callsign)
+    if type:
+        result += u'Type: %s<br>\n' % SHIP_TYPES.get(type, 'unknown')
+    if status != AIS_STATUS_NOT_AVAILABLE:
+        result += u'Status: %s<br>\n' %STATUS_CODES.get(status, 'unknown')
+    if cog != AIS_COG_NOT_AVAILABLE:
+        result += u'Course: %.01f°<br>\n' % (cog/10.)
+    if heading != AIS_NO_HEADING:
+        result += u'Heading: %d°<br>\n' % heading
+    if sog!=AIS_SOG_NOT_AVAILABLE:
+        if sog != AIS_SOG_FAST_MOVER:
+            result += u'Speed: %.01f kts<br>\n' % (sog/AIS_SOG_SCALE)
+        else:
+            result += u'Speed: more than 102.2 kts<br>\n'
+    length = nmea.get_length()
+    width = nmea.get_width()
+    if length or width or draught:
+        result += u'Size: %dx%d' % (length, width)
+        if draught:
+            result += u'/%.01f' % (draught/10.)
+        result += u'm<br>\n'
+    destination = nmea.get_destination(default=None)
+    if destination:
+        result += u'Destination: %s<br>\n' % xml_escape(destination)
+    eta = nmea.get_eta_str(default=None)
+    if eta is not None:
+        result += u'ETA: %s<br>\n' % eta
+
+    if (display_options & KML_DISPLAYOPT_SOURCES) and (source_1 or source_5):
+        result += u'Source: '
+        if source_1:
+            result += Nmea.format_source(source_1)
+        if source_5 and source_1 != source_5:
+            result += u', '+ Nmea.format_source(source_5)
+        result += u'<br>\n'
+    result += u'<a href="https://ais.nirgal.com/vessel/%(mmsi)s/">More...</a>' % {'mmsi': mmsi }
+    result += u']]>\n'
+    result += u'</description>\n'
+
+    result += u'<styleUrl>#%s</styleUrl>\n' % STYLE.get_style_name(nmea, is_lost)
+
+    result += u'<Point>\n'
+    result += u'<coordinates>'+str(longitude/AIS_LATLON_SCALE)+','+str(latitude/AIS_LATLON_SCALE)+'</coordinates>'
+    result += u'</Point>\n'
+
+    if timeinfo is not None:
+        #result += u'<TimeStamp><when>%s</when></TimeStamp>\n' % (dt_1.strftime('%Y-%m-%dT%H:%M:%SZ'))
+        result += u'<gx:TimeSpan><begin>%s</begin>' % dt_1.strftime('%Y-%m-%dT%H:%M:%SZ')
+        if timeinfo != True:
+            result += u'<end>%s</end>' % timeinfo.strftime('%Y-%m-%dT%H:%M:%SZ')
+        result += u'</gx:TimeSpan>\n'
+    result += u'</Placemark>\n'
+    return result
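+
+# Illustrative usage sketch (the MMSI value is made up):
+#   nmea = Nmea.new_from_lastinfo('227006760')
+#   kml = format_boat_data(nmea)                              # GoogleEarth 4 output, no time info
+#   kml = format_boat_data(nmea, timeinfo=True)               # GoogleEarth 5 output, begin time only
+#   kml = format_boat_data(nmea, timeinfo=datetime.utcnow())  # GoogleEarth 5 output, begin and end time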
+
+
+
+
+def format_fleet(mmsi_iterator, document_name=None):
+    result = u''
+    result += KML_HEADER
+
+    if document_name is None:
+        document_name = 'AIS database'
+    result += u'<name>%s</name>\n' % document_name
+    
+    result += STYLE.make_header()
+
+    long_ago = datetime_to_timestamp(datetime.utcnow() - timedelta(90))
+
+    for mmsi in mmsi_iterator:
+        nmea = Nmea.new_from_lastinfo(mmsi)
+        if nmea.get_last_timestamp() < long_ago:
+            continue
+        result += format_boat_data(nmea, display_options=KML_DISPLAYOPT_SOURCES)
+    result += KML_FOOTER
+    return result
+
+
+def format_boat_intime_section(nmea_iterator, kml_displayopt=0):
+    result = u''
+    last_nmea = None
+    for nmea in nmea_iterator:
+        if last_nmea is None:
+            timeinfo = True
+        else:
+            timeinfo = datetime.utcfromtimestamp(last_nmea.timestamp_1)
+
+        result += format_boat_data(nmea, timeinfo, kml_displayopt|KML_DISPLAYOPT_HISTORICAL)
+        last_nmea = copy.copy(nmea) # needed because nmea will be patched with new data
+    if not result:
+        result += u'<description>Vessel not found</description>'
+    return result
+
+
+def format_boat_intime(nmea_iterator):
+    result = u''
+    result += KML_HEADER
+    result += STYLE.make_header()
+    result += format_boat_intime_section(nmea_iterator)
+    result += KML_FOOTER
+    return result
+
+
+def format_boat_track_section(nmea_iterator, name=u''):
+    strcoordinates = '<Placemark>\n<LineString>\n<coordinates>\n'
+    segment_length = 0
+    for nmea in nmea_iterator:
+        if name == u'':
+            name = nmea.get_title()
+        if nmea.longitude != AIS_LON_NOT_AVAILABLE and nmea.latitude != AIS_LAT_NOT_AVAILABLE:
+            if segment_length>65000:
+                logging.debug('Line is too long. Splitting.')
+                strcoordinates += ' %.8f,%.8f' % (nmea.longitude/AIS_LATLON_SCALE, nmea.latitude/AIS_LATLON_SCALE)
+                strcoordinates += '</coordinates>\n</LineString>\n</Placemark>\n<Placemark>\n<LineString>\n<coordinates>\n'
+                segment_length = 0
+            else:
+                segment_length += 1
+            strcoordinates += ' %.8f,%.8f' % (nmea.longitude/AIS_LATLON_SCALE, nmea.latitude/AIS_LATLON_SCALE)
+    strcoordinates += '</coordinates>\n</LineString></Placemark>\n'
+
+    result = u''
+    result += u'<name>%s track</name>\n' % name
+    if len(strcoordinates)>39+2*(1+12+1+11)+42+1: # i.e. more than the bare wrapper plus a couple of coordinates
+        result += unicode(strcoordinates)
+    else:
+        result += u'<description>No data available</description>\n'
+    return result
+
+
+def format_boat_track(nmea_iterator):
+    result = u''
+    result += KML_HEADER
+    #result += STYLE.make_header()
+    result += format_boat_track_section(nmea_iterator)
+    result += KML_FOOTER
+    return result
+
+
+
+def kml_to_kmz(inputstr):
+    if isinstance(inputstr, unicode):
+        inputstr = inputstr.encode('utf-8')
+    output = StringIO()
+    zip = zipfile.ZipFile(output, 'w')
+    zip.writestr('doc.kml', inputstr)
+    for iconname in STYLE.used_icons:
+        zip.write('/usr/lib/ais/kmz_icons/'+iconname, iconname)
+    zip.close()
+    return output.getvalue()
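+
+# Illustrative usage sketch, wrapping a generated KML document into a KMZ archive
+# (the MMSI value and output path are made up):
+#   kml = format_fleet(['227006760'], document_name='demo fleet')
+#   open('/tmp/demo.kmz', 'wb').write(kml_to_kmz(kml))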
+    
+
+if __name__ == '__main__':
+    from optparse import OptionParser, OptionGroup
+
+    parser = OptionParser(usage='%prog [options] { mmsi | @fleetfile }+ | all')
+
+    parser.add_option('-d', '--debug',
+        action='store_true', dest='debug', default=False,
+        help="debug mode")
+
+    parser.add_option('-e', '--end',
+        action='store', dest='sdt_end', metavar="'YYYYMMDD HHMMSS'",
+        help="End data processing on that GMT date time. Default is now. If a date is provided without time, time defaults to 235959.")
+    parser.add_option('-s', '--start',
+        action='store', dest='sdt_start', metavar="'YYYYMMDD HHMMSS'",
+        help="Start data processing on that date. Using that option enables multiple output of the same boat. Disabled by default. If a date is provided without time, time defaults to 000000. If other options enable multiple output, defaults to 1 day before --end date/time.")
+    parser.add_option('-g', '--granularity',
+        action='store', type='int', dest='granularity', metavar='SECONDS',
+        help="Dump only one position every granularity seconds. Using that option enables multiple output of the same boat. If other options enable multiple output, defaults to 600 (10 minutes)")
+    parser.add_option('--max',
+        action='store', type='int', dest='max_count', metavar='NUMBER',
+        help="Dump a maximum of NUMBER positions. Using that option enables multiple output of the same boat.")
+
+
+    parser.add_option('--filter-knownposition',
+        action='store_true', dest='filter_knownposition', default=False,
+        help="Eliminate unknown positions from results.")
+
+    parser.add_option('--filter-speedcheck',
+        action='store', type='int', dest='speedcheck', default=200, metavar='KNOTS',
+        help="Eliminate erroneous positions from results, based on impossible speed. Default is %default knots. 0 disables.")
+
+    parser.add_option('--filter-type',
+        action='append', type='int', dest='type_list', metavar="TYPE",
+        help="Process only the given ship type; may be repeated.")
+    parser.add_option('--help-types',
+        action='store_true', dest='help_types', default=False,
+        help="display list of available types")
+
+    parser.add_option('--filter-area',
+        action='store', type='str', dest='area_file', metavar="FILE.KML",
+        help="Only process positions inside the area defined by a KML polygon file.")
+
+    #
+    parser.add_option('--format',
+        choices=('positions', 'track', 'animation'), dest='format', default='positions',
+        help="select output format: positions(*) or track or animation")
+
+    parser.add_option('--kml',
+        action='store_true', dest='output_kml', default=False,
+        help="Output a KML file. Default is to output a KMZ file with icons.")
+    parser.add_option('--inner-kml',
+        action='store_true', dest='output_innerkml', default=False,
+        help="Output a KML fragment file without the <Document> wrappers. The file must be reprocessed to be useful. This option implies --kml.")
+
+    parser.add_option('--style',
+        choices=('fishers', 'pelagos'), dest='style', default='fishers',
+        help="Select one of the predefined display styles: fishers(*) or pelagos.")
+
+    parser.add_option('--icon-size',
+        action='store', type='float', dest='icon_size', metavar='SCALE', default=0.5,
+        help="Set icon size. Default = %default")
+    
+    parser.add_option('--no-names',
+        action='store_const', const=KML_DISPLAYOPT_NONAMES, dest='kml_displayopt_noname', default=0,
+        help="don't show ship names")
+
+    parser.add_option('--show-sources',
+        action='store_const', const=KML_DISPLAYOPT_SOURCES, dest='kml_displayopt_sources', default=0,
+        help="show information source")
+
+    #
+
+    expert_group = OptionGroup(parser, "Expert Options",
+        "You normally don't need any of these")
+
+    expert_group.add_option('--db',
+        action='store', dest='db', default=DBPATH,
+        help="path to filesystem database. Default=%default")
+    parser.add_option_group(expert_group)
+
+    options, args = parser.parse_args()
+
+    
+    if options.help_types:
+        keys = SHIP_TYPES.keys()
+        keys.sort()
+        for k in keys:
+            print k, SHIP_TYPES[k]
+        sys.exit(0)
+
+    DBPATH = options.db
+
+    if options.debug:
+        loglevel = logging.DEBUG
+    else:
+        loglevel = logging.INFO
+    logging.basicConfig(level=loglevel, format='%(asctime)s %(levelname)s %(message)s')
+
+    #
+    # Ships selections
+    #
+
+    if len(args)==0:
+        print >> sys.stderr, "No ship to process"
+        sys.exit(1)
+
+    target_mmsi_iterator = []
+    all_targets = False
+    for arg in args:
+        if arg=='all':
+            all_targets = True
+        elif arg.startswith('@'):
+            target_mmsi_iterator += load_fleet_to_uset(arg[1:])
+        else:
+            target_mmsi_iterator.append(arg)
+    if all_targets:
+        if target_mmsi_iterator:
+            logging.warning('Selecting all ships, ignoring other arguments')
+        target_mmsi_iterator = all_mmsi_generator()
+
+    #
+    # Dates selections
+    #
+
+    if options.sdt_end:
+        # remove non digit characters
+        options.sdt_end = "".join([ c for c in options.sdt_end if c.isdigit()])
+        if len(options.sdt_end)==14:
+            dt_end = datetime.strptime(options.sdt_end, '%Y%m%d%H%M%S')
+        elif len(options.sdt_end)==8:
+            dt_end = datetime.strptime(options.sdt_end, '%Y%m%d')
+            dt_end = datetime.combine(dt_end.date(), time(23,59,59))
+        else:
+            print >> sys.stderr, "Invalid format for --end option"
+            sys.exit(1)
+    else:
+        dt_end = datetime.utcnow()
+    logging.debug('--end is %s', dt_end)
+
+    if options.sdt_start or options.granularity is not None or options.max_count or options.format in ('animation', 'track'):
+        # time period is enabled
+        if options.sdt_start:
+            options.sdt_start = "".join([ c for c in options.sdt_start if c.isdigit()])
+            if len(options.sdt_start)==14:
+                dt_start = datetime.strptime(options.sdt_start, '%Y%m%d%H%M%S')
+            elif len(options.sdt_start)==8:
+                dt_start = datetime.strptime(options.sdt_start, '%Y%m%d')
+            else:
+                print >> sys.stderr, "Invalid format for --start option"
+                sys.exit(1)
+        else:
+            dt_start = dt_end - timedelta(1)
+        if options.granularity is None:
+            options.granularity = 600
+    else:
+        dt_start = None
+        options.max_count = 1
+        if options.granularity is None:
+            options.granularity = 600
+    logging.debug('--start is %s', dt_start)
+
+
+    #
+    # Filters
+    #
+
+    filters=[]
+    
+    if options.filter_knownposition:
+        filters.append(filter_knownposition)
+
+    if options.speedcheck != 0:
+        maxmps = options.speedcheck / 3600. # from knots to NM per seconds
+        filters.append(lambda nmea: filter_speedcheck(nmea, maxmps))
+
+    if options.area_file:
+        area = load_area_from_kml_polygon(options.area_file)
+        filters.append(lambda nmea: filter_area(nmea, area))
+
+    if options.type_list:
+        def filter_type(nmea):
+            #print nmea.type, repr(options.type_list), nmea.type in options.type_list
+            #print repr(nmea.get_dump_row())
+            return nmea.type in options.type_list
+        filters.append(filter_type)
+
+    #
+    # Display options
+    #
+
+    if options.style == 'pelagos':
+        STYLE = PelagosStyle()
+
+    kml_displayopt = options.kml_displayopt_noname | options.kml_displayopt_sources
+
+    STYLE.icon_size = options.icon_size
+
+    if options.output_innerkml:
+        options.output_kml = True
+    #
+    # Processing
+    #
+
+    if options.format=='positions':
+        result = u''
+        if not options.output_innerkml:
+            result += KML_HEADER
+            result += STYLE.make_header()
+        for mmsi in target_mmsi_iterator:
+            nmea_generator = NmeaFeeder(mmsi, dt_end, dt_start, filters, granularity=options.granularity, max_count=options.max_count)
+            for nmea in nmea_generator:
+                result += format_boat_data(nmea, None, kml_displayopt|KML_DISPLAYOPT_HISTORICAL)
+        if not options.output_innerkml:
+            result += KML_FOOTER
+
+    elif options.format=='animation':
+        result = u''
+        if not options.output_innerkml:
+            result += KML_HEADER
+            result += STYLE.make_header()
+        for mmsi in target_mmsi_iterator:
+            nmea_generator = NmeaFeeder(mmsi, dt_end, dt_start, filters, granularity=options.granularity, max_count=options.max_count)
+            result += '<Folder>\n'
+            result += format_boat_intime_section(nmea_generator, kml_displayopt|KML_DISPLAYOPT_HISTORICAL)
+            result += '</Folder>\n'
+        if not options.output_innerkml:
+            result += KML_FOOTER
+               
+    elif options.format=='track':
+        result = u''
+        if not options.output_innerkml:
+            result += KML_HEADER
+            # don't call STYLE.make_header since there is no icons
+        for mmsi in target_mmsi_iterator:
+            nmea_generator = NmeaFeeder(mmsi, dt_end, dt_start, filters, granularity=options.granularity, max_count=options.max_count)
+            result += '<Folder>\n'
+            result += format_boat_track_section(nmea_generator)
+            result += '</Folder>\n'
+        if not options.output_innerkml:
+            result += KML_FOOTER
+
+    else:
+        print >> sys.stderr, 'Unknown output format'
+        sys.exit(1)
+        
+    result = result.encode('utf-8')
+
+    if not options.output_kml:
+        result = kml_to_kmz(result)
+
+    print result
diff --git a/bin/udp.py b/bin/udp.py
new file mode 100755 (executable)
index 0000000..f30e494
--- /dev/null
@@ -0,0 +1,256 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import sys, os, logging
+from time import time as get_timestamp
+from datetime import datetime
+from socket import *
+import rrdtool
+
+STATS_RATE = 60
+NMEA_DIR = '/var/lib/ais/nmea'
+STATS_DIR = '/var/lib/ais/stats'
+
+def formataddr(addr):
+    if addr.startswith('::ffff:'):
+        return addr[7:]
+    else:
+        return '['+addr+']'
+
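+# e.g. formataddr('::ffff:192.0.2.1') returns '192.0.2.1' (IPv4-mapped address)
+#      and formataddr('2001:db8::1') returns '[2001:db8::1]'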
+
+dumpfile = None
+last_t = None
+last_dumpdate = None
+last_from = None
+def log_line(serverport, t, from_, line):
+    global dumpfile, last_dumpdate, last_t, last_from
+
+    if t != last_t:
+        dumpdate = datetime.utcfromtimestamp(t).date()
+        if dumpdate != last_dumpdate:
+            if dumpfile:
+                dumpfile.close()
+            filename = os.path.join(NMEA_DIR, 'hub-'+str(serverport)+'-'+dumpdate.strftime('%Y%m%d')+'.dump')
+            logging.info('Opening %s', filename)
+            dumpfile = file(filename, 'a', 1)
+            last_dumpdate = dumpdate
+            last_from = None # make sure From: is present in new files
+        
+        dumpfile.write('Timestamp: %d\n' % t)
+        last_t = t
+
+    if from_ != last_from:
+        dumpfile.write('From: %s:%s\n' % from_)
+        last_from = from_
+
+    dumpfile.write(line+'\n')
+
+
+inpeers = {}
+class InPeer:
+    def __init__(self, addr, port=None):
+        self.addr = addr
+        self.port = port
+        self.data = '' # previous line fragment that was not fully parsed
+        # total values
+        self.npackets = 0 # ip packet count
+        self.nlines = 0 # aivdm line count
+        self.nbytes = 0 # byte count - without UDP header
+        self.nbytes_ethernet = 0 # byte count - with UDP header
+        # previous values
+        self.prev_npackets = 0 # ip packet count
+        self.prev_nlines = 0 # aivdm line count
+        self.prev_nbytes = 0 # byte count - without UDP header
+        self.prev_nbytes_ethernet = 0 # byte count - with UDP header
+        self.stats_last_time = get_timestamp()
+
+    def __repr__(self):
+        result = formataddr(self.addr)
+        if options.sport:
+            result += ':'+str(self.port)
+        return result
+
+    def get_stats(self):
+        # return packet/line/byte rates since the previous call
+        newt = get_timestamp()
+        delta_t = newt - self.stats_last_time
+        assert delta_t != 0, "Can't generate statistical rates"
+        self.stats_last_time = newt
+        rpackets = float(self.npackets - self.prev_npackets) / delta_t
+        self.prev_npackets = self.npackets
+        rlines = float(self.nlines - self.prev_nlines) / delta_t
+        self.prev_nlines = self.nlines
+        rbytes = float(self.nbytes - self.prev_nbytes) / delta_t
+        self.prev_nbytes = self.nbytes
+        rbytes_ethernet = float(self.nbytes_ethernet - self.prev_nbytes_ethernet) / delta_t
+        self.prev_nbytes_ethernet = self.nbytes_ethernet
+        return rpackets, rlines, rbytes, rbytes_ethernet
+
+    @staticmethod
+    def get_by_addr(recv_from_info):
+        addr, port, flowinfo, scopeid = recv_from_info
+        if options.sport:
+            id = addr, port
+        else:
+            id = addr
+        if flowinfo != 0 or scopeid != 0:
+            logging.debug('Weird recv_from_info: %s', recv_from_info)
+
+        if id not in inpeers:
+            inpeer = InPeer(addr, port)
+            inpeers[id] = inpeer
+            logging.info('New connection from %s', inpeer)
+            
+        return inpeers[id]
+
+if __name__ == "__main__":
+    from optparse import OptionParser
+    parser = OptionParser('%prog [options] port')
+    parser.add_option('-d', '--debug', help="debug mode", action='store_true', dest='debug', default=False)
+    parser.add_option('-f', '--forward', help="Forward incoming data to this peer, e.g. [::ffff:12.23.34.45]:1234. May be repeated.", action='append', dest='peers', default=[])
+    #parser.add_option('--stats', help="Seconds between statistics repports. 0 to disable", action='store', type='int', dest='stat_rate', default=0)
+    parser.add_option('--rrd', help="RRD file to store statistics, relative to %s" % STATS_DIR, action='store', type='str', dest='rrd_file', default=None)
+    parser.add_option('--stdout', help="Print incoming packets to stdout", action='store_true', dest='stdout', default=False)
+    parser.add_option('--src-port', help="Consider (source1, port1) and (source1, port2) different sources", action='store_true', dest='sport', default=False)
+    options, args = parser.parse_args()
+
+    if options.debug:
+        loglevel = logging.DEBUG
+    else:
+        loglevel = logging.INFO
+    logging.basicConfig(level=loglevel, format='%(asctime)s %(levelname)s %(message)s')
+
+    outpeers = {}
+    for peer in options.peers:
+        colpos = peer.rfind(':')
+        if colpos == -1:
+            print >> sys.stderr, 'Missing colon in host:port string'
+            sys.exit(1)
+        host = peer[:colpos]
+        if host.startswith('['):
+            assert host[-1]==']'
+            host = host[1:-1]
+        port = int(peer[colpos+1:])
+        outpeers[(host, port)] = 0
+
+    if len(args)!=1:
+        print >> sys.stderr, "Missing port"
+        sys.exit(1)
+    else:
+        serverport = int(args[0])
+
+    try:
+        s = socket(AF_INET6, SOCK_DGRAM)
+        s.settimeout(STATS_RATE)
+        s.bind(('', serverport))
+        last_stat = int(get_timestamp()) # drop fractional seconds
+        while True:
+            try:
+                data, recv_from_info = s.recvfrom(1500) # MTU
+                
+                inpeer = InPeer.get_by_addr(recv_from_info)
+                inpeer.npackets += 1
+                inpeer.nbytes += len(data)
+                inpeer.nbytes_ethernet += len(data) + 28 # 28 = IPv4 + UDP header overhead
+                logging.debug('IN %s:%s %s', formataddr(recv_from_info[0]), recv_from_info[1], repr(data))
+
+                t = int(get_timestamp())
+                data = inpeer.data + data
+                while True:
+                    idx_line_end = sys.maxint
+                    for c in '\r\0\n':
+                        idx = data.find(c)
+                        if idx != -1 and idx < idx_line_end:
+                            idx_line_end = idx
+                    if idx_line_end == sys.maxint:
+                        break
+
+                    #logging.debug('data=%s idxll=%s', repr(data), idx_line_end)
+                    line = data[:idx_line_end]
+                    # skip the separator characters; keep any unterminated fragment for the next packet
+                    while idx_line_end < len(data) and data[idx_line_end] in '\r\n\0':
+                        idx_line_end += 1
+                    data = data[idx_line_end:]
+                    logging.debug('line=%s data=%s', repr(line), repr(data))
+
+                    while len(line) and line[-1] in '\r\n\0':
+                        line = line[:-1] # remove extra trails
+                    if not len(line):
+                        continue # ignore empty line
+                    
+                    inpeer.nlines += 1
+
+                    if options.stdout:
+                        sys.stdout.write(line+'\r\n')
+                    
+                    
+                    # dump the line to file
+                    log_line(serverport, t, (formataddr(inpeer.addr), recv_from_info[1]), line)
+                
+                    # forward the line
+                    logging.debug('OUT %s', repr(line+'\r\n'))
+                    for outpeer, stats in outpeers.iteritems():
+                        #logging.debug('OUT %s:%s %s', formataddr(outpeer[0]), outpeer[1], repr(line+'\r\n'))
+                        s.sendto(line+'\r\n', outpeer)
+                        outpeers[outpeer] += 1
+
+                # we don't want any stdout buffering
+                if options.stdout:
+                    sys.stdout.flush()
+                
+                inpeer.data = data
+
+            except timeout:
+                t = int(get_timestamp())
+            
+            # log statistics every STATS_RATE seconds
+            if t - last_stat > STATS_RATE:
+                logging.debug('Statistics: %d input sources', len(inpeers.keys()))
+                total_rpackets = 0.0
+                total_rlines = 0.0
+                total_rbytes = 0.0
+                total_rbytes_ethernet = 0.0
+                for inpeer in inpeers.itervalues():
+                    rpackets, rlines, rbytes, rbytes_ethernet = inpeer.get_stats()
+                    #logging.debug('    %s - %d packets - %d AIVDM lines - %d bytes payload - %d bytes internet', inpeer, inpeer.npackets, inpeer.nlines, inpeer.nbytes, inpeer.nbytes_ethernet)
+                    logging.debug('    %s - %.1f packets/s - %.1f AIVDM lines/s - %d bps payload - %d bps internet', inpeer, rpackets, rlines, 8*rbytes, 8*rbytes_ethernet)
+                    total_rpackets += rpackets
+                    total_rlines += rlines
+                    total_rbytes += rbytes
+                    total_rbytes_ethernet += rbytes_ethernet
+                logging.info('Rates: %.1f packets/s - %.1f AIVDM lines/s - %d bps payload - %d bps internet', total_rpackets, total_rlines, 8*total_rbytes, 8*total_rbytes_ethernet)
+                #logging.info('Statistics: %d OUT peers', len(outpeers.keys()))
+                #for outpeer, n in outpeers.iteritems():
+                #    logging.info('    %s:%s %d packets', formataddr(outpeer[0]), outpeer[1], n)
+                if options.rrd_file:
+                    rrd_file = os.path.join(STATS_DIR, options.rrd_file)
+                    try:
+                        logging.debug('rrdtool <- N:%s:%s:%s:%s' % (total_rpackets, total_rlines, total_rbytes, total_rbytes_ethernet))
+                        rrdtool.update(rrd_file, 'N:%s:%s:%s:%s' % (total_rpackets, total_rlines, total_rbytes, total_rbytes_ethernet))
+                    except rrdtool.error, e:
+                        logging.error(e)
+                        if 'No such file or directory' in e.message:
+                            # every minute for 6 hours
+                            # every 10 minutes for 48 hours
+                            # every hour for 2 weeks
+                            # every 6 hours for 90 days
+                            # every day for 1 year
+                            # every 7 days for 5 years
+                            args = rrd_file, \
+                                '-s', '60', \
+                                'DS:packets:GAUGE:120:0:U', \
+                                'DS:lines:GAUGE:120:0:U', \
+                                'DS:bytes:GAUGE:120:0:U', \
+                                'DS:rawbytes:GAUGE:120:0:U', \
+                                'RRA:AVERAGE:0.9:1:360', \
+                                'RRA:AVERAGE:0.9:10:288', \
+                                'RRA:AVERAGE:0.9:60:336', \
+                                'RRA:AVERAGE:0.9:360:360', \
+                                'RRA:AVERAGE:0.9:1440:366', \
+                                'RRA:AVERAGE:0.9:10080:261'
+                            logging.warning('Generating stat file %s', args)
+                            rrdtool.create(*args)
+                last_stat = t
+    except KeyboardInterrupt:
+        logging.critical('Received Ctrl+C. Shutting down.')
+
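+# Illustrative invocation (addresses and ports are made up): listen on UDP port 12345,
+# echo incoming NMEA lines to stdout and forward them to another hub:
+#   ./udp.py --stdout -f '[::ffff:192.0.2.10]:4100' 12345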
diff --git a/html_templates/base.html b/html_templates/base.html
new file mode 100644 (file)
index 0000000..1978257
--- /dev/null
@@ -0,0 +1,50 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<title>{% block title %}AIS{% endblock %}</title>
+<style>
+body {
+margin: 0;
+font-family: sans-serif;
+}
+#header {
+background: lightblue;
+border-bottom: 1px solid darkblue;
+padding: 5px;
+}
+#header #bannertitle {
+float: left;
+font-size: 250%;
+}
+#header #bannermenu {
+float: right;
+}
+#body {
+margin: 1ex;
+}
+#footer {
+text-align: center;
+background: lightblue;
+border-top: 1px solid darkblue;
+font-size: 70%;
+}
+</style>
+<div id=header>
+    <span id=bannertitle>AIS database</span>
+    <span id=bannermenu>Welcome <a href="/user/{{user.login}}/">{{ user.name }}</a> - <a href="/user/{{user.login}}/change_password">change password</a></span>
+    <br clear=both>
+</div>
+<div id=body>
+
+{% block breadcrumbs %}
+<a href='/'>home</a>
+{% endblock %}
+<br>
+<br>
+
+{% block content %}{% endblock %}
+</div>
+
+<br>
+<br>
+<div id=footer>
+This is <a href="http://www.gnu.org/licenses/gpl.html">GPL software</a> • <a href="http://www.debian.org/">Debian/GNU Linux</a> powered • <a href="http://www.ipv6.org/">IPv6 ready</a> • This server runs exclusively on <a href="http://www.enercoop.coop/">renewable energy</a>
+</div>
diff --git a/html_templates/fleet.html b/html_templates/fleet.html
new file mode 100644 (file)
index 0000000..2d10617
--- /dev/null
@@ -0,0 +1,14 @@
+{% extends "fleets.html" %}
+
+{% block breadcrumbs %}
+{{ block.super }}
+&gt; <a href="/fleet/{{fleet.name}}/">{{ fleet.name }}</a>
+{% endblock %}
+
+{% block content %}
+<h3>Details of fleet {{ fleet.name }}</h3>
+{{ fleet.description }}<br><br>
+<li><a href="lastpos">Last positions</a>
+<li><a href="list">Vessel list</a> ({{ fleet.vessel_count }} vessels)
+<li><a href="users">User list</a> ({{ fleet.user_count }} users)
+{% endblock %}
diff --git a/html_templates/fleet_users.html b/html_templates/fleet_users.html
new file mode 100644 (file)
index 0000000..7cf6bbf
--- /dev/null
@@ -0,0 +1,28 @@
+{% extends "fleet.html" %}
+
+{% block breadcrumbs %}
+{{ block.super }}
+&gt; <a href="/fleet/{{fleet.name}}/users">users</a>
+{% endblock %}
+
+{% block content %}
+{{ message }}
+
+<h3>Users of {{ fleet.name }} fleet</h3>
+{% for fleetuser in fleetusers %}
+<li><a href='/user/{{fleetuser.user.login}}/'>{{ fleetuser.user.name }}</a>
+<span style="font-size:80%"><a href='?action=revoke&amp;user={{fleetuser.user.login}}'>revoke access</a></span>
+{% endfor %}
+
+<h3>Add a user</h3>
+<form method=get>
+<input type=hidden name=action value=add>
+<select name=user>
+{% for user in otherusers %}
+<option value={{user.login}}>{{ user.name }}</option>
+{% endfor %}
+</select>
+<input type=submit value=Share>
+</form>
+Can't find the person you want? Do not share your password: just <a href='/user/add'>create a user account</a> for them, then come back to this page.
+{% endblock %}
diff --git a/html_templates/fleet_vessel_add.html b/html_templates/fleet_vessel_add.html
new file mode 100644 (file)
index 0000000..9d38ea5
--- /dev/null
@@ -0,0 +1,17 @@
+{% extends "fleet.html" %}
+
+{% block breadcrumbs %}
+{{ block.super }}
+&gt; <a href="/fleet/{{fleet.name}}/add_vessel">add vessel</a>
+{% endblock %}
+
+{% block content %}
+<h3>Add a vessel to {{ fleet.name }} fleet</h3>
+
+<form method="post">
+<table>
+{{ form.as_table }}
+</table>
+<input type=submit>
+</form>
+{% endblock %}
diff --git a/html_templates/fleet_vessels.html b/html_templates/fleet_vessels.html
new file mode 100644 (file)
index 0000000..a2086d3
--- /dev/null
@@ -0,0 +1,36 @@
+{% extends "fleet.html" %}
+
+{% block breadcrumbs %}
+{{ block.super }}
+&gt; <a href="/fleet/{{fleet.name}}/list">list</a>
+{% endblock %}
+
+{% block content %}
+<h3>Vessels in {{ fleet.name }} fleet</h3>
+
+<form method=post action=add>Add a vessel by MMSI: <input name=mmsi><input type=submit value=Add></form>
+{% if vessels %}
+<table>
+<tr>
+  <th>MMSI
+  <th>Name
+  <th>IMO
+  <th>Callsign
+  <th>Type
+  <th>Updated
+{% for vessel in vessels %}
+{% with vessel.get_last_nmea as nmea %}
+<tr>
+  <td><a href='/vessel/{{nmea.strmmsi}}/'>{{ nmea.get_mmsi_public }}</a>
+  <td><a href='/vessel/{{nmea.strmmsi}}/'>{{ nmea.get_name }}</a>
+  <td>{{ vessel.imo|default_if_none:""}}
+  <td>{{ nmea.get_callsign }}
+  <td>{{ nmea.get_shiptype }}
+  <td>{{ nmea.get_last_updated_str }}
+{% endwith %}
+{% endfor %}
+</table>
+{% else %}
+Fleet is empty.
+{% endif %}
+{% endblock %}
diff --git a/html_templates/fleets.html b/html_templates/fleets.html
new file mode 100644 (file)
index 0000000..468a80b
--- /dev/null
@@ -0,0 +1,15 @@
+{% extends "base.html" %}
+
+{% block breadcrumbs %}
+{{ block.super }}
+&gt; <a href="/fleet/">fleet</a>
+{% endblock %}
+
+{% block content %}
+<h3>Select a fleet:</h3>
+{% for fleetuser in fleetusers %}
+<h4><a href='{{ fleetuser.fleet.name }}/'>{{ fleetuser.fleet.name }}</a></h4>
+{{ fleetuser.fleet.vessel_count }} vessels - {{ fleetuser.fleet.user_count }} users<br>
+{{ fleetuser.fleet.description }}
+{% endfor %}
+{% endblock %}
diff --git a/html_templates/index.html b/html_templates/index.html
new file mode 100644 (file)
index 0000000..053a520
--- /dev/null
@@ -0,0 +1,31 @@
+{% extends "base.html" %}
+
+{% block content %}
+<h2>Main menu</h2>
+<li> <a href='vessel/'>Vessels</a>
+<li> <a href='fleet/'>Fleets</a>
+<li> <a href='user/'>Users</a>
+<hr>
+<h3>External links</h3>
+<li><a href='http://www.marinetraffic.com/' target='_blank'>Marine Traffic</a>
+<li><a href='http://www.seasearcher.com/' target='_blank'>Sea Searcher</a>
+<li><a href='http://www.vesseltracker.com/' target='_blank'>Vessel Tracker</a>
+<li><a href='http://www.aislive.com/' target='_blank'>AIS Live</a>
+<li><a href='http://www.digital-seas.com/' target='_blank'>Digital Seas</a>
+<li><a href='http://www.aishub.net/' target='_blank'>Ais Hub</a>
+<li><a href='http://www.coaa.co.uk/shipplotter.htm' target='_blank'>Ship Plotter</a>
+<li><a href='http://en.wikipedia.org/wiki/Automatic_Identification_System' target='_blank'>AIS on Wikipedia</a>
+<li><a href='http://www.dxinfocentre.com/tropo_eur.html' target='_blank'>Tropospheric Ducting Forecasts: Europe</a>
+<hr>
+<h3>News</h3>
+<b>2010-05-27</b><br>
+Some <a href="source/">statistics</a> are now available for many sources.<br>
+<br>
+<b>2010-05-24</b><br>
+The system now automatically checks the ICCAT web site for updates to the official list of vessels involved in the tuna fishery. Contact me if you're interested in receiving that information.<br>
+<br>
+<b>2010-05-17</b><br>
+Peering with aishub.net activated. Quick and dirty: no statistics, no logs.<br>
+Users can now share fleets / revoke access by themselves.<br>
+Added fleet descriptions and creation dates.<br>
+{% endblock %}
diff --git a/html_templates/logout.html b/html_templates/logout.html
new file mode 100644 (file)
index 0000000..464f158
--- /dev/null
@@ -0,0 +1,5 @@
+{% extends "base.html" %}
+
+{% block content %}
+Have a nice day!
+{% endblock %}
diff --git a/html_templates/sources.html b/html_templates/sources.html
new file mode 100644 (file)
index 0000000..c4e4114
--- /dev/null
@@ -0,0 +1,22 @@
+{% extends "base.html" %}
+
+{% block breadcrumbs %}
+{{ block.super }}
+&gt; <a href="/source/">sources</a>
+{% endblock %}
+
+{% block content %}
+<center>
+{% for source in sources %}
+{% for period in periods %}
+<img src='/stats/{{source}}-{{period.name_tiny}}-bytes.png'>
+{% endfor %}
+<br>
+{% for period in periods %}
+<img src='/stats/{{source}}-{{period.name_tiny}}-counts.png'>
+{% endfor %}
+<br>
+<br>
+{% endfor %}
+</center>
+{% endblock %}
diff --git a/html_templates/user_change_password.html b/html_templates/user_change_password.html
new file mode 100644 (file)
index 0000000..258ebee
--- /dev/null
@@ -0,0 +1,21 @@
+{% extends "user_edit.html" %}
+
+{% block title %}
+Password change - AIS
+{% endblock %}
+
+{% block breadcrumbs %}
+{{ block.super }}
+&gt; <a href="/user/{{auser.login}}/change_password">change password</a>
+{% endblock %}
+
+{% block content %}
+<h3>Change password of {{ auser.name }}</h3>
+
+<form method="post">
+<table>
+{{ form.as_table }}
+</table>
+<input type=submit>
+</form>
+{% endblock %}
diff --git a/html_templates/user_delete.html b/html_templates/user_delete.html
new file mode 100644 (file)
index 0000000..08ffd34
--- /dev/null
@@ -0,0 +1,15 @@
+{% extends "user_edit.html" %}
+
+{% block breadcrumbs %}
+{{ block.super }}
+&gt; <a href="/user/{{auser.login}}/delete">delete</a>
+{% endblock %}
+
+{% block content %}
+<h3>Delete user account {{ auser.name }}</h3>
+You are about to delete account {{ auser.name }}. <br>
+This will also delete sub-accounts.<br>
+Are you sure?<br>
+<a href="?confirm=yes">Yes, delete it</a><br>
+
+{% endblock %}
diff --git a/html_templates/user_detail.html b/html_templates/user_detail.html
new file mode 100644 (file)
index 0000000..3dd56e2
--- /dev/null
@@ -0,0 +1,30 @@
+{% extends "users.html" %}
+
+{% block breadcrumbs %}
+{{ block.super }}
+{% if auser.id %}
+&gt; <a href="/user/{{auser.login}}/">{{auser.login}}</a>
+{% else %}
+&gt; <a href="/user/add">add</a>
+{% endif %}
+{% endblock %}
+
+{% block content %}
+<h3>AIS user detail</h3>
+
+Full name: <b>{{ auser.name }}</b><br>
+Login name: <b>{{ auser.login }}</b><br>
+{% if auser.admin_ok %}
+Email: <b>{{ auser.email }}</b><br>
+{% endif %}
+<br>
+Account created {% if auser.father %}by <b><a href="../{{auser.father.login}}/">{{ auser.father.name }}</a></b>{% endif %} on <b>{{ auser.creation_datetime|date}}.</b><br>
+<br>
+
+{% if auser.admin_ok %}
+Sub account administration:<br>
+<a href="edit">edit account</a> -
+<a href="change_password">change password</a> -
+<a href="delete">delete account</a>
+{% endif %}
+{% endblock %}
diff --git a/html_templates/user_edit.html b/html_templates/user_edit.html
new file mode 100644 (file)
index 0000000..d099de3
--- /dev/null
@@ -0,0 +1,19 @@
+{% extends "user_detail.html" %}
+
+{% block breadcrumbs %}
+{% if auser.id %}
+{{ block.super }}
+&gt; <a href="/user/{{auser.login}}/edit">edit</a>
+{% endif %}
+{% endblock %}
+
+{% block content %}
+<h3>Edit AIS user</h3>
+
+<form method="post">
+<table>
+{{ form.as_table }}
+</table>
+<input type=submit>
+</form>
+{% endblock %}
diff --git a/html_templates/users.html b/html_templates/users.html
new file mode 100644 (file)
index 0000000..4c28fee
--- /dev/null
@@ -0,0 +1,22 @@
+{% extends "base.html" %}
+
+{% block breadcrumbs %}
+{{ block.super }} &gt; <a href="/user/">user</a>
+{% endblock %}
+
+{% block content %}
+<h3>User accounts</h3>
+<a href=add>+ Create new account</a>
+<br>
+<br>
+{% for auser in users %}
+<li><a href='{{ auser.login }}/'>{{ auser.name }}</a>
+{%comment%}{% if auser.father %}sponsor: {{ auser.father }}{% endif %}{%endcomment%}
+{% if auser.password_hash %}{% else %}<b>Account disabled: No password</b>{% endif %}
+{% if auser.admin_ok %}
+<span style="font-size:80%">
+<a href='{{auser.login}}/change_password'>change password</a> - <a href='{{auser.login}}/delete'>delete account</a>
+</span>
+{% endif %}
+{% endfor %}
+{% endblock %}
diff --git a/html_templates/vessel.html b/html_templates/vessel.html
new file mode 100644 (file)
index 0000000..53667d7
--- /dev/null
@@ -0,0 +1,51 @@
+{% extends "vessel_index.html" %}
+
+{% block breadcrumbs %}
+{{ block.super }}
+&gt; <a href="/vessel/{{nmea.strmmsi}}/">{{nmea.strmmsi}}</a>
+{% endblock %}
+
+{% block content %}
+<title>{{ nmea.get_title}} - ship details - AIS</title>
+<h3>{{ nmea.get_title }}</h3>
+MMSI: {{ nmea.get_mmsi_public }}<br>
+Last updated: {{ nmea.get_last_updated_str }}<br>
+Flag: {{ nmea.get_flag }}<br>
+{% comment %}Imo: <a href='http://www.xvas.it/SPECIAL/xVAS4Ts.php?imo={{ nmea.imo }}&amp;mode='>{{ nmea.imo }}</a><br>{% endcomment %}
+IMO: {{ nmea.imo }}<br>
+Callsign: {{ nmea.get_callsign }}<br>
+Vessel type: {{ nmea.get_shiptype }}<br>
+{% if nmea.get_length %}
+Length: {{ nmea.get_length }} meters<br>
+{% endif %}
+{% if nmea.get_width %}
+Width: {{ nmea.get_width }} meters<br>
+{% endif %}
+Draught: {{ nmea.get_draught_str }}<br>
+<br>
+Status: {{ nmea.get_status }}<br>
+Speed over ground: {{ nmea.get_sog_str }}<br>
+Rotation: {{ nmea.get_rot_str }}<br>
+Latitude: {{ nmea.get_latitude_str }}<br>
+Longitude: {{ nmea.get_longitude_str }}<br>
+Course over ground: {{ nmea.get_cog_str }}<br>
+Heading: {{ nmea.get_heading_str }}<br>
+<br>
+Destination: {{ nmea.get_destination }}<br>
+ETA: {{ nmea.get_eta_str }}<br>
+<br>
+Sources: position by {{ nmea.get_source_1_str }}, voyage by {{ nmea.get_source_5_str }}<br>
+<h2>Get historical track</h2>
+<form action='track'>
+Last <input name=ndays size=3 value=7> days <br>
+Resolution <input name=grain size=4 value=3600> seconds<br>
+<input type=submit value=Get>
+</form>
+
+<h2>Get historical animation</h2>
+<form action='animation'>
+Last <input name=ndays size=3 value=7> days <br>
+Resolution <input name=grain size=4 value=3600> seconds<br>
+<input type=submit value=Get>
+</form>
+{% endblock %}
diff --git a/html_templates/vessel_index.html b/html_templates/vessel_index.html
new file mode 100644 (file)
index 0000000..9a30120
--- /dev/null
@@ -0,0 +1,16 @@
+{% extends "base.html" %}
+
+{% block breadcrumbs %}
+{{ block.super }}
+&gt; <a href="/vessel/">vessel</a>
+{% endblock %}
+
+{% block content %}
+<h3>Search for a vessel</h3>
+<form method=post>
+<table>
+{{ form.as_table }}
+</table>
+<input type=submit>
+</form>
+{% endblock %}
diff --git a/html_templates/vessel_manual_input.html b/html_templates/vessel_manual_input.html
new file mode 100644 (file)
index 0000000..9d2db77
--- /dev/null
@@ -0,0 +1,21 @@
+{% extends "vessel.html" %}
+
+{% block breadcrumbs %}
+{{ block.super }}
+&gt; <a href="/vessel/{{nmea.strmmsi}}/manual_input">manual input</a>
+{% endblock %}
+
+{% block content %}
+<title>{{ nmea.get_title}} - ship details - AIS</title>
+
+<h3>{{ nmea.get_title }} - Manual input</h3>
+Use this form to enter data acquired without AIS.
+<p>
+<form method="post">
+<table>
+{{ form.as_table }}
+</table>
+<input type=submit>
+</form>
+
+{% endblock %}
diff --git a/html_templates/vessels.html b/html_templates/vessels.html
new file mode 100644 (file)
index 0000000..168bd98
--- /dev/null
@@ -0,0 +1,30 @@
+{% extends "vessel_index.html" %}
+
+{% load ais_extras %}
+
+{% block breadcrumbs %}
+{{ block.super }}
+&gt; <a href="/vessel/search">search</a>
+{% endblock %}
+
+{% block content %}
+<h3>Vessel search results</h3>
+{% if vessels %}
+<table>
+<tr>
+  <th>MMSI
+  <th>Name
+  <th>IMO
+  <th>Callsign
+{% for vessel in vessels %}
+<tr>
+  <td><a href='/vessel/{{vessel.mmsi|sqlmmsi_to_strmmsi}}/'>{{ vessel.mmsi|sqlmmsi_to_strmmsi }}</a>
+  <td><a href='/vessel/{{vessel.mmsi|sqlmmsi_to_strmmsi}}/'>{{ vessel.name }}</a>
+  <td>{{ vessel.imo|default_if_none:""}}
+  <td>{{ vessel.callsign }}
+{% endfor %}
+</table>
+{% else %}
+No result.
+{% endif %}
+{% endblock %}
diff --git a/kmz_icons/boat-invis.png b/kmz_icons/boat-invis.png
new file mode 100644 (file)
index 0000000..73b0913
Binary files /dev/null and b/kmz_icons/boat-invis.png differ
diff --git a/kmz_icons/boat-stop.png b/kmz_icons/boat-stop.png
new file mode 100644 (file)
index 0000000..a6a1583
Binary files /dev/null and b/kmz_icons/boat-stop.png differ
diff --git a/kmz_icons/boat.png b/kmz_icons/boat.png
new file mode 100644 (file)
index 0000000..dbbdef7
Binary files /dev/null and b/kmz_icons/boat.png differ
diff --git a/kmz_icons/capital_small.png b/kmz_icons/capital_small.png
new file mode 100644 (file)
index 0000000..94677e1
Binary files /dev/null and b/kmz_icons/capital_small.png differ
diff --git a/kmz_icons/plane.png b/kmz_icons/plane.png
new file mode 100644 (file)
index 0000000..af2c3be
Binary files /dev/null and b/kmz_icons/plane.png differ