+__pycache__
etc/config
etc/database
+etc/httpd.conf
etc/planeplotter
etc/shipplotter
= Required packages
===================
-libgps-dev, postgresql, python (>= 2.5), python-serial, python-rrdtool, python-psycopg2, python-cracklib, libjs-jquery, apache2, libapache2-mod-python, python-django, xz-utils, python-daemon, ntp, python-daemon
+#NNNlibgps-dev, postgresql, python (>= 2.5), python-serial, python-rrdtool, python-psycopg2, python3-cracklib, libjs-jquery, apache2, libapache2-mod-python, python-django, xz-utils, python-daemon, ntp, python-daemon
+
+libgps-dev postgresql python3-serial python3-psycopg2 python3-cracklib libjs-jquery apache2 xz-utils python3-certbot-apache libapache2-mod-wsgi-py3 python3-django python3-daemon ntp
+#NNN libgps-dev
+#NNN python3-rrdtool
-libgps-dev postgresql python-serial python-rrdtool python-psycopg2 python-cracklib libjs-jquery apache2 libapache2-mod-python python-django xz-utils python-daemon ntp python-daemon
-
-python-decoratedstr (from nirgal.com)
+python3-decoratedstr (from nirgal.com)
python-htmlentities (from nirgal.com)
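
To install the packaged dependencies on a Debian-based system, something like the following should work (package names may vary slightly between releases):

apt-get install libgps-dev postgresql python3-serial python3-psycopg2 python3-cracklib libjs-jquery apache2 xz-utils python3-certbot-apache libapache2-mod-wsgi-py3 python3-django python3-daemon ntp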
= File system setup
===================
+The disk will contain many small files. It is recommended to use "-T small" when creating the filesystem.
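For example, when formatting a dedicated partition for /var/lib/ais (the device name below is only an illustration):

mke2fs -t ext4 -T small /dev/sdXN
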
addgroup ais
mkdir --mode 2775 /var/lib/ais # mode g+ws
chown :ais /var/lib/ais
= Python module setup
=====================
-ln -s /home/nirgal/kod/ais/bin /usr/share/pyshared/ais
-find /usr/share/pyshared/ais/ -name *.py > /usr/share/python-support/ais.public
-update-python-modules /usr/share/python-support/ais.public
+ln -s /home/nirgal/ais/bin /usr/lib/python3/dist-packages/ais
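
A quick check that the module is importable system-wide:

python3 -c 'import ais'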
= Kernel tuning
===============
= Autostart
===========
-ln -s /home/nirgal/kod/ais/etc/init.sh /etc/init.d/ais
+ln -s /etc/ais/init.sh /etc/init.d/ais
update-rc.d ais defaults
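
The daemon can then be started right away with:

service ais start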
= Apache setup
==============
+ln -s /etc/ais/httpd.conf /etc/apache2/sites-available/ais.conf
+
+mkdir /usr/share/ais
+ln -s /home/nirgal/ais/www_templates /usr/share/ais/
+ln -s /home/nirgal/ais/kmz_icons /usr/share/ais/
+
+ln -s /home/nirgal/ais/www /var/www/ais
+
adduser www-data ais
Change the Apache umask in /etc/apache2/envvars from 022 to 002 so that new folders are group writable. If there is no such line, just add "umask 002" at the end of the file.
a2enmod ssl headers rewrite
+a2ensite ais
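
Apache normally needs a configuration check and a reload for the new site to become active:

apache2ctl configtest
systemctl reload apache2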
+
+
+certbot certonly --authenticator webroot --installer apache --webroot-path /var/www/ais/ --cert-name ais -d ais.nirgal.com --register-unsafely-without-email
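
Certificate renewal can be tested afterwards with:

certbot renew --dry-run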
-#!/usr/bin/env python
-# -*- encoding: utf-8
+#!/usr/bin/env python3
"""
Library for areas.
Basic usage is checking whether a point is inside an area.
FIXME: It should work using polar coordinates, but currently works in 2D.
"""
-from __future__ import division
-
-__all__ = [ 'Area', 'load_area_from_kml_polygon', 'list_areas' ]
-
import os
import re
-AREA_DIR = u'/var/lib/ais/areas/'
+__all__ = ['Area', 'load_area_from_kml_polygon', 'list_areas']
+
+AREA_DIR = '/var/lib/ais/areas/'
+
class Area:
"""
"""
self.points.append(point)
        # min/max doesn't work around the international date line...
- #if len(self.points)==1:
- # self.min = point
- # self.max = point
- # return
- #if point[0] < self.min[0]:
- # self.min = (point[0], self.min[1])
- #elif point[0] > self.max[0]:
- # self.max = (point[0], self.max[1])
- #if point[1] < self.min[1]:
- # self.min = (self.min[0], point[1])
- #elif point[1] > self.max[1]:
- # self.max = (self.max[0], point[1])
+ # if len(self.points)==1:
+ # self.min = point
+ # self.max = point
+ # return
+ # if point[0] < self.min[0]:
+ # self.min = (point[0], self.min[1])
+ # elif point[0] > self.max[0]:
+ # self.max = (point[0], self.max[1])
+ # if point[1] < self.min[1]:
+ # self.min = (self.min[0], point[1])
+ # elif point[1] > self.max[1]:
+ # self.max = (self.max[0], point[1])
def reverse(self):
'''
def check(self):
"""
- Area library self-test:
+ Area library self-test:
We only support counter-clockwise and convex areas.
"""
for point in self.points:
if not self.points:
return False
# first test the bounding box
- #if point[0] < self.min[0] \
- # or point[0] > self.max[0] \
- # or point[1] < self.min[1] \
- # or point[1] > self.max[1] :
- # return False
+ # if point[0] < self.min[0] \
+ # or point[0] > self.max[0] \
+ # or point[1] < self.min[1] \
+ # or point[1] > self.max[1] :
+ # return False
for i in range(len(self.points)):
p1 = self.points[i]
x1, y1 = p1
- p2 = self.points[(i+1)%len(self.points)]
+ p2 = self.points[(i+1) % len(self.points)]
x2, y2 = p2
xa = point[0] - x1
ya = point[1] - y1
return False
return True
+
def load_area_from_kml_polygon(filename, reverse=False):
"""
Loads a kml file into an Area structure.
Actually, it may be clockwise, but then you need reverse=True.
"""
kml = open(filename).read()
- coordinates_lines = re.findall('<coordinates>(.*)</coordinates>', kml, re.IGNORECASE|re.DOTALL)
+ coordinates_lines = re.findall('<coordinates>(.*)</coordinates>', kml, re.IGNORECASE | re.DOTALL)
assert len(coordinates_lines) == 1, \
'There should be exactly one set of <coordinates> %s' % filename
coordinates = coordinates_lines[0].replace('\n', ' ').replace('\r', ' ').replace('\t', ' ')
- coordinates = [ xyz for xyz in coordinates.split(' ') if xyz ]
+ coordinates = [xyz for xyz in coordinates.split(' ') if xyz]
assert coordinates[0] == coordinates[-1], \
'First and last coordinates of %s should be the same: %s, %s' % \
(filename, coordinates[0], coordinates[-1])
- assert len(coordinates)>3, 'polygon should have 3 edges minimum'
-
+ assert len(coordinates) > 3, 'polygon should have 3 edges minimum'
+
area = Area()
for xyz in coordinates[:-1]:
x, y, z = xyz.split(',')
"""
results = []
for filename in os.listdir(AREA_DIR):
- if not filename.endswith(u'.kml'):
- continue # ignore non-kml files
+ if not filename.endswith('.kml'):
+ continue # ignore non-kml files
results.append((filename[:-4], AREA_DIR+filename))
# sort by name
- results.sort(cmp=lambda a1,a2: cmp(a1[0], a2[0]))
+ results.sort(key=lambda area: area[0])
return results
-
+
+
if __name__ == '__main__':
- print list_areas()
+ print(list_areas())
pelagos = load_area_from_kml_polygon('/var/lib/ais/areas/pelagos.kml')
# counter clock-wise : Positive
- #pelagos = Area([
- # (42.91, 12.5),
- # (45.3612930132714, 10.01843703552244),
- # (43.6,5.5),
- # (40.57,8.6)
- # ])
+ # pelagos = Area([
+ # (42.91, 12.5),
+ # (45.3612930132714, 10.01843703552244),
+ # (43.6,5.5),
+ # (40.57,8.6)
+ # ])
for p in [
- (42,9),
- (41,5),
- (40,12),
- (45,13),
- (45,7),
- ]:
- print "testing", p
+ (42, 9),
+ (41, 5),
+ (40, 12),
+ (45, 13),
+ (45, 7),
+ ]:
+ print("testing", p)
if pelagos.contains(p):
- print "inside"
+ print("inside")
else:
- print"outside"
-
+ print("outside")
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
-from __future__ import division
-import sys
+import csv
+import logging
import os
import struct
-import logging
-from datetime import datetime, timedelta, date, time
-from fcntl import lockf, LOCK_EX, LOCK_UN, LOCK_SH
-import csv
-
-from ais.ntools import *
-from ais.db import *
-from ais.area import load_area_from_kml_polygon
-from ais.earth3d import dist3_latlong_ais, dist3_xyz, latlon_to_xyz_deg, latlon_to_xyz_ais
+import sys
+from datetime import date, datetime, time, timedelta
+from fcntl import LOCK_EX, LOCK_SH, LOCK_UN, lockf
import ais.inputs.config
+from ais.area import load_area_from_kml_polygon
+from ais.db import *
+from ais.earth3d import (dist3_latlong_ais, dist3_xyz, latlon_to_xyz_ais,
+ latlon_to_xyz_deg)
+from ais.ntools import *
__all__ = [
'DB_STARTDATE', 'DBPATH',
'AIS_COG_SCALE', 'AIS_COG_NOT_AVAILABLE',
'AIS_NO_HEADING',
'AIS_SOG_SCALE', 'AIS_SOG_NOT_AVAILABLE', 'AIS_SOG_FAST_MOVER', 'AIS_SOG_MAX_SPEED',
- #'_hash3_pathfilename',
'db_bydate_addrecord',
'db_lastinfo_setrecord_ifnewer',
'add_nmea1',
'filter_knownposition',
'filter_speedcheck',
]
-
+
DB_STARTDATE = datetime(2008, 6, 1)
# This is the location of the filesystem database
# see make-countries.py
COUNTRIES_MID = {
- 201: u'Albania',
- 202: u'Andorra',
- 203: u'Austria',
- 204: u'Azores',
- 205: u'Belgium',
- 206: u'Belarus',
- 207: u'Bulgaria',
- 208: u'Vatican City State',
- 209: u'Cyprus',
- 210: u'Cyprus',
- 211: u'Germany',
- 212: u'Cyprus',
- 213: u'Georgia',
- 214: u'Moldova',
- 215: u'Malta',
- 216: u'Armenia',
- 218: u'Germany',
- 219: u'Denmark',
- 220: u'Denmark',
- 224: u'Spain',
- 225: u'Spain',
- 226: u'France',
- 227: u'France',
- 228: u'France',
- 230: u'Finland',
- 231: u'Faroe Islands',
- 232: u'United Kingdom',
- 233: u'United Kingdom',
- 234: u'United Kingdom',
- 235: u'United Kingdom',
- 236: u'Gibraltar',
- 237: u'Greece',
- 238: u'Croatia',
- 239: u'Greece',
- 240: u'Greece',
- 242: u'Morocco',
- 243: u'Hungary',
- 244: u'Netherlands',
- 245: u'Netherlands',
- 246: u'Netherlands',
- 247: u'Italy',
- 248: u'Malta',
- 249: u'Malta',
- 250: u'Ireland',
- 251: u'Iceland',
- 252: u'Liechtenstein',
- 253: u'Luxembourg',
- 254: u'Monaco',
- 255: u'Madeira',
- 256: u'Malta',
- 257: u'Norway',
- 258: u'Norway',
- 259: u'Norway',
- 261: u'Poland',
- 262: u'Montenegro',
- 263: u'Portugal',
- 264: u'Romania',
- 265: u'Sweden',
- 266: u'Sweden',
- 267: u'Slovak Republic',
- 268: u'San Marino',
- 269: u'Switzerland',
- 270: u'Czech Republic',
- 271: u'Turkey',
- 272: u'Ukraine',
- 273: u'Russian Federation',
- 274: u'The Former Yugoslav Republic of Macedonia',
- 275: u'Latvia',
- 276: u'Estonia',
- 277: u'Lithuania',
- 278: u'Slovenia',
- 279: u'Serbia',
- 301: u'Anguilla',
- 303: u'Alaska',
- 304: u'Antigua and Barbuda',
- 305: u'Antigua and Barbuda',
- 306: u'Netherlands Antilles',
- 307: u'Aruba',
- 308: u'Bahamas',
- 309: u'Bahamas',
- 310: u'Bermuda',
- 311: u'Bahamas',
- 312: u'Belize',
- 314: u'Barbados',
- 316: u'Canada',
- 319: u'Cayman Islands',
- 321: u'Costa Rica',
- 323: u'Cuba',
- 325: u'Dominica',
- 327: u'Dominican Republic',
- 329: u'Guadeloupe',
- 330: u'Grenada',
- 331: u'Greenland',
- 332: u'Guatemala',
- 334: u'Honduras',
- 336: u'Haiti',
- 338: u'United States of America',
- 339: u'Jamaica',
- 341: u'Saint Kitts and Nevis',
- 343: u'Saint Lucia',
- 345: u'Mexico',
- 347: u'Martinique',
- 348: u'Montserrat',
- 350: u'Nicaragua',
- 351: u'Panama',
- 352: u'Panama',
- 353: u'Panama',
- 354: u'Panama',
- 355: u'Panama',
- 356: u'Panama',
- 357: u'Panama',
- 358: u'Puerto Rico',
- 359: u'El Salvador',
- 361: u'Saint Pierre and Miquelon',
- 362: u'Trinidad and Tobago',
- 364: u'Turks and Caicos Islands',
- 366: u'United States of America',
- 367: u'United States of America',
- 368: u'United States of America',
- 369: u'United States of America',
- 370: u'Panama',
- 371: u'Panama',
- 372: u'Panama',
- 375: u'Saint Vincent and the Grenadines',
- 376: u'Saint Vincent and the Grenadines',
- 377: u'Saint Vincent and the Grenadines',
- 378: u'British Virgin Islands',
- 379: u'United States Virgin Islands',
- 401: u'Afghanistan',
- 403: u'Saudi Arabia',
- 405: u'Bangladesh',
- 408: u'Bahrain',
- 410: u'Bhutan',
- 412: u'China',
- 413: u'China',
- 416: u'Taiwan',
- 417: u'Sri Lanka',
- 419: u'India',
- 422: u'Iran',
- 423: u'Azerbaijani Republic',
- 425: u'Iraq',
- 428: u'Israel',
- 431: u'Japan',
- 432: u'Japan',
- 434: u'Turkmenistan',
- 436: u'Kazakhstan',
- 437: u'Uzbekistan',
- 438: u'Jordan',
- 440: u'Korea',
- 441: u'Korea',
- 443: u'Palestine',
- 445: u"Democratic People's Republic of Korea",
- 447: u'Kuwait',
- 450: u'Lebanon',
- 451: u'Kyrgyz Republic',
- 453: u'Macao',
- 455: u'Maldives',
- 457: u'Mongolia',
- 459: u'Nepal',
- 461: u'Oman',
- 463: u'Pakistan',
- 466: u'Qatar',
- 468: u'Syrian Arab Republic',
- 470: u'United Arab Emirates',
- 473: u'Yemen',
- 475: u'Yemen',
- 477: u'Hong Kong',
- 478: u'Bosnia and Herzegovina',
- 501: u'Adelie Land',
- 503: u'Australia',
- 506: u'Myanmar',
- 508: u'Brunei Darussalam',
- 510: u'Micronesia',
- 511: u'Palau',
- 512: u'New Zealand',
- 514: u'Cambodia',
- 515: u'Cambodia',
- 516: u'Christmas Island',
- 518: u'Cook Islands',
- 520: u'Fiji',
- 523: u'Cocos',
- 525: u'Indonesia',
- 529: u'Kiribati',
- 531: u"Lao People's Democratic Republic",
- 533: u'Malaysia',
- 536: u'Northern Mariana Islands',
- 538: u'Marshall Islands',
- 540: u'New Caledonia',
- 542: u'Niue',
- 544: u'Nauru',
- 546: u'French Polynesia',
- 548: u'Philippines',
- 553: u'Papua New Guinea',
- 555: u'Pitcairn Island',
- 557: u'Solomon Islands',
- 559: u'American Samoa',
- 561: u'Samoa',
- 563: u'Singapore',
- 564: u'Singapore',
- 565: u'Singapore',
- 567: u'Thailand',
- 570: u'Tonga',
- 572: u'Tuvalu',
- 574: u'Viet Nam',
- 576: u'Vanuatu',
- 578: u'Wallis and Futuna Islands',
- 601: u'South Africa',
- 603: u'Angola',
- 605: u'Algeria',
- 607: u'Saint Paul and Amsterdam Islands',
- 608: u'Ascension Island',
- 609: u'Burundi',
- 610: u'Benin',
- 611: u'Botswana',
- 612: u'Central African Republic',
- 613: u'Cameroon',
- 615: u'Congo',
- 616: u'Comoros',
- 617: u'Cape Verde',
- 618: u'Crozet Archipelago',
- 619: u"Côte d'Ivoire",
- 621: u'Djibouti',
- 622: u'Egypt',
- 624: u'Ethiopia',
- 625: u'Eritrea',
- 626: u'Gabonese Republic',
- 627: u'Ghana',
- 629: u'Gambia',
- 630: u'Guinea-Bissau',
- 631: u'Equatorial Guinea',
- 632: u'Guinea',
- 633: u'Burkina Faso',
- 634: u'Kenya',
- 635: u'Kerguelen Islands',
- 636: u'Liberia',
- 637: u'Liberia',
- 642: u"Socialist People's Libyan Arab Jamahiriya",
- 644: u'Lesotho',
- 645: u'Mauritius',
- 647: u'Madagascar',
- 649: u'Mali',
- 650: u'Mozambique',
- 654: u'Mauritania',
- 655: u'Malawi',
- 656: u'Niger',
- 657: u'Nigeria',
- 659: u'Namibia',
- 660: u'Reunion',
- 661: u'Rwanda',
- 662: u'Sudan',
- 663: u'Senegal',
- 664: u'Seychelles',
- 665: u'Saint Helena',
- 666: u'Somali Democratic Republic',
- 667: u'Sierra Leone',
- 668: u'Sao Tome and Principe',
- 669: u'Swaziland',
- 670: u'Chad',
- 671: u'Togolese Republic',
- 672: u'Tunisia',
- 674: u'Tanzania',
- 675: u'Uganda',
- 676: u'Democratic Republic of the Congo',
- 677: u'Tanzania',
- 678: u'Zambia',
- 679: u'Zimbabwe',
- 701: u'Argentine Republic',
- 710: u'Brazil',
- 720: u'Bolivia',
- 725: u'Chile',
- 730: u'Colombia',
- 735: u'Ecuador',
- 740: u'Falkland Islands',
- 745: u'Guiana',
- 750: u'Guyana',
- 755: u'Paraguay',
- 760: u'Peru',
- 765: u'Suriname',
- 770: u'Uruguay',
- 775: u'Venezuela',
+ 201: 'Albania',
+ 202: 'Andorra',
+ 203: 'Austria',
+ 204: 'Azores',
+ 205: 'Belgium',
+ 206: 'Belarus',
+ 207: 'Bulgaria',
+ 208: 'Vatican City State',
+ 209: 'Cyprus',
+ 210: 'Cyprus',
+ 211: 'Germany',
+ 212: 'Cyprus',
+ 213: 'Georgia',
+ 214: 'Moldova',
+ 215: 'Malta',
+ 216: 'Armenia',
+ 218: 'Germany',
+ 219: 'Denmark',
+ 220: 'Denmark',
+ 224: 'Spain',
+ 225: 'Spain',
+ 226: 'France',
+ 227: 'France',
+ 228: 'France',
+ 230: 'Finland',
+ 231: 'Faroe Islands',
+ 232: 'United Kingdom',
+ 233: 'United Kingdom',
+ 234: 'United Kingdom',
+ 235: 'United Kingdom',
+ 236: 'Gibraltar',
+ 237: 'Greece',
+ 238: 'Croatia',
+ 239: 'Greece',
+ 240: 'Greece',
+ 242: 'Morocco',
+ 243: 'Hungary',
+ 244: 'Netherlands',
+ 245: 'Netherlands',
+ 246: 'Netherlands',
+ 247: 'Italy',
+ 248: 'Malta',
+ 249: 'Malta',
+ 250: 'Ireland',
+ 251: 'Iceland',
+ 252: 'Liechtenstein',
+ 253: 'Luxembourg',
+ 254: 'Monaco',
+ 255: 'Madeira',
+ 256: 'Malta',
+ 257: 'Norway',
+ 258: 'Norway',
+ 259: 'Norway',
+ 261: 'Poland',
+ 262: 'Montenegro',
+ 263: 'Portugal',
+ 264: 'Romania',
+ 265: 'Sweden',
+ 266: 'Sweden',
+ 267: 'Slovak Republic',
+ 268: 'San Marino',
+ 269: 'Switzerland',
+ 270: 'Czech Republic',
+ 271: 'Turkey',
+ 272: 'Ukraine',
+ 273: 'Russian Federation',
+ 274: 'The Former Yugoslav Republic of Macedonia',
+ 275: 'Latvia',
+ 276: 'Estonia',
+ 277: 'Lithuania',
+ 278: 'Slovenia',
+ 279: 'Serbia',
+ 301: 'Anguilla',
+ 303: 'Alaska',
+ 304: 'Antigua and Barbuda',
+ 305: 'Antigua and Barbuda',
+ 306: 'Netherlands Antilles',
+ 307: 'Aruba',
+ 308: 'Bahamas',
+ 309: 'Bahamas',
+ 310: 'Bermuda',
+ 311: 'Bahamas',
+ 312: 'Belize',
+ 314: 'Barbados',
+ 316: 'Canada',
+ 319: 'Cayman Islands',
+ 321: 'Costa Rica',
+ 323: 'Cuba',
+ 325: 'Dominica',
+ 327: 'Dominican Republic',
+ 329: 'Guadeloupe',
+ 330: 'Grenada',
+ 331: 'Greenland',
+ 332: 'Guatemala',
+ 334: 'Honduras',
+ 336: 'Haiti',
+ 338: 'United States of America',
+ 339: 'Jamaica',
+ 341: 'Saint Kitts and Nevis',
+ 343: 'Saint Lucia',
+ 345: 'Mexico',
+ 347: 'Martinique',
+ 348: 'Montserrat',
+ 350: 'Nicaragua',
+ 351: 'Panama',
+ 352: 'Panama',
+ 353: 'Panama',
+ 354: 'Panama',
+ 355: 'Panama',
+ 356: 'Panama',
+ 357: 'Panama',
+ 358: 'Puerto Rico',
+ 359: 'El Salvador',
+ 361: 'Saint Pierre and Miquelon',
+ 362: 'Trinidad and Tobago',
+ 364: 'Turks and Caicos Islands',
+ 366: 'United States of America',
+ 367: 'United States of America',
+ 368: 'United States of America',
+ 369: 'United States of America',
+ 370: 'Panama',
+ 371: 'Panama',
+ 372: 'Panama',
+ 375: 'Saint Vincent and the Grenadines',
+ 376: 'Saint Vincent and the Grenadines',
+ 377: 'Saint Vincent and the Grenadines',
+ 378: 'British Virgin Islands',
+ 379: 'United States Virgin Islands',
+ 401: 'Afghanistan',
+ 403: 'Saudi Arabia',
+ 405: 'Bangladesh',
+ 408: 'Bahrain',
+ 410: 'Bhutan',
+ 412: 'China',
+ 413: 'China',
+ 416: 'Taiwan',
+ 417: 'Sri Lanka',
+ 419: 'India',
+ 422: 'Iran',
+ 423: 'Azerbaijani Republic',
+ 425: 'Iraq',
+ 428: 'Israel',
+ 431: 'Japan',
+ 432: 'Japan',
+ 434: 'Turkmenistan',
+ 436: 'Kazakhstan',
+ 437: 'Uzbekistan',
+ 438: 'Jordan',
+ 440: 'Korea',
+ 441: 'Korea',
+ 443: 'Palestine',
+ 445: "Democratic People's Republic of Korea",
+ 447: 'Kuwait',
+ 450: 'Lebanon',
+ 451: 'Kyrgyz Republic',
+ 453: 'Macao',
+ 455: 'Maldives',
+ 457: 'Mongolia',
+ 459: 'Nepal',
+ 461: 'Oman',
+ 463: 'Pakistan',
+ 466: 'Qatar',
+ 468: 'Syrian Arab Republic',
+ 470: 'United Arab Emirates',
+ 473: 'Yemen',
+ 475: 'Yemen',
+ 477: 'Hong Kong',
+ 478: 'Bosnia and Herzegovina',
+ 501: 'Adelie Land',
+ 503: 'Australia',
+ 506: 'Myanmar',
+ 508: 'Brunei Darussalam',
+ 510: 'Micronesia',
+ 511: 'Palau',
+ 512: 'New Zealand',
+ 514: 'Cambodia',
+ 515: 'Cambodia',
+ 516: 'Christmas Island',
+ 518: 'Cook Islands',
+ 520: 'Fiji',
+ 523: 'Cocos',
+ 525: 'Indonesia',
+ 529: 'Kiribati',
+ 531: "Lao People's Democratic Republic",
+ 533: 'Malaysia',
+ 536: 'Northern Mariana Islands',
+ 538: 'Marshall Islands',
+ 540: 'New Caledonia',
+ 542: 'Niue',
+ 544: 'Nauru',
+ 546: 'French Polynesia',
+ 548: 'Philippines',
+ 553: 'Papua New Guinea',
+ 555: 'Pitcairn Island',
+ 557: 'Solomon Islands',
+ 559: 'American Samoa',
+ 561: 'Samoa',
+ 563: 'Singapore',
+ 564: 'Singapore',
+ 565: 'Singapore',
+ 567: 'Thailand',
+ 570: 'Tonga',
+ 572: 'Tuvalu',
+ 574: 'Viet Nam',
+ 576: 'Vanuatu',
+ 578: 'Wallis and Futuna Islands',
+ 601: 'South Africa',
+ 603: 'Angola',
+ 605: 'Algeria',
+ 607: 'Saint Paul and Amsterdam Islands',
+ 608: 'Ascension Island',
+ 609: 'Burundi',
+ 610: 'Benin',
+ 611: 'Botswana',
+ 612: 'Central African Republic',
+ 613: 'Cameroon',
+ 615: 'Congo',
+ 616: 'Comoros',
+ 617: 'Cape Verde',
+ 618: 'Crozet Archipelago',
+ 619: "Côte d'Ivoire",
+ 621: 'Djibouti',
+ 622: 'Egypt',
+ 624: 'Ethiopia',
+ 625: 'Eritrea',
+ 626: 'Gabonese Republic',
+ 627: 'Ghana',
+ 629: 'Gambia',
+ 630: 'Guinea-Bissau',
+ 631: 'Equatorial Guinea',
+ 632: 'Guinea',
+ 633: 'Burkina Faso',
+ 634: 'Kenya',
+ 635: 'Kerguelen Islands',
+ 636: 'Liberia',
+ 637: 'Liberia',
+ 642: "Socialist People's Libyan Arab Jamahiriya",
+ 644: 'Lesotho',
+ 645: 'Mauritius',
+ 647: 'Madagascar',
+ 649: 'Mali',
+ 650: 'Mozambique',
+ 654: 'Mauritania',
+ 655: 'Malawi',
+ 656: 'Niger',
+ 657: 'Nigeria',
+ 659: 'Namibia',
+ 660: 'Reunion',
+ 661: 'Rwanda',
+ 662: 'Sudan',
+ 663: 'Senegal',
+ 664: 'Seychelles',
+ 665: 'Saint Helena',
+ 666: 'Somali Democratic Republic',
+ 667: 'Sierra Leone',
+ 668: 'Sao Tome and Principe',
+ 669: 'Swaziland',
+ 670: 'Chad',
+ 671: 'Togolese Republic',
+ 672: 'Tunisia',
+ 674: 'Tanzania',
+ 675: 'Uganda',
+ 676: 'Democratic Republic of the Congo',
+ 677: 'Tanzania',
+ 678: 'Zambia',
+ 679: 'Zimbabwe',
+ 701: 'Argentine Republic',
+ 710: 'Brazil',
+ 720: 'Bolivia',
+ 725: 'Chile',
+ 730: 'Colombia',
+ 735: 'Ecuador',
+ 740: 'Falkland Islands',
+ 745: 'Guiana',
+ 750: 'Guyana',
+ 755: 'Paraguay',
+ 760: 'Peru',
+ 765: 'Suriname',
+ 770: 'Uruguay',
+ 775: 'Venezuela',
}
STATUS_CODES = {
11: '11 - Reserved for future use',
12: '12 - Reserved for future use',
13: '13 - Reserved for future use',
- 14: '14 - Reserved for future use', # Land stations
- 15: 'Not defined', # default
+ 14: '14 - Reserved for future use', # Land stations
+ 15: 'Not defined', # default
}
SHIP_TYPES = {
67: 'Passenger, Reserved for future use',
68: 'Passenger, Reserved for future use',
69: 'Passenger, No additional information',
- 70: 'Cargo', # 'Cargo, all ships of this type',
+ 70: 'Cargo', # 'Cargo, all ships of this type',
71: 'Cargo, Hazardous category A',
72: 'Cargo, Hazardous category B',
73: 'Cargo, Hazardous category C',
74: 'Cargo, Hazardous category D',
- 75: 'Cargo', # 'Cargo, Reserved for future use',
- 76: 'Cargo', # 'Cargo, Reserved for future use',
- 77: 'Cargo', # 'Cargo, Reserved for future use',
- 78: 'Cargo', # 'Cargo, Reserved for future use',
- 79: 'Cargo', # 'Cargo, No additional information',
- 80: 'Tanker', # 'Tanker, all ships of this type',
+ 75: 'Cargo', # 'Cargo, Reserved for future use',
+ 76: 'Cargo', # 'Cargo, Reserved for future use',
+ 77: 'Cargo', # 'Cargo, Reserved for future use',
+ 78: 'Cargo', # 'Cargo, Reserved for future use',
+ 79: 'Cargo', # 'Cargo, No additional information',
+ 80: 'Tanker', # 'Tanker, all ships of this type',
81: 'Tanker, Hazardous category A',
82: 'Tanker, Hazardous category B',
83: 'Tanker, Hazardous category C',
84: 'Tanker, Hazardous category D',
- 85: 'Tanker', # 'Tanker, Reserved for future use',
- 86: 'Tanker', # 'Tanker, Reserved for future use',
- 87: 'Tanker', # 'Tanker, Reserved for future use',
- 88: 'Tanker', # 'Tanker, Reserved for future use',
+ 85: 'Tanker', # 'Tanker, Reserved for future use',
+ 86: 'Tanker', # 'Tanker, Reserved for future use',
+ 87: 'Tanker', # 'Tanker, Reserved for future use',
+ 88: 'Tanker', # 'Tanker, Reserved for future use',
89: 'Tanker, No additional information',
90: 'Other Type, all ships of this type',
91: 'Other Type, Hazardous category A',
AIS_STATUS_NOT_AVAILABLE = 15
AIS_ROT_HARD_LEFT = -127
AIS_ROT_HARD_RIGHT = 127
-AIS_ROT_NOT_AVAILABLE = -128 # not like gpsd
+AIS_ROT_NOT_AVAILABLE = -128 # not like gpsd
AIS_LATLON_SCALE = 600000.0
AIS_LON_NOT_AVAILABLE = 0x6791AC0
filename = os.path.join(DBPATH, 'bydate', strdt, _hash3_pathfilename(basefilename))
f = open_with_mkdirs(filename, 'ab')
lockf(f, LOCK_EX)
- #f.seek(0,2) # go to EOF
+ # f.seek(0,2) # go to EOF
assert f.tell() % len(record) == 0, 'Invalid length for %s' % filename
f.write(record)
f.close()
return updated
-def _sql_add_nmea5(strmmsi, timestamp, imo, name, callsign, type, \
- dim_bow, dim_stern, dim_port, dim_starboard, \
- eta_M, eta_D, eta_h, eta_m, draught, destination, source):
+def _sql_add_nmea5(strmmsi, timestamp, imo, name, callsign, type,
+ dim_bow, dim_stern, dim_port, dim_starboard,
+ eta_M, eta_D, eta_h, eta_m, draught, destination, source):
''' Don't call directly '''
sqlinfo = {}
sqlinfo['mmsi'] = strmmsi_to_mmsi(strmmsi)
sqlinfo['dim_port'] = dim_port
sqlinfo['dim_starboard'] = dim_starboard
sqlinfo['destination'] = None
- eta = '%02d%02d%02d%02d' % ( eta_M, eta_D, eta_h, eta_m)
+ eta = '%02d%02d%02d%02d' % (eta_M, eta_D, eta_h, eta_m)
if eta == '00000000':
        # FIXME: temporary hack for corrupted db/latest/*.nmea5 files
eta = '00002460'
destination = destination.replace('\0', ' ').rstrip(' @\0')
sqlinfo['destination'] = destination or None
sqlinfo['source'] = source
- sqlexec(u'''INSERT INTO vessel (mmsi, updated) SELECT %(mmsi)s, '1970-01-01T00:00:00' WHERE NOT EXISTS (SELECT * FROM vessel WHERE mmsi=%(mmsi)s)''', sqlinfo)
+ sqlexec('''INSERT INTO vessel (mmsi, updated) SELECT %(mmsi)s, '1970-01-01T00:00:00' WHERE NOT EXISTS (SELECT * FROM vessel WHERE mmsi=%(mmsi)s)''', sqlinfo)
if sqlinfo['imo']:
- sqlexec(u'UPDATE vessel SET imo = %(imo)s WHERE mmsi=%(mmsi)s AND (imo IS NULL OR updated<%(updated)s)', sqlinfo)
+ sqlexec('UPDATE vessel SET imo = %(imo)s WHERE mmsi=%(mmsi)s AND (imo IS NULL OR updated<%(updated)s)', sqlinfo)
if sqlinfo['name']:
- sqlexec(u'UPDATE vessel SET name = %(name)s WHERE mmsi=%(mmsi)s AND (name IS NULL OR updated<%(updated)s)', sqlinfo)
+ sqlexec('UPDATE vessel SET name = %(name)s WHERE mmsi=%(mmsi)s AND (name IS NULL OR updated<%(updated)s)', sqlinfo)
if sqlinfo['callsign']:
- sqlexec(u'UPDATE vessel SET callsign = %(callsign)s WHERE mmsi=%(mmsi)s AND (callsign IS NULL OR updated<%(updated)s)', sqlinfo)
+ sqlexec('UPDATE vessel SET callsign = %(callsign)s WHERE mmsi=%(mmsi)s AND (callsign IS NULL OR updated<%(updated)s)', sqlinfo)
if sqlinfo['type']:
- sqlexec(u'UPDATE vessel SET type = %(type)s WHERE mmsi=%(mmsi)s AND (type IS NULL OR updated<%(updated)s)', sqlinfo)
+ sqlexec('UPDATE vessel SET type = %(type)s WHERE mmsi=%(mmsi)s AND (type IS NULL OR updated<%(updated)s)', sqlinfo)
if sqlinfo['dim_bow'] or sqlinfo['dim_stern']:
- sqlexec(u'UPDATE vessel SET dim_bow = %(dim_bow)s, dim_stern = %(dim_stern)s WHERE mmsi=%(mmsi)s AND ((dim_port = 0 OR dim_stern=0) OR updated<%(updated)s)', sqlinfo)
+ sqlexec('UPDATE vessel SET dim_bow = %(dim_bow)s, dim_stern = %(dim_stern)s WHERE mmsi=%(mmsi)s AND ((dim_port = 0 OR dim_stern=0) OR updated<%(updated)s)', sqlinfo)
if sqlinfo['dim_port'] or sqlinfo['dim_starboard']:
- sqlexec(u'UPDATE vessel SET dim_port = %(dim_port)s, dim_starboard = %(dim_starboard)s WHERE mmsi=%(mmsi)s AND ((dim_port = 0 OR dim_starboard=0) OR updated<%(updated)s)', sqlinfo)
+ sqlexec('UPDATE vessel SET dim_port = %(dim_port)s, dim_starboard = %(dim_starboard)s WHERE mmsi=%(mmsi)s AND ((dim_port = 0 OR dim_starboard=0) OR updated<%(updated)s)', sqlinfo)
if sqlinfo['destination'] or sqlinfo['eta'] != '00002460':
- sqlexec(u"UPDATE vessel SET destination = %(destination)s, eta = %(eta)s WHERE mmsi=%(mmsi)s AND (destination IS NULL OR eta = '00002460' OR updated<%(updated)s)", sqlinfo)
- sqlexec(u'UPDATE vessel SET (updated, source) = (%(updated)s, %(source)s) WHERE mmsi=%(mmsi)s AND updated<%(updated)s', sqlinfo)
+ sqlexec("UPDATE vessel SET destination = %(destination)s, eta = %(eta)s WHERE mmsi=%(mmsi)s AND (destination IS NULL OR eta = '00002460' OR updated<%(updated)s)", sqlinfo)
+ sqlexec('UPDATE vessel SET (updated, source) = (%(updated)s, %(source)s) WHERE mmsi=%(mmsi)s AND updated<%(updated)s', sqlinfo)
dbcommit()
-
-
AIVDM_RECORD123_FORMAT = 'IBbhiiII4s'
AIVDM_RECORD123_LENGTH = struct.calcsize(AIVDM_RECORD123_FORMAT)
AIVDM_RECORD5_FORMAT = 'II20s7sBHHBBBBBBH20s4s'
AIVDM_RECORD5_LENGTH = struct.calcsize(AIVDM_RECORD5_FORMAT)
-def add_nmea1(strmmsi, timestamp, status, rot, sog, \
+def unpack_to_str(format, record):
+ """
+ Similar to struct.unpack
+ but automatically converts bytes into strings
+ """
+ values = struct.unpack(format, record)
+ def __bytes_to_str(value):
+ if type(value) == bytes:
+ return str(value, 'utf8')
+ return value
+ return [__bytes_to_str(value) for value in values]
+
+
+def pack_from_str(format, *values):
+ """
+ Similar to struct.pack
+ but automatically converts strings into bytes
+ """
+
+ def __str_to_bytes(value):
+ if type(value) == str:
+ return value.encode()
+ return value
+ values = [__str_to_bytes(value) for value in values]
+ return struct.pack(format, *values)
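+
+# Illustrative round trip through the two helpers above (the field values are made up):
+#   rec = pack_from_str(AIVDM_RECORD123_FORMAT, 0, 15, -128, 1023, 0, 0, 3600, 511, 'TEST')
+#   unpack_to_str(AIVDM_RECORD123_FORMAT, rec)[-1]  # -> 'TEST', decoded back to str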
+
+
+def add_nmea1(strmmsi, timestamp, status, rot, sog,
latitude, longitude, cog, heading, source):
'''
Input is raw data, unscaled
FIXME: lat & lon are inverted compared to raw aivdm structure
'''
- record = struct.pack(AIVDM_RECORD123_FORMAT, timestamp, status, rot, sog, latitude, longitude, cog, heading, source)
- #print repr(record)
+ record = pack_from_str(AIVDM_RECORD123_FORMAT, timestamp, status, rot, sog, latitude, longitude, cog, heading, source)
+ # print(repr(record))
filename = strmmsi+'.nmea1'
db_bydate_addrecord(filename, record, timestamp)
    # There's no need to be smart: all the information is taken, or none.
return db_lastinfo_setrecord_ifnewer(filename, record, timestamp)
-def add_nmea5_full(strmmsi, timestamp, imo, name, callsign, type, \
- dim_bow, dim_stern, dim_port, dim_starboard, \
+def add_nmea5_full(strmmsi, timestamp, imo, name, callsign, type,
+ dim_bow, dim_stern, dim_port, dim_starboard,
eta_M, eta_D, eta_h, eta_m, draught, destination, source):
'''
Input is raw data, unscaled
    All fields are set, and can be updated if the record is newer
FIXME: name & callsign are inverted compared to raw aivdm structure
'''
- record = struct.pack(AIVDM_RECORD5_FORMAT, timestamp, imo, name, callsign, \
- type, dim_bow, dim_stern, dim_port, dim_starboard, \
+ record = pack_from_str(AIVDM_RECORD5_FORMAT, timestamp, imo, name, callsign,
+ type, dim_bow, dim_stern, dim_port, dim_starboard,
eta_M, eta_D, eta_h, eta_m, draught, destination, source)
- #print repr(record)
filename = strmmsi+'.nmea5'
db_bydate_addrecord(filename, record, timestamp)
updated = db_lastinfo_setrecord_ifnewer(filename, record, timestamp)
if updated:
- _sql_add_nmea5(strmmsi, timestamp, imo, name, callsign, type, \
- dim_bow, dim_stern, dim_port, dim_starboard, \
- eta_M, eta_D, eta_h, eta_m, draught, destination, source)
+ _sql_add_nmea5(strmmsi, timestamp, imo, name, callsign, type,
+ dim_bow, dim_stern, dim_port, dim_starboard,
+ eta_M, eta_D, eta_h, eta_m, draught, destination, source)
return updated
-def add_nmea5_partial(strmmsi, timestamp, imo, name, callsign, type, \
- dim_bow, dim_stern, dim_port, dim_starboard, \
+
+def add_nmea5_partial(strmmsi, timestamp, imo, name, callsign, type,
+ dim_bow, dim_stern, dim_port, dim_starboard,
eta_M, eta_D, eta_h, eta_m, draught, destination, source):
'''
Input is raw data, unscaled
    Not all fields are set. Only some of them can be updated if the record is newer
'''
- record = struct.pack(AIVDM_RECORD5_FORMAT, \
- timestamp, imo, name, callsign, type, \
- dim_bow, dim_stern, dim_port, dim_starboard, \
- eta_M, eta_D, eta_h, eta_m, draught, destination, \
+ record = pack_from_str(AIVDM_RECORD5_FORMAT,
+ timestamp, imo, name, callsign, type,
+ dim_bow, dim_stern, dim_port, dim_starboard,
+ eta_M, eta_D, eta_h, eta_m, draught, destination,
source)
- #print repr(record)
filename = strmmsi + '.nmea5'
db_bydate_addrecord(filename, record, timestamp)
else:
lockf(f, LOCK_EX)
oldrecord = f.read(AIVDM_RECORD5_LENGTH)
- oldtimestamp, oldimo, oldname, oldcallsign, oldtype, \
- olddim_bow, olddim_stern, olddim_port, olddim_starboard, \
- oldeta_M, oldeta_D, oldeta_h, oldeta_m, \
- olddraught, olddestination, oldsource \
- = struct.unpack(AIVDM_RECORD5_FORMAT, oldrecord)
+ (oldtimestamp, oldimo, oldname, oldcallsign, oldtype,
+ olddim_bow, olddim_stern, olddim_port, olddim_starboard,
+ oldeta_M, oldeta_D, oldeta_h, oldeta_m,
+ olddraught, olddestination, oldsource
+ ) = unpack_to_str(AIVDM_RECORD5_FORMAT, oldrecord)
if timestamp > oldtimestamp:
            # the incoming information is more recent
if imo == 0:
dim_port = olddim_port
if dim_starboard == 0:
dim_starboard = olddim_starboard
- if eta_M == 0 or eta_D == 0 or eta_h == 24 or eta_m == 60 \
- or destination == '':
+ if (eta_M == 0 or eta_D == 0 or eta_h == 24 or eta_m == 60
+ or destination == ''):
eta_M = oldeta_M
eta_D = oldeta_D
eta_h = oldeta_h
destination = olddestination
if draught == 0:
draught = olddraught
- record = struct.pack(AIVDM_RECORD5_FORMAT, \
- timestamp, imo, name, callsign, type, \
- dim_bow, dim_stern, dim_port, dim_starboard, \
- eta_M, eta_D, eta_h, eta_m, draught, \
+ record = pack_from_str(AIVDM_RECORD5_FORMAT,
+ timestamp, imo, name, callsign, type,
+ dim_bow, dim_stern, dim_port, dim_starboard,
+ eta_M, eta_D, eta_h, eta_m, draught,
destination, source)
f.seek(0)
f.write(record)
olddim_starboard = dim_starboard
updated = True
# FIXME
- if (oldeta_M == 0 or oldeta_D == 0 or olddestination == '') \
- and ((eta_M != 0 and eta_D != 0) or destination!=''):
+ if ((oldeta_M == 0 or oldeta_D == 0 or olddestination == '')
+ and ((eta_M != 0 and eta_D != 0) or destination != '')):
oldeta_M = eta_M
oldeta_D = eta_D
oldeta_h = eta_h
updated = True
if updated:
oldsource = source
- record = struct.pack(AIVDM_RECORD5_FORMAT, \
- oldtimestamp, oldimo, oldname, \
- oldcallsign, oldtype, \
- olddim_bow, olddim_stern, \
- olddim_port, olddim_starboard, \
- oldeta_M, oldeta_D, oldeta_h, oldeta_m, \
+ record = pack_from_str(AIVDM_RECORD5_FORMAT,
+ oldtimestamp, oldimo, oldname,
+ oldcallsign, oldtype,
+ olddim_bow, olddim_stern,
+ olddim_port, olddim_starboard,
+ oldeta_M, oldeta_D, oldeta_h, oldeta_m,
olddraught, olddestination, oldsource)
-
+
f.seek(0)
f.write(record)
# keep the file locked during SQL updates
if updated:
- _sql_add_nmea5(strmmsi, timestamp, imo, name, callsign, type, \
- dim_bow, dim_stern, dim_port, dim_starboard, \
- eta_M, eta_D, eta_h, eta_m, draught, destination, source)
+ _sql_add_nmea5(strmmsi, timestamp, imo, name, callsign, type,
+ dim_bow, dim_stern, dim_port, dim_starboard,
+ eta_M, eta_D, eta_h, eta_m, draught, destination, source)
f.close()
return updated
+__misources__ = {} # cache of manual source names
+
-__misources__ = {} # cache of manual source names
def _get_mi_sourcename(id):
"""
Get the nice name for sources whose id4 starts with 'MI'
"""
global __misources__
if not __misources__:
- sqlexec(u'SELECT id, name FROM mi_source')
+ sqlexec('SELECT id, name FROM mi_source')
while True:
row = get_common_cursor().fetchone()
if row is None:
__misources__[row[0]] = row[1]
result = __misources__.get(id, None)
if result is None:
- return u"Manual input #%s" % id
+ return "Manual input #%s" % id
return result
def to_values(self):
return self.timestamp_1, self.status, self.rot, self.sog, self.latitude, self.longitude, self.cog, self.heading, self.source_1
+ @staticmethod
+ def __unpack(record):
+ return unpack_to_str(AIVDM_RECORD123_FORMAT, record)
+
def from_record(self, record):
- values = struct.unpack(AIVDM_RECORD123_FORMAT, record)
+ values = Nmea1.__unpack(record)
Nmea1.__init__(self, *values)
@staticmethod
def new_from_record(record):
- values = struct.unpack(AIVDM_RECORD123_FORMAT, record)
+ values = Nmea1.__unpack(record)
return Nmea1(*values)
def to_record(self):
return struct.pack(AIVDM_RECORD123_FORMAT, *Nmea1.to_values(self))
-
+
def from_file(self, file):
record = file.read(AIVDM_RECORD123_LENGTH)
Nmea1.from_record(self, record)
def from_lastinfo(self, strmmsi):
filename_nmea1 = DBPATH+'/last/'+_hash3_pathfilename(strmmsi+'.nmea1')
try:
- f = file(filename_nmea1, 'rb')
+ f = open(filename_nmea1, 'rb')
except IOError:
logging.debug("file %s doesn't exists" % filename_nmea1)
return
def new_from_lastinfo(strmmsi):
filename_nmea1 = DBPATH+'/last/'+_hash3_pathfilename(strmmsi+'.nmea1')
try:
- f = file(filename_nmea1, 'rb')
+ f = open(filename_nmea1, 'rb')
except IOError:
logging.debug("file %s doesn't exists" % filename_nmea1)
return None
f.close()
return Nmea1.new_from_record(record)
-
def dump_to_stdout(self):
"""
Prints content to stdout
"""
- print datetime.utcfromtimestamp(self.timestamp_1),
+        print(datetime.utcfromtimestamp(self.timestamp_1), end=' ')
for i in (self.status, self.rot, self.sog, self.latitude/AIS_LATLON_SCALE, self.longitude/AIS_LATLON_SCALE, self.cog, self.heading, self.source_1):
- print repr(i),
- print
-
+            print(repr(i), end=' ')
+ print()
+
@staticmethod
def _clean_str(txt):
if txt is None:
return ''
- return txt.replace('\0','').replace('@', '').strip()
+ return txt.replace('\0', '').replace('@', '').strip()
def get_status(self, default='Unknown'):
return STATUS_CODES.get(self.status, default)
-
+
def get_sog_str(self, default='Unknown'):
if self.sog == AIS_SOG_NOT_AVAILABLE:
return default
f = -f
e = emispheres[1]
result = '%d°' % int(f)
- f = (f%1)*60
+ f = (f % 1) * 60
result += '%02.05f\' ' % f
result += e
return result
def get_source_1_str(self):
return Nmea.format_source(self.source_1)
+
class Nmea5:
def __init__(self, timestamp, imo=0, name='', callsign='', type=0, dim_bow=0, dim_stern=0, dim_port=0, dim_starboard=0, eta_M=0, eta_D=0, eta_h=24, eta_m=60, draught=0, destination='', source=''):
self.timestamp_5 = timestamp
self.imo = imo
- self.name = name
+ self.name = name
self.callsign = callsign
self.type = type
self.dim_bow = dim_bow
def to_values(self):
return self.timestamp_5, self.imo, self.name, self.callsign, self.type, self.dim_bow, self.dim_stern, self.dim_port, self.dim_starboard, self.eta_M, self.eta_D, self.eta_h, self.eta_m, self.draught, self.destination, self.source_5
+ @staticmethod
+ def __unpack(record):
+ return unpack_to_str(AIVDM_RECORD5_FORMAT, record)
+
def from_record(self, record):
- values = struct.unpack(AIVDM_RECORD5_FORMAT, record)
+ values = Nmea5.__unpack(record)
Nmea5.__init__(self, *values)
@staticmethod
def new_from_record(record):
- values = struct.unpack(AIVDM_RECORD5_FORMAT, record)
+ values = Nmea5.__unpack(record)
return Nmea5(*values)
def to_record(self):
return struct.pack(AIVDM_RECORD5_FORMAT, *Nmea5.to_values(self))
-
+
def from_file(self, file):
record = file.read(AIVDM_RECORD5_LENGTH)
Nmea5.from_record(self, record)
'last',
_hash3_pathfilename(strmmsi+'.nmea5'))
try:
- f = file(filename_nmea5, 'rb')
+ f = open(filename_nmea5, 'rb')
except IOError:
logging.debug("file %s doesn't exists" % filename_nmea5)
return
def new_from_lastinfo(strmmsi):
filename_nmea5 = DBPATH+'/last/'+_hash3_pathfilename(strmmsi+'.nmea5')
try:
- f = file(filename_nmea5, 'rb')
+ f = open(filename_nmea5, 'rb')
except IOError:
logging.debug("file %s doesn't exists" % filename_nmea5)
return None
def _clean_str(txt):
if txt is None:
return ''
- return txt.replace('\0','').replace('@', '').strip()
+ return txt.replace('\0', '').replace('@', '').strip()
def get_name(self, default='Unknown'):
result = self._clean_str(self.name)
return self.dim_port + self.dim_starboard
_monthes = 'Jan,Feb,Mar,Apr,May,Jun,Jul,Aug,Sep,Oct,Nov,Dec'.split(',')
+
def get_eta_str(self, default='Unknown'):
if not self.eta_M and not self.eta_D:
return default
else:
result += ':%02d' % self.eta_m
return result
-
+
def get_draught_str(self, default='Unknown'):
if not self.draught:
return default
def get_source_5_str(self):
return Nmea.format_source(self.source_5)
+
class Nmea(Nmea1, Nmea5):
"""
This is nmea info, a merge of nmea1 and nmea5 packets
def from_lastinfo(self, strmmsi):
Nmea1.from_lastinfo(self, strmmsi)
Nmea5.from_lastinfo(self, strmmsi)
-
+
@staticmethod
def new_from_lastinfo(strmmsi):
# better than unimplemented, but not optimal
nmea.from_lastinfo(strmmsi)
return nmea
-
- def get_flag(self, default=u'Unknown'):
- if self.strmmsi.startswith('00') and self.strmmsi[3:5]!='MI':
+ def get_flag(self, default='Unknown'):
+ if self.strmmsi.startswith('00') and self.strmmsi[3:5] != 'MI':
ref_mmsi = self.strmmsi[2:]
else:
ref_mmsi = self.strmmsi
"""
lastupdate = self.get_last_timestamp()
if lastupdate == 0:
- return u'Never'
+ return 'Never'
dt_lastupdate = datetime.utcfromtimestamp(lastupdate)
delta = datetime.utcnow() - dt_lastupdate
- return nice_timedelta_str(delta) + u' ago'
+ return nice_timedelta_str(delta) + ' ago'
def get_last_updated_str(self):
"""
"""
lastupdate = self.get_last_timestamp()
if lastupdate == 0:
- return u'Never'
+ return 'Never'
dt_lastupdate = datetime.utcfromtimestamp(lastupdate)
delta = datetime.utcnow() - dt_lastupdate
return dt_lastupdate.strftime('%Y-%m-%d %H:%M:%S GMT') + ' (' + nice_timedelta_str(delta) + ' ago)'
@staticmethod
def format_source(infosrc):
if infosrc == '\0\0\0\0':
- return u'(empty)'
+ return '(empty)'
elif infosrc.startswith('MI'):
if len(infosrc) == 4:
return _get_mi_sourcename(struct.unpack('<2xH', infosrc)[0])
else:
- return u'Manual input'
+ return 'Manual input'
elif infosrc.startswith('U'):
- return u'User input'
+ return 'User input'
elif infosrc.startswith('NM'):
- return u'NMEA packets from '+xml_escape(infosrc[2:])
+ return 'NMEA packets from '+xml_escape(infosrc[2:])
elif infosrc.startswith('SP'):
- return u"ShipPlotter user %s" % infosrc[2:]
+ return "ShipPlotter user %s" % infosrc[2:]
elif infosrc.startswith('ST'):
- return u"Spot track %s" % infosrc[2:]
- elif infosrc == u'MTWW':
- return u'MarineTraffic.com web site'
- elif infosrc == u'MTTR':
- return u'MarineTraffic.com track files'
+ return "Spot track %s" % infosrc[2:]
+ elif infosrc == 'MTWW':
+ return 'MarineTraffic.com web site'
+ elif infosrc == 'MTTR':
+ return 'MarineTraffic.com track files'
else:
return infosrc
'imo': lambda nmea: str(nmea.imo),
'callsign': Nmea5.get_callsign,
'type': lambda nmea: str(nmea.type) + '-' + nmea.get_shiptype(),
- 'length':lambda nmea: str(nmea.get_length()),
+ 'length': lambda nmea: str(nmea.get_length()),
'width': lambda nmea: str(nmea.get_width()),
'datetime': lambda nmea: datetime.utcfromtimestamp(nmea.get_last_timestamp()).strftime('%Y-%m-%dT%H:%M:%SZ'),
'status': Nmea1.get_status,
result.append(f(self))
return result
- #def get_dump_row(self):
- # result = []
- # def _clean(txt):
- # if txt is None:
- # return ''
- # return txt.replace('\0','').replace('@', '').strip()
-
- # result.append(self.strmmsi)
- # result.append(self.get_flag().encode('utf-8'))
- # result.append(self.get_name())
- # result.append(str(self.imo))
- # result.append(_clean(self.callsign))
- # result.append(str(self.type) + '-' + SHIP_TYPES.get(self.type, 'unknown'))
- # d = self.dim_bow + self.dim_stern
- # if d:
- # result.append(d)
- # else:
- # result.append(None)
- # d = self.dim_port + self.dim_starboard
- # if d:
- # result.append(d)
- # else:
- # result.append(None)
- # result.append(datetime.utcfromtimestamp(self.timestamp_1).strftime('%Y-%m-%dT%H:%M:%SZ'))
- # result.append(STATUS_CODES.get(self.status, 'unknown'))
- # if self.sog != AIS_SOG_NOT_AVAILABLE:
- # result.append(str(self.sog/AIS_SOG_SCALE))
- # else:
- # result.append(None)
- # if self.latitude != AIS_LAT_NOT_AVAILABLE:
- # result.append(str(self.latitude/AIS_LATLON_SCALE))
- # else:
- # result.append(None)
- # if self.longitude != AIS_LON_NOT_AVAILABLE:
- # result.append(str(self.longitude/AIS_LATLON_SCALE))
- # else:
- # result.append(None)
- # if self.cog != AIS_COG_NOT_AVAILABLE:
- # result.append(str(self.cog/10.))
- # else:
- # result.append(None)
- # if self.heading != AIS_NO_HEADING:
- # result.append(str(self.heading))
- # else:
- # result.append(None)
- # result.append(self.get_destination(''))
- # result.append(self.get_eta_str(''))
- # result.append(self.draught)
- # result.append(self.source_5)
- # return result
+ # def get_dump_row(self):
+ # result = []
+ # def _clean(txt):
+ # if txt is None:
+ # return ''
+ # return txt.replace('\0','').replace('@', '').strip()
+ #
+ # result.append(self.strmmsi)
+ # result.append(self.get_flag().encode('utf-8'))
+ # result.append(self.get_name())
+ # result.append(str(self.imo))
+ # result.append(_clean(self.callsign))
+ # result.append(str(self.type) + '-' + SHIP_TYPES.get(self.type, 'unknown'))
+ # d = self.dim_bow + self.dim_stern
+ # if d:
+ # result.append(d)
+ # else:
+ # result.append(None)
+ # d = self.dim_port + self.dim_starboard
+ # if d:
+ # result.append(d)
+ # else:
+ # result.append(None)
+ # result.append(datetime.utcfromtimestamp(self.timestamp_1).strftime('%Y-%m-%dT%H:%M:%SZ'))
+ # result.append(STATUS_CODES.get(self.status, 'unknown'))
+ # if self.sog != AIS_SOG_NOT_AVAILABLE:
+ # result.append(str(self.sog/AIS_SOG_SCALE))
+ # else:
+ # result.append(None)
+ # if self.latitude != AIS_LAT_NOT_AVAILABLE:
+ # result.append(str(self.latitude/AIS_LATLON_SCALE))
+ # else:
+ # result.append(None)
+ # if self.longitude != AIS_LON_NOT_AVAILABLE:
+ # result.append(str(self.longitude/AIS_LATLON_SCALE))
+ # else:
+ # result.append(None)
+ # if self.cog != AIS_COG_NOT_AVAILABLE:
+ # result.append(str(self.cog/10.))
+ # else:
+ # result.append(None)
+ # if self.heading != AIS_NO_HEADING:
+ # result.append(str(self.heading))
+ # else:
+ # result.append(None)
+ # result.append(self.get_destination(''))
+ # result.append(self.get_eta_str(''))
+ # result.append(self.draught)
+ # result.append(self.source_5)
+ # return result
class BankNmea1(list):
File must be locked before call
File should be truncated after call
'''
- for nmea1 in list.__iter__(self): # self.__iter__ reload the bank, we don't want that
+        for nmea1 in list.__iter__(self):  # self.__iter__ reloads the bank, and we don't want that here
file.write(nmea1.to_record())
def __load(self):
raise
self.__load_from_file(file)
file.close()
-
+
def __iter__(self):
"""
        Each call reloads the file
return list.__iter__(self)
def packday(self, remove_manual_input=False, remove_source_name=None):
- #print "MMSI", strmmsi
+        # print("MMSI", strmmsi)
filename = self.get_filename()
try:
file_must_be_unlinked = True
file.close()
-
+
if file_must_be_unlinked:
# FIXME we release the lock before unlinking
# another process might encounter an empty file (not handled)
nmea1.dump_to_stdout()
def sort_by_date(self):
- self.sort(lambda n1, n2: n1.timestamp_1 - n2.timestamp_1)
+ self.sort(key=lambda n: n.timestamp_1)
def sort_by_date_reverse(self):
- self.sort(lambda n1, n2: n2.timestamp_1 - n1.timestamp_1)
+ self.sort(key=lambda n: n.timestamp_1, reverse=True)
def remove_duplicate_timestamp(self):
file_has_changed = False
last_timestamp = self[i].timestamp_1
i += 1
return file_has_changed
-
+
def remove_manual_input(self):
file_has_changed = False
i = 0
file_has_changed = False
i = 0
while i < len(self):
- #logging.debug('Testing %s ...', self[i].source_1)
+ # logging.debug('Testing %s ...', self[i].source_1)
if self[i].source_1.startswith(source_name_start):
- #logging.debug('Deleting ...')
+ # logging.debug('Deleting ...')
del self[i]
file_has_changed = True
else:
- #logging.debug('Keeping ...')
+ # logging.debug('Keeping ...')
i += 1
return file_has_changed
+
class Nmea1Feeder:
"""
Yields all nmea1 packets between two given datetimes
return
if nmea1.timestamp_1 > ts_end:
continue
-
+
yield nmea1
-
+
count += 1
if self.max_count and count >= self.max_count:
return
raise
self.__load_from_file(file)
file.close()
-
+
def __iter__(self):
"""
        Each call reloads the file
return list.__iter__(self)
def sort_by_date(self):
- self.sort(lambda n1, n2: n1.timestamp_5 - n2.timestamp_5)
+ self.sort(key=lambda n: n.timestamp_5)
def sort_by_date_reverse(self):
- self.sort(lambda n1, n2: n2.timestamp_5 - n1.timestamp_5)
+ self.sort(key=lambda n: n.timestamp_5, reverse=True)
class Nmea5Feeder:
"""
return
if nmea1.timestamp_5 > ts_end:
continue
-
+
yield nmea1
-
+
count += 1
if self.max_count and count >= self.max_count:
return
else:
nmea5_datetime_begin = None
nmea5_iterator = Nmea5Feeder(self.strmmsi, self.datetime_end, nmea5_datetime_begin).__iter__()
- nmea5 = Nmea5(self.strmmsi, sys.maxint)
+ nmea5 = Nmea5(timestamp=sys.maxsize)
count = 0
- lasttimestamp = sys.maxint
+ lasttimestamp = sys.maxsize
for nmea1 in Nmea1Feeder(self.strmmsi, self.datetime_end, self.datetime_begin):
Nmea1.from_values(nmea, *nmea1.to_values())
-
+
            # try to get an older nmea5 packet
nmea5_updated = False
while nmea5 is not None and nmea5.timestamp_5 > nmea1.timestamp_1:
try:
- nmea5 = nmea5_iterator.next()
+ nmea5 = next(nmea5_iterator)
nmea5_updated = True
except StopIteration:
nmea5 = None
-
+
if nmea5_updated and nmea5 is not None:
Nmea5.merge_from_values(nmea, *nmea5.to_values())
strdelta = 'less than a second '
return strdelta
+
def all_mmsi_generator():
"""
Returns an array of all known strmmsi.
Returns an array of strmmsi.
"""
result = []
- sqlexec(u"SELECT mmsi FROM fleet_vessel WHERE fleet_id=" + unicode(fleetid))
+    sqlexec("SELECT mmsi FROM fleet_vessel WHERE fleet_id=" + str(fleetid))
cursor = get_common_cursor()
while True:
row = cursor.fetchone()
def fleetname_to_fleetid(fleetname):
- sqlexec(u"SELECT id FROM fleet WHERE name=%(fleetname)s", {'fleetname': fleetname})
+ sqlexec("SELECT id FROM fleet WHERE name=%(fleetname)s", {'fleetname': fleetname})
cursor = get_common_cursor()
row = cursor.fetchone()
return row[0]
-
def mmsiiterator_nohiddenship(mmsiiterator):
'''
filters strmmsi from an strmmsi iterator
if mmsi not in ais.inputs.config.get_hidden_mmsi():
yield strmmsi
+
def filter_area(nmea, area):
"""
Returns false if position is out of area.
return False
return True
+
def filter_close_to(nmea, lat, lon, miles=1.0):
'''
Returns true if position is closer than miles from (lat, lon)
_filter_positioncheck_last_mmsi = None
+
+
def filter_speedcheck(nmea, max_mps):
"""
    mps is miles per second
parser.add_option('--filter-speedcheck',
action='store', type='int', dest='speedcheck', default=200, metavar='KNOTS',
- help='Eliminate erroneaous positions from results,'
+            help='Eliminate erroneous positions from results,'
' based on impossible speed.')
parser.add_option('--filter-type',
(options, args) = parser.parse_args()
-
if options.help_types:
- print "Known ship types:"
+ print("Known ship types:")
        keys = sorted(SHIP_TYPES.keys())
for k in keys:
- print k, SHIP_TYPES[k]
+ print(k, SHIP_TYPES[k])
sys.exit(0)
DBPATH = options.db
#
if len(args)==0:
- print >> sys.stderr, "No ship to process"
+ print("No ship to process", file=sys.stderr)
sys.exit(1)
- target_mmsi_iterator = [] # strmmsi
+ target_mmsi_iterator = [] # strmmsi
all_targets = False
for arg in args:
if arg == 'all':
elif len(options.sdt_start)==8:
options.sdt_start = datetime.strptime(options.sdt_start, '%Y%m%d')
else:
- print >> sys.stderr, "Invalid format for --start option"
+ print("Invalid format for --start option", file=sys.stderr)
sys.exit(1)
if options.sdt_end:
# remove non digit characters
options.sdt_end = "".join([ c for c in options.sdt_end if c.isdigit()])
- if len(options.sdt_end)==14:
+ if len(options.sdt_end) == 14:
options.sdt_end = datetime.strptime(options.sdt_end, '%Y%m%d%H%M%S')
- elif len(options.sdt_end)==8:
+ elif len(options.sdt_end) == 8:
options.sdt_end = datetime.strptime(options.sdt_end, '%Y%m%d')
options.sdt_end = datetime.combine(options.sdt_end.date(), time(23, 59, 59))
else:
- print >> sys.stderr, "Invalid format for --end option"
+ print("Invalid format for --end option", file=sys.stderr)
sys.exit(1)
-
+
if options.sdt_duration:
# remove spaces
options.sdt_duration = options.sdt_duration.replace(' ', '')
duration_unit = 60
elif options.sdt_duration[-1] == 'H':
options.sdt_duration = options.sdt_duration[:-1]
- duration_unit = 60*60
+ duration_unit = 60 * 60
elif options.sdt_duration[-1] == 'D':
options.sdt_duration = options.sdt_duration[:-1]
- duration_unit = 24*60*60
+ duration_unit = 24 * 60 * 60
elif options.sdt_duration[-1] == 'W':
options.sdt_duration = options.sdt_duration[:-1]
- duration_unit = 7*24*60*60
+ duration_unit = 7 * 24 * 60 * 60
else:
duration_unit = 1
try:
- options.sdt_duration = long(options.sdt_duration)
+ options.sdt_duration = int(options.sdt_duration)
except ValueError:
- print >> sys.stderr, "Can't parse duration"
+ print("Can't parse duration", file=sys.stderr)
sys.exit(1)
options.sdt_duration = timedelta(0, options.sdt_duration * duration_unit)
if options.sdt_start or options.sdt_duration or options.granularity is not None or options.max_count:
# Time period is enabled (note that date_end only defaults to one day archives ending then)
if not options.sdt_start and not options.sdt_end and not options.sdt_duration:
- options.sdt_duration = timedelta(1) # One day
+ options.sdt_duration = timedelta(1) # One day
# continue without else
if not options.sdt_start and not options.sdt_end and options.sdt_duration:
dt_end = datetime.utcnow()
dt_start = dt_end - options.sdt_duration
- #elif not options.sdt_start and options.sdt_end and not options.sdt_duration:
+ # elif not options.sdt_start and options.sdt_end and not options.sdt_duration:
# never reached
elif not options.sdt_start and options.sdt_end and options.sdt_duration:
dt_end = options.sdt_end
dt_end = options.sdt_end
else:
assert options.sdt_start and options.sdt_end and options.sdt_duration, 'Internal error'
- print >> sys.stderr, "You can't have all 3 --start --end and --duration"
+ print("You can't have all 3 --start --end and --duration", file=sys.stderr)
sys.exit(1)
if options.granularity is None:
options.granularity = 600
options.max_count = 1
if options.granularity is None:
options.granularity = 600
-
+
logging.debug('--start is %s', dt_start)
logging.debug('--end is %s', dt_end)
#
filters = []
-
+
if options.filter_knownposition:
filters.append(filter_knownposition)
if options.speedcheck != 0:
- maxmps = options.speedcheck / 3600. # from knots to NM per seconds
+        maxmps = options.speedcheck / 3600  # from knots to NM per second
filters.append(lambda nmea: filter_speedcheck(nmea, maxmps))
if options.area_file:
area = load_area_from_kml_polygon(options.area_file)
filters.append(lambda nmea: filter_area(nmea, area))
-
+
if options.close_to:
try:
            lat = clean_latitude(options.close_to[0])
            lon = clean_longitude(options.close_to[1])
except LatLonFormatError as err:
- print >> sys.stderr, err.args
+            print(err.args, file=sys.stderr)
sys.exit(1)
miles = float(options.close_to[2])
filters.append(lambda nmea: filter_close_to(nmea, lat, lon, miles))
            lat = clean_latitude(options.far_from[0])
            lon = clean_longitude(options.far_from[1])
except LatLonFormatError as err:
- print >> sys.stderr, err.args
+ print(err.args, file=sys.stderr)
sys.exit(1)
miles = float(options.far_from[2])
filters.append(lambda nmea: filter_far_from(nmea, lat, lon, miles))
-
+
if options.sog_le:
filters.append(lambda nmea: filter_sog_le(nmea, float(options.sog_le)))
if options.sog_ge:
elif options.action == 'removemanual':
if filters:
- print >> sys.stderr, "removemanual action doesn't support filters"
+ print("removemanual action doesn't support filters", file=sys.stderr)
sys.exit(1)
# TODO: dates = range dt_start, dt_end
for mmsi in target_mmsi_iterator:
BankNmea1(mmsi, dt).packday(remove_manual_input=True)
dt = dt + timedelta(1)
-
+
elif options.action == 'removebysource':
if filters:
- print >> sys.stderr, "removebysource action doesn't support filters"
+ print("removebysource action doesn't support filters", file=sys.stderr)
sys.exit(1)
# TODO: dates = range dt_start, dt_end
if BankNmea1(mmsi, dt).packday(remove_source_name='MT'):
logging.info('File was modified. mmsi=%s dt=%s', mmsi, dt)
dt = dt + timedelta(1)
-
+
elif options.action == 'mmsidump':
- for strmmsi in target_mmsi_iterator :
- print strmmsi
+ for strmmsi in target_mmsi_iterator:
+ print(strmmsi)
elif options.action == 'fixdestination':
for mmsi in target_mmsi_iterator:
for nmea in NmeaFeeder(mmsi, dt_end, dt_start, filters, granularity=options.granularity, max_count=options.max_count):
destination = nmea.destination.rstrip(' @\0')
if destination:
- sqlexec(u'UPDATE vessel SET destination = %(destination)s WHERE mmsi=%(mmsi)s AND destination IS NULL', {'mmsi':strmmsi_to_mmsi(mmsi), 'destination':destination})
+ sqlexec('UPDATE vessel SET destination = %(destination)s WHERE mmsi=%(mmsi)s AND destination IS NULL', {'mmsi': strmmsi_to_mmsi(mmsi), 'destination': destination})
logging.info('%s -> %s', mmsi, repr(destination))
dbcommit()
- break # go to next mmsi
+ break # go to next mmsi
if __name__ == '__main__':
-# -*- encoding: utf-8 -*-
'''
database library.
'''
-from __future__ import division
__all__ = [
'get_common_db',
DATABASE_CONFIG_FILE = '/etc/ais/database'
DUMP_SQL_QUERIES = False
+
def _get_connect_str():
'''
Returns a connection string suitable for psycopg
dbname = cfg.get('dbname', None)
assert dbname, u'You must define a database name'
connectstr = u'dbname=' + dbname
-
+
host = cfg.get('host', None)
if host:
connectstr += u' host=' + host
if password:
connectstr += u' password=' + password
return connectstr
-
+
+
__db__ = None
+
+
def get_common_db():
'''
Returns a singleton on a psycoPG database connection
__db__.set_isolation_level(psyext.ISOLATION_LEVEL_AUTOCOMMIT)
return __db__
+
__cursor__ = None
+
+
def get_common_cursor():
'''
Returns a singleton on a psycoPG database cursor
__cursor__ = get_common_db().cursor()
return __cursor__
+
def sql_setdebug(isdebug):
'''
    Turns SQL logging to stdout on/off
global DUMP_SQL_QUERIES
DUMP_SQL_QUERIES = isdebug
+
def sqlexec(sql, *args, **kargs):
'''
Execute an sql statement, using the common cursor
'''
cursor = get_common_cursor()
if DUMP_SQL_QUERIES:
- print cursor.mogrify(sql.encode('utf8'), *args, **kargs)
+ print(cursor.mogrify(sql.encode('utf8'), *args, **kargs))
cursor.execute(sql, *args, **kargs)
+
def dbcommit():
'''
Commit singleton cursor.
-#!/usr/bin/python
-from __future__ import division
-from django.core.management import execute_manager
-try:
- from ais.djais import settings
-except ImportError:
- import sys
- sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
- sys.exit(1)
+#!/usr/bin/python3
+import os
+import sys
if __name__ == "__main__":
- execute_manager(settings)
+ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ais.djais.settings")
+ from django.core.management import execute_from_command_line
+ execute_from_command_line(sys.argv)
-# -*- coding: utf-8 -*-
'''
Example usage:
def auth(username, password):
def myview(request):
return HttpResponse("Hello world!")
'''
-from __future__ import division
from django.http import HttpResponse
import base64
auth = request.META['HTTP_AUTHORIZATION']
assert auth.startswith('Basic '), \
'Invalid authentification scheme'
- username, password = base64.decodestring(auth[len('Basic '):]).split(':', 2)
+ auth = str(base64.b64decode(auth[len('Basic '):]), 'UTF-8')
+ username, password = auth.split(':', 1)  # only the first colon separates username and password
user = self.passwd_checker(username, password)
if not user:
return HttpResponseAuthenticate("Invalid username/password", realm=self.realm)
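
Note: a sketch of how this decorator is wired up, matching its use in views.py (the view body and module paths are assumptions):

    from django.http import HttpResponse
    from ais.djais.http import http_authenticate   # module path assumed
    from ais.djais.models import User              # module path assumed

    def auth(username, password):
        # return a User on success, None on failure
        try:
            user = User.objects.get(login=username)
        except User.DoesNotExist:
            return None
        return user if user.check_password(password) else None

    @http_authenticate(auth, 'ais')
    def myview(request):
        return HttpResponse('Hello ' + request.user.login)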
-# -*- coding: utf-8 -*-
"""
This context processor just adds an "ais_base_url" key that's always available
"""
from ais.djais.settings import AIS_BASE_URL
from django.utils.safestring import mark_safe
-from urlparse import urlsplit
+from urllib.parse import urlsplit
logout_url = 'https://logout@' + urlsplit(AIS_BASE_URL).netloc + '/logout'
-# -*- coding: utf-8 -*-
-
-from __future__ import division
import os, os.path
from datetime import datetime
from django.db import models
-from django.contrib.auth.models import get_hexdigest
+from django.contrib.auth import hashers
from django.utils import html
from ais.ntools import mmsi_to_strmmsi
class UserMessageCategory(models.Model):
id = models.CharField(max_length=10, primary_key=True)
class Meta:
- db_table = u'user_message_category'
+ db_table = 'user_message_category'
class UserMessage(models.Model):
id = models.AutoField(primary_key=True)
category = models.ForeignKey(UserMessageCategory, db_column='user_message_category_id')
txt = models.TextField()
class Meta:
- db_table = u'user_message'
+ db_table = 'user_message'
class User(models.Model):
id = models.AutoField(primary_key=True)
login = models.CharField(max_length=16, unique=True)
- password_hash = models.CharField(max_length=75)
+ password_hash = models.CharField(max_length=128)
name = models.CharField(max_length=50)
email = models.EmailField()
father = models.ForeignKey('User')
access_datetime = models.DateTimeField(blank=True, null=True)
flag_allowhidden = models.BooleanField(default=False)
class Meta:
- db_table = u'user'
+ db_table = 'user'
ordering = ('id',)
def __unicode__(self):
return self.login
def set_password(self, raw_password):
- import random
- algo = 'sha1'
- salt = get_hexdigest(algo, str(random.random()), str(random.random()))[:5]
- hsh = get_hexdigest(algo, salt, raw_password)
- self.password_hash = '%s$%s$%s' % (algo, salt, hsh)
+ self.password_hash = hashers.make_password(raw_password)
self.info('Password changed') # FIXME
def check_password(self, raw_password):
password_hash = self.password_hash
if not password_hash:
return False
- algo, salt, hsh = password_hash.split('$')
- return hsh == get_hexdigest(algo, salt, raw_password)
-
+ return hashers.check_password(raw_password, password_hash)
def update_access_datetime(self):
self.access_datetime = datetime.utcnow()
return messages_dict
def info(self, message):
- UserMessage(user_id = self.id, category_id=u'info', txt=html.escape(message)).save()
+ UserMessage(user_id = self.id, category_id='info', txt=html.escape(message)).save()
def error(self, message):
- UserMessage(user_id = self.id, category_id=u'error', txt=html.escape(message)).save()
+ UserMessage(user_id = self.id, category_id='error', txt=html.escape(message)).save()
def check_sandbox_access(self, source_user=None):
SANDBOX_FLEET = 1
dim_starboard = models.IntegerField(default=0)
eta = models.CharField(max_length=8, default='00002460') # format MMDDhhmm
class Meta:
- db_table = u'vessel'
+ db_table = 'vessel'
def __unicode__(self):
return unicode(self.mmsi) # FIXME
def get_last_nmea(self):
vessel = models.ManyToManyField(Vessel, through='FleetVessel')
description = models.TextField()
class Meta:
- db_table = u'fleet'
+ db_table = 'fleet'
def __unicode__(self):
return self.name
def vessel_count(self):
fleet = models.ForeignKey(Fleet) #, db_column='fleet_id', to_field='id')
user = models.ForeignKey(User)
class Meta:
- db_table = u'fleet_user'
+ db_table = 'fleet_user'
class FleetVessel(models.Model):
id = models.AutoField(primary_key=True)
fleet = models.ForeignKey(Fleet, db_column='fleet_id', to_field='id')
vessel = models.ForeignKey(Vessel, db_column='mmsi', to_field='mmsi')
class Meta:
- db_table = u'fleet_vessel'
+ db_table = 'fleet_vessel'
## manual input source
#class MiSource(models.Model):
# userid = models.IntegerField()
# name = models.TextField(unique=True)
# class Meta:
-# db_table = u'mi_source'
+# db_table = 'mi_source'
#
## manual input vessel
#class MiVessel(models.Model):
# mmsi_txt = models.TextField(primary_key=True) # This field type is a guess.
# class Meta:
-# db_table = u'mi_vessel'
+# db_table = 'mi_vessel'
# Plane plotter
# lat = models.FloatField()
# lon = models.FloatField()
# class Meta:
-# db_table = u'ppuser'
+# db_table = 'ppuser'
#
#class Plane(models.Model):
# flight = models.CharField(max_length=8)
# usr = models.TextField() # This field type is a guess.
# updated = models.DateTimeField()
# class Meta:
-# db_table = u'plane'
+# db_table = 'plane'
title = models.TextField()
txt = models.TextField()
class Meta:
- db_table = u'news'
+ db_table = 'news'
class Job(models.Model):
raise
class Meta:
- db_table = u'job'
+ db_table = 'job'
ordering = ('queue_time',)
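
Note: set_password/check_password now delegate to django.contrib.auth.hashers; a quick round-trip sketch (the login and password are made up, and the user row is assumed to already exist so that info() can save its message):

    from ais.djais.models import User   # module path assumed

    user = User.objects.get(login='demo')
    user.set_password('s3cret')           # stored as a salted hash via hashers.make_password
    user.save()
    assert user.check_password('s3cret')  # verified with hashers.check_password
    assert not user.check_password('wrong')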
# Django settings for ais project.
-from __future__ import division
from ais.ntools import read_cfg
from ais.db import DATABASE_CONFIG_FILE
+# SECURITY WARNING: keep the secret key used in production secret!
+SECRET_KEY = 'o^hhivzd2!1s73#c4_zlvz#v+i4kyzdr9+#vw824is!rf8&mgp'
+
+# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
-TEMPLATE_DEBUG = DEBUG
-ADMINS = (
- # ('Your Name', 'your_email@domain.com'),
+ALLOWED_HOSTS = ['*']
+
+# Application definition
+INSTALLED_APPS = (
+# 'django.contrib.auth',
+# 'django.contrib.contenttypes',
+# 'django.contrib.sessions',
+# 'django.contrib.sites',
+ 'ais.djais',
+# 'django.contrib.staticfiles',
+)
+
+MIDDLEWARE = (
+ 'django.middleware.common.CommonMiddleware',
+# 'django.contrib.sessions.middleware.SessionMiddleware',
+# 'django.contrib.auth.middleware.AuthenticationMiddleware',
)
-MANAGERS = ADMINS
+ROOT_URLCONF = 'ais.djais.urls'
-DATABASE_ENGINE = 'postgresql_psycopg2' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
+TEMPLATES = [
+ {
+ 'BACKEND': 'django.template.backends.django.DjangoTemplates',
+ 'DIRS': [ '/usr/share/ais/www_templates' ],
+ 'OPTIONS': {
+ 'context_processors': [
+ 'django.contrib.auth.context_processors.auth',
+ 'django.template.context_processors.debug',
+ 'django.template.context_processors.i18n',
+ 'django.template.context_processors.media',
+ 'django.template.context_processors.request', # Added by Nirgal
+ 'django.template.context_processors.static',
+ 'django.contrib.messages.context_processors.messages',
+ 'ais.djais.context.logout',
+ ],
+ 'debug': DEBUG,
+ },
+ },
+]
+
+# Database
+# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
__cfg__ = read_cfg(DATABASE_CONFIG_FILE)
-DATABASE_NAME = __cfg__['dbname'] # Or path to database file if using sqlite3.
-DATABASE_USER = __cfg__.get('user', '') # Not used with sqlite3.
-DATABASE_PASSWORD = __cfg__.get('password', '') # Not used with sqlite3.
-DATABASE_HOST = __cfg__.get('host', '') # Set to empty string for localhost. Not used with sqlite3.
-DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
-
-# Local time zone for this installation. Choices can be found here:
-# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
-# although not all choices may be available on all operating systems.
-# If running in a Windows environment this must be set to the same as your
-# system time zone.
-TIME_ZONE = 'GMT'
-
-# Language code for this installation. All choices can be found here:
-# http://www.i18nguy.com/unicode/language-identifiers.html
+DATABASES = {
+ 'default': {
+ 'ENGINE': 'django.db.backends.postgresql_psycopg2',
+ 'NAME': __cfg__['dbname'],
+ 'USER': __cfg__.get('user', ''),
+ 'PASSWORD': __cfg__.get('password', ''),
+ 'HOST': __cfg__.get('host', ''),
+ 'PORT': '', # Set to empty string for default.
+ },
+}
+
+# Internationalization
+# https://docs.djangoproject.com/en/1.8/topics/i18n/
+
LANGUAGE_CODE = 'en-gb'
-SITE_ID = 1
+TIME_ZONE = 'UTC'
-# If you set this to False, Django will make some optimizations so as not
-# to load the internationalization machinery.
USE_I18N = True
+USE_L10N = True
+
+USE_TZ = False
+
+# Static files (CSS, JavaScript, Images)
+# https://docs.djangoproject.com/en/1.8/howto/static-files/
+STATIC_ROOT = '/home/nirgal/ais/www/'
+
+STATIC_URL = '/'
+
+STATICFILES_DIRS = (
+ ('javascript', '/usr/share/javascript'),
+)
+
+
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
-MEDIA_ROOT = '/usr/share/ais/www/'
+MEDIA_ROOT = '/var/lib/ais/media/'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
-MEDIA_URL = '/'
+MEDIA_URL = '/media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
-ADMIN_MEDIA_PREFIX = '/media/'
-
-# Make this unique, and don't share it with anybody.
-SECRET_KEY = 'o^hhivzd2!1s73#c4_zlvz#v+i4kyzdr9+#vw824is!rf8&mgp'
-
-# List of callables that know how to import templates from various sources.
-TEMPLATE_LOADERS = (
- 'django.template.loaders.filesystem.load_template_source',
- 'django.template.loaders.app_directories.load_template_source',
-# 'django.template.loaders.eggs.load_template_source',
-)
-
-MIDDLEWARE_CLASSES = (
-# 'django.middleware.common.CommonMiddleware',
-# 'django.contrib.sessions.middleware.SessionMiddleware',
-# 'django.contrib.auth.middleware.AuthenticationMiddleware',
-)
-
-ROOT_URLCONF = 'ais.djais.urls'
-
-TEMPLATE_DIRS = (
- # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
- # Always use forward slashes, even on Windows.
- # Don't forget to use absolute paths, not relative paths.
- '/usr/share/ais/www_templates',
-)
-
-INSTALLED_APPS = (
-# 'django.contrib.auth',
-# 'django.contrib.contenttypes',
-# 'django.contrib.sessions',
-# 'django.contrib.sites',
- 'ais.djais',
-)
-
-TEMPLATE_CONTEXT_PROCESSORS = (
- "django.contrib.auth.context_processors.auth",
- "django.core.context_processors.debug",
- "django.core.context_processors.i18n",
- "django.core.context_processors.media",
- "django.core.context_processors.request", # Added by Nirgal
- "django.contrib.messages.context_processors.messages",
- "ais.djais.context.logout",
-)
+#ADMIN_MEDIA_PREFIX = '/media/'
+AUTH_USER_MODEL='djais.User'
AIS_BASE_URL='https://ais.nirgal.com'
NOTIFICATION_EMAIL='Job runner <contact_ais@nirgal.com>'
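
Note: a quick sanity check of these settings from a Python shell on the server (assumes the ais package is importable and /etc/ais/database is readable):

    import os
    import django

    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ais.djais.settings')
    django.setup()

    from django.conf import settings
    print(settings.DATABASES['default']['NAME'])
    print(settings.TEMPLATES[0]['DIRS'])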
-# -*- encofing: utf8 -*-
-from __future__ import division
from django import template
from django.utils.html import escape
from ais.ntools import mmsi_to_strmmsi
register = template.Library()
+
@register.filter
def sqlmmsi_to_strmmsi(txt):
- return unicode(mmsi_to_strmmsi(txt))
+ return mmsi_to_strmmsi(txt)
+
@register.filter
def format_user_tree(user, requestuser=None, indentlevel=0):
-from __future__ import division
-from django.conf.urls.defaults import *
-import ais.djais
+from django.conf.urls import url
+from django.conf import settings
+from django.conf.urls.static import static
+from django.contrib.staticfiles import views
+from django.contrib.staticfiles.urls import staticfiles_urlpatterns
+
+
+
+import ais.djais.views
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
-urlpatterns = patterns('',
- (r'^$', 'ais.djais.views.index'),
- (r'^fleet/$', 'ais.djais.views.fleets'),
- (r'^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/$', 'ais.djais.views.fleet'),
- (r'^fleet/add$', 'ais.djais.views.fleet_edit', { 'fleetname': None}),
- (r'^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/edit$', 'ais.djais.views.fleet_edit'),
- (r'^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/list$', 'ais.djais.views.fleet_vessels'),
- (r'^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/add$', 'ais.djais.views.fleet_vessel_add'),
- (r'^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/add_vessel$', 'ais.djais.views.fleet_vessel_add2'),
- (r'^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/remove_vessel$', 'ais.djais.views.fleet_vessel_remove'),
- (r'^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/users$', 'ais.djais.views.fleet_users'),
- (r'^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/lastpos$', 'ais.djais.views.fleet_lastpos'),
- (r'^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/history$', 'ais.djais.views.fleet_history'),
-
- (r'^vessel/$', 'ais.djais.views.vessel_search'),
- (r'^vessel/(?P<strmmsi>[0-9A-Z]{9})/$', 'ais.djais.views.vessel'),
- (r'^vessel/(?P<strmmsi>[0-9A-Z]{9})/lastpos$', 'ais.djais.views.vessel_lastpos'),
- (r'^vessel/(?P<strmmsi>[0-9A-Z]{9})/manual_input$', 'ais.djais.views.vessel_manual_input'),
- (r'^vessel/(?P<strmmsi>[0-9A-Z]{9})/history$', 'ais.djais.views.vessel_history'),
- (r'^vessel/(?P<strmmsi>[0-9A-Z]{9})/track$', 'ais.djais.views.vessel_history', {'format': u'track'}),
- (r'^vessel/(?P<strmmsi>[0-9A-Z]{9})/animation$', 'ais.djais.views.vessel_history', {'format': u'animation'}),
- (r'^vessel/(?P<strmmsi>[0-9A-Z]{9})/csv$', 'ais.djais.views.vessel_history', {'format': u'csv'}),
- (r'^user/$', 'ais.djais.views.users'),
- (r'^user/(?P<login>[a-zA-Z0-9_]+)/$', 'ais.djais.views.user_detail'),
- (r'^user/(?P<login>[a-zA-Z0-9_]+)/edit$', 'ais.djais.views.user_edit'),
- (r'^user/add$', 'ais.djais.views.user_edit', {'login':None} ),
- (r'^user/(?P<login>[a-zA-Z0-9_]+)/change_password$', 'ais.djais.views.user_change_password'),
- (r'^user/(?P<login>[a-zA-Z0-9_]+)/delete$', 'ais.djais.views.user_delete'),
- (r'^job/$', 'ais.djais.views.jobs_index'),
- (r'^job/(?P<jobid>[A-Z0-9]+)/$', 'ais.djais.views.job_detail'),
- (r'^job/(?P<jobid>[A-Z0-9]+)/download$', 'ais.djais.views.job_get'),
- (r'^job/(?P<jobid>[A-Z0-9]+)/log$', 'ais.djais.views.job_log'),
- (r'^source/$', 'ais.djais.views.sources_index'),
- (r'^source/stats$', 'ais.djais.views.sources_stats'),
- (r'^news/(?P<page>\d*)$', 'ais.djais.views.news'),
- (r'^news/feed', 'ais.djais.views.news_atom'),
- (r'^logout$', 'ais.djais.views.logout'),
+urlpatterns = [
+ url('^$', ais.djais.views.index),
+ url('^fleet/$', ais.djais.views.fleets),
+ url('^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/$', ais.djais.views.fleet),
+ url('^fleet/add$', ais.djais.views.fleet_edit, { 'fleetname': None}),
+ url('^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/edit$', ais.djais.views.fleet_edit),
+ url('^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/list$', ais.djais.views.fleet_vessels),
+ url('^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/add$', ais.djais.views.fleet_vessel_add),
+ url('^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/add_vessel$', ais.djais.views.fleet_vessel_add2),
+ url('^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/remove_vessel$', ais.djais.views.fleet_vessel_remove),
+ url('^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/users$', ais.djais.views.fleet_users),
+ url('^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/lastpos$', ais.djais.views.fleet_lastpos),
+ url('^fleet/(?P<fleetname>[ a-zA-Z0-9_]+)/history$', ais.djais.views.fleet_history),
+
+ url('^vessel/$', ais.djais.views.vessel_search),
+ url('^vessel/(?P<strmmsi>[0-9A-Z]{9})/$', ais.djais.views.vessel),
+ url('^vessel/(?P<strmmsi>[0-9A-Z]{9})/lastpos$', ais.djais.views.vessel_lastpos),
+ url('^vessel/(?P<strmmsi>[0-9A-Z]{9})/manual_input$', ais.djais.views.vessel_manual_input),
+ url('^vessel/(?P<strmmsi>[0-9A-Z]{9})/history$', ais.djais.views.vessel_history),
+ url('^vessel/(?P<strmmsi>[0-9A-Z]{9})/track$', ais.djais.views.vessel_history, {'format': 'track'}),
+ url('^vessel/(?P<strmmsi>[0-9A-Z]{9})/animation$', ais.djais.views.vessel_history, {'format': 'animation'}),
+ url('^vessel/(?P<strmmsi>[0-9A-Z]{9})/csv$', ais.djais.views.vessel_history, {'format': 'csv'}),
+ url('^user/$', ais.djais.views.users),
+ url('^user/(?P<login>[a-zA-Z0-9_]+)/$', ais.djais.views.user_detail),
+ url('^user/(?P<login>[a-zA-Z0-9_]+)/edit$', ais.djais.views.user_edit),
+ url('^user/add$', ais.djais.views.user_edit, {'login': None} ),
+ url('^user/(?P<login>[a-zA-Z0-9_]+)/change_password$', ais.djais.views.user_change_password),
+ url('^user/(?P<login>[a-zA-Z0-9_]+)/delete$', ais.djais.views.user_delete),
+ url('^job/$', ais.djais.views.jobs_index),
+ url('^job/(?P<jobid>[A-Z0-9]+)/$', ais.djais.views.job_detail),
+ url('^job/(?P<jobid>[A-Z0-9]+)/download$', ais.djais.views.job_get),
+ url('^job/(?P<jobid>[A-Z0-9]+)/log$', ais.djais.views.job_log),
+ url('^source/$', ais.djais.views.sources_index),
+ url('^source/stats$', ais.djais.views.sources_stats),
+ url(r'^news/(?P<page>\d*)$', ais.djais.views.news),
+ url('^news/feed', ais.djais.views.news_atom),
+ url('^logout$', ais.djais.views.logout),
# Example:
- # (r'^ais2/', include('ais2.foo.urls')),
+ # ('^ais2/', include('ais2.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
- # (r'^admin/doc/', include('django.contrib.admindocs.urls')),
+ # ('^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
- # (r'^admin/(.*)', admin.site.root),
-)
+ # ('^admin/(.*)', admin.site.root),
+# url(r'^(?P<path>.*)$', views.serve),
+
+] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
+#]
+
+
+#urlpatterns += staticfiles_urlpatterns()
-# -*- coding: utf-8 -*-
-
-from __future__ import division
-
#TODO
#Normalize 403 errors
import crack
import struct
import operator
-import rrdtool
+#NNimport rrdtool
import csv
-from StringIO import StringIO
from django.http import *
from django.template import loader, RequestContext
from django import forms
-from django.shortcuts import render_to_response, get_object_or_404
+from django.shortcuts import render, get_object_or_404
from django.utils.safestring import mark_safe
from decoratedstr import remove_decoration
@http_authenticate(auth, 'ais')
def index(request):
news = News.objects.order_by('-created')[:5]
- return render_to_response('index.html', {'news':news}, RequestContext(request))
+ return render(request, 'index.html', {'news':news})
class VesselSearchForm(forms.Form):
def country_choices():
choices = {}
- for mid, countryname in COUNTRIES_MID.iteritems():
- if choices.has_key(countryname):
- choices[countryname] += u','+unicode(mid)
+ for mid, countryname in COUNTRIES_MID.items():
+ if countryname in choices:
+ choices[countryname] += ',' + str(mid)
else:
- choices[countryname] = unicode(mid)
- choices = [ (countryname, mids) for mids, countryname in choices.iteritems() ]
+ choices[countryname] = str(mid)
+ choices = [ (countryname, mids) for mids, countryname in choices.items() ]
choices = sorted(choices, key=operator.itemgetter(1))
- return [(u'', u'Any')] + choices
+ return [('', 'Any')] + choices
name = forms.CharField(max_length=20, required=False)
- mmsi = forms.CharField(help_text=u'Maritime Mobile Service Identity', min_length=9, max_length=9, required=False)
- imo = forms.IntegerField(help_text=u'International Maritime Organization identifier assigned by Llyod', required=False)
+ mmsi = forms.CharField(help_text='Maritime Mobile Service Identity', min_length=9, max_length=9, required=False)
+ imo = forms.IntegerField(help_text='International Maritime Organization identifier assigned by Lloyd\'s', required=False)
callsign = forms.CharField(max_length=7, required=False)
flag = forms.ChoiceField(choices=country_choices(), required=False)
destination = forms.CharField(max_length=20, required=False)
@http_authenticate(auth, 'ais')
def vessel_search(request):
- if request.method == 'POST' or request.META['QUERY_STRING']:
- form = VesselSearchForm(request.REQUEST)
+ if request.method == 'POST':
+ form = VesselSearchForm(request.POST)
if form.is_valid():
data = form.cleaned_data
vessels = Vessel.objects
vessels = vessels.filter(destination__contains=data['destination'].upper())
if data['flag']:
vessels = vessels.extra(where=['mmsi/1000000 IN (%s) ' % data['flag']])
- return render_to_response('vessels.html', {'vessels': vessels}, RequestContext(request))
+ return render(request, 'vessels.html', {'vessels': vessels})
else: # GET
form = VesselSearchForm()
- return render_to_response('vessel_index.html', {'form': form}, RequestContext(request))
+ return render(request, 'vessel_index.html', {'form': form})
class SecondsWidget(forms.MultiWidget):
'''
Does not work well for more than a week, as month lengths vary.
'''
__periods = (
- (u'1', u'second(s)'),
- (u'60', u'minute(s)'),
- (u'3600', u'hour(s)'),
- (u'86400', u'day(s)'),
- (u'604800', u'week(s)'),
- (u'2592000', u'month(es)'))
+ ('1', 'second(s)'),
+ ('60', 'minute(s)'),
+ ('3600', 'hour(s)'),
+ ('86400', 'day(s)'),
+ ('604800', 'week(s)'),
+ ('2592000', 'month(s)'))
def __init__(self, attrs=None):
textattrs = { 'size': 3 }
if value:
for period_second in reversed([int(sec_txt[0]) for sec_txt in self.__periods]):
if value >= period_second and not value % period_second:
- return [ unicode(value // period_second), unicode(period_second) ]
+ return [ str(value // period_second), str(period_second) ]
return [None, None]
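
Note: a worked example of the decompress logic above, outside Django (values are illustrative):

    periods = [1, 60, 3600, 86400, 604800, 2592000]   # same lengths as __periods
    value = 7200                                      # 2 hours, in seconds
    for period in reversed(periods):
        if value >= period and not value % period:
            print(value // period, period)            # prints: 2 3600, i.e. "2 hour(s)"
            break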
class SecondsField(forms.MultiValueField):
end_date = forms.DateTimeField(required=False, widget=AisCalendarWidget(attrs={'class':'vDateField'}))
grain = SecondsField(label='One position every', initial=3600)
filter_area = forms.ChoiceField(required=False, label='Only in area',
- choices=[ (u'', u'(Not filtered)') ] + [ (fn[1], fn[0]) for fn in list_areas() ] )
+ choices=[ ('', '(Not filtered)') ] + [ (fn[1], fn[0]) for fn in list_areas() ] )
reference_latitude = forms.CharField(required=False)
reference_longitude = forms.CharField(required=False)
- filter_close_to = forms.FloatField(required=False, label=u'Only if closer than n miles from reference point')
- filter_far_from = forms.FloatField(required=False, label=u'Only if farther than n miles from reference point')
- filter_sog_le = forms.FloatField(required=False, label=u'Only if speed ≤ (knots)')
- filter_sog_ge = forms.FloatField(required=False, label=u'Only if speed ≥ (knots)')
- filter_destination = forms.CharField(required=False, label=u'Only if destination starts with', max_length=20, help_text=u'Use AIS upper case letters')
+ filter_close_to = forms.FloatField(required=False, label='Only if closer than n miles from reference point')
+ filter_far_from = forms.FloatField(required=False, label='Only if farther than n miles from reference point')
+ filter_sog_le = forms.FloatField(required=False, label='Only if speed ≤ (knots)')
+ filter_sog_ge = forms.FloatField(required=False, label='Only if speed ≥ (knots)')
+ filter_destination = forms.CharField(required=False, label='Only if destination starts with', max_length=20, help_text='Use AIS upper case letters')
def clean_start_date(self):
period_type = self.cleaned_data.get('period_type', None)
start_date = self.cleaned_data.get('start_date', None)
- if period_type in (u'date_date', u'start_duration') and start_date is None:
+ if period_type in ('date_date', 'start_duration') and start_date is None:
raise forms.ValidationError('This field is required.')
return start_date
def clean_duration(self):
period_type = self.cleaned_data.get('period_type', None)
duration = self.cleaned_data.get('duration', None)
- print 'duration=', duration
- if period_type in (u'duration', u'start_duration') and duration is None:
+ print('duration=', duration)
+ if period_type in ('duration', 'start_duration') and duration is None:
raise forms.ValidationError('This field is required.')
return duration
def clean_end_date(self):
period_type = self.cleaned_data.get('period_type', None)
end_date = self.cleaned_data.get('end_date', None)
- if period_type in (u'date_date',) and end_date is None:
+ if period_type in ('date_date',) and end_date is None:
raise forms.ValidationError('This field is required.')
return end_date
period_type = self.cleaned_data.get('period_type', None)
start_date = self.cleaned_data.get('start_date', None)
end_date = self.cleaned_data.get('end_date', None)
- if period_type == u'date_date' and start_date is not None and end_date is not None:
+ if period_type == 'date_date' and start_date is not None and end_date is not None:
if start_date >= end_date:
self._errors["start_date"] = self.error_class(['Start date must be before end date.'])
reference_latitude = self.cleaned_data.get('reference_latitude', None)
filter_far_from = self.cleaned_data.get('filter_far_from', None)
if filter_close_to is not None:
if reference_latitude is None:
- self._errors['reference_latitude'] = self.error_class([u'Field required when using close_to filter'])
+ self._errors['reference_latitude'] = self.error_class(['Field required when using close_to filter'])
if reference_longitude is None:
- self._errors['reference_longitude'] = self.error_class([u'Field required when using close_to filter'])
+ self._errors['reference_longitude'] = self.error_class(['Field required when using close_to filter'])
if filter_far_from is not None:
if reference_latitude is None:
- self._errors['reference_latitude'] = self.error_class([u'Field required when using far_from filter'])
+ self._errors['reference_latitude'] = self.error_class(['Field required when using far_from filter'])
if reference_longitude is None:
- self._errors['reference_longitude'] = self.error_class([u'Field required when using far_from filter'])
+ self._errors['reference_longitude'] = self.error_class(['Field required when using far_from filter'])
return cleaned_data
def get_cmdext(self):
Returns (command, extension)
'''
def escape_cmd_arg(txt):
- txt = unicode(txt)
+ txt = str(txt)
isclean = True
for c in txt:
if c.upper() not in '+-./0123456789=@^_ABCDEFGHIJKLMNOPQRSTUVWXYZ':
break
if isclean:
return txt
- result = u'"'
+ result = '"'
for c in txt:
- if c in u'"\\':
- result += u'\\'
+ if c in '"\\':
+ result += '\\'
result += c
- return result + u'"'
+ return result + '"'
def addparam(txt):
command.append(escape_cmd_arg(txt))
data = self.cleaned_data
format = data['format']
- if format == u'track':
- command = [ u'show_targets_ships' ]
- addparam(u'--format=track')
- extension = u'kmz'
+ if format == 'track':
+ command = [ 'show_targets_ships' ]
+ addparam('--format=track')
+ extension = 'kmz'
- elif format == u'animation':
- command = [ u'show_targets_ships' ]
- addparam(u'--format=animation')
- extension = u'kmz'
+ elif format == 'animation':
+ command = [ 'show_targets_ships' ]
+ addparam('--format=animation')
+ extension = 'kmz'
- elif format == u'csv':
- command = [ u'common' ]
- extension = u'csv'
+ elif format == 'csv':
+ command = [ 'common' ]
+ extension = 'csv'
else:
- raise Http404(u'Invalid archive format')
+ raise Http404('Invalid archive format')
date_start = data.get('start_date', None)
if date_start:
- addparam(u'--start')
+ addparam('--start')
addparam(date_start.strftime('%Y%m%d'))
date_end = data.get('end_date', None)
if date_end:
- addparam(u'--end')
+ addparam('--end')
addparam(date_end.strftime('%Y%m%d'))
duration = data.get('duration', None)
if duration:
- addparam(u'--duration')
+ addparam('--duration')
addparam(duration)
grain = data['grain']
- addparam(u'--granularity')
+ addparam('--granularity')
addparam(grain)
filter_area = data.get('filter_area', None)
if filter_area:
- addparam(u'--filter-area')
+ addparam('--filter-area')
addparam(filter_area)
filter_sog_le = data.get('filter_sog_le', None)
if filter_sog_le:
- addparam(u'--filter-sog-le')
+ addparam('--filter-sog-le')
addparam(filter_sog_le)
filter_sog_ge = data.get('filter_sog_ge', None)
if filter_sog_ge:
- addparam(u'--filter-sog-ge')
+ addparam('--filter-sog-ge')
addparam(filter_sog_ge)
filter_destination = data.get('filter_destination', None)
if filter_destination:
- addparam(u'--filter-destination')
+ addparam('--filter-destination')
addparam(filter_destination)
reference_latitude = self.cleaned_data.get('reference_latitude', None)
filter_close_to = self.cleaned_data.get('filter_close_to', None)
filter_far_from = self.cleaned_data.get('filter_far_from', None)
if filter_close_to and reference_latitude and reference_longitude:
- addparam(u'--filter-closeto')
+ addparam('--filter-closeto')
addparam(reference_latitude)
addparam(reference_longitude)
addparam(filter_close_to)
if filter_far_from and reference_latitude and reference_longitude:
- addparam(u'--filter-farfrom')
+ addparam('--filter-farfrom')
addparam(reference_latitude)
addparam(reference_longitude)
addparam(filter_far_from)
- return u' '.join(command), extension
+ return ' '.join(command), extension
@http_authenticate(auth, 'ais')
# raise Http404
is_hidden = mmsi in get_hidden_mmsi()
hide_ship = is_hidden and not request.user.flag_allowhidden
- return render_to_response('vessel.html', {'nmea': nmea, 'is_hidden': is_hidden, 'hide_ship': hide_ship,'form': HistoryForm()}, RequestContext(request))
+ return render(request, 'vessel.html', {'nmea': nmea, 'is_hidden': is_hidden, 'hide_ship': hide_ship,'form': HistoryForm()})
class VesselManualInputForm(forms.Form):
- timestamp = forms.DateTimeField(label=u'When', help_text=u'When was the observation made in GMT. Use YYYY-MM-DD HH:MM:SS format')
+ timestamp = forms.DateTimeField(label='When', help_text='When was the observation made in GMT. Use YYYY-MM-DD HH:MM:SS format')
imo = forms.IntegerField(required=False, min_value=1000000, max_value=9999999)
name = forms.CharField(max_length=20, required=False)
callsign = forms.CharField(max_length=7, required=False)
- type = forms.TypedChoiceField(required=False, choices = [ kv for kv in SHIP_TYPES.iteritems() if 'reserved' not in kv[1].lower()], coerce=int, empty_value=0, initial=0)
- status = forms.TypedChoiceField(required=False, choices = [ kv for kv in STATUS_CODES.iteritems() if 'reserved' not in kv[1].lower()], coerce=int, empty_value=AIS_STATUS_NOT_AVAILABLE, initial=AIS_STATUS_NOT_AVAILABLE)
+ type = forms.TypedChoiceField(required=False, choices = [ kv for kv in SHIP_TYPES.items() if 'reserved' not in kv[1].lower()], coerce=int, empty_value=0, initial=0)
+ status = forms.TypedChoiceField(required=False, choices = [ kv for kv in STATUS_CODES.items() if 'reserved' not in kv[1].lower()], coerce=int, empty_value=AIS_STATUS_NOT_AVAILABLE, initial=AIS_STATUS_NOT_AVAILABLE)
sog = forms.FloatField(label='Speed', help_text='Over ground, in knots', required=False, min_value=0, max_value=AIS_SOG_MAX_SPEED/AIS_SOG_SCALE)
latitude = forms.CharField(required=False)
longitude = forms.CharField(required=False)
ustr = remove_decoration(ustr) # benign cleaning, but can increase size (œ->oe)
ustr = ustr.upper() # benign cleaning
str = clean_ais_charset(ustr.encode('ascii', 'replace'))
- if unicode(str) != ustr:
+ if str != ustr:  # note: local 'str' is the cleaned value above and shadows the builtin
raise forms.ValidationError('Invalid character: AIS alphabet is @ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^- !"#$%&\'()*+,-./0123456789:;<=>?')
return str
def clean_latitude(self):
#TODO: use ntools.clean_latitude
data = self.cleaned_data['latitude']
- data = data.replace(u"''", u'"') # common mistake
- data = data.replace(u' ', u'') # remove spaces
- sides = u'SN'
+ data = data.replace("''", '"') # common mistake
+ data = data.replace(' ', '') # remove spaces
+ sides = 'SN'
if not data:
return AIS_LAT_NOT_AVAILABLE
tmp, side = data[:-1], data[-1]
elif side == sides[1]:
side = 1
else:
- raise forms.ValidationError(u'Last character must be either %s or %s.' % (sides[0], sides[1]))
- spl = tmp.split(u'°')
+ raise forms.ValidationError('Last character must be either %s or %s.' % (sides[0], sides[1]))
+ spl = tmp.split('°')
if len(spl) == 1:
- raise forms.ValidationError(u'You need to use the ° character.')
+ raise forms.ValidationError('You need to use the ° character.')
d, tmp = spl
try:
d = float(d)
except ValueError:
- raise forms.ValidationError(u'Degrees must be an number. It\'s %s.' % d)
- spl = tmp.split(u"'", 1)
+ raise forms.ValidationError('Degrees must be a number. It\'s %s.' % d)
+ spl = tmp.split("'", 1)
if len(spl) == 1:
# no ' sign: ok only if there is nothing but the side after °
# we don't accept seconds if there is no minutes:
if len(tmp) == 0:
m = s = 0
else:
- raise forms.ValidationError(u'You must use the \' character between ° and %s.' % data[-1])
+ raise forms.ValidationError('You must use the \' character between ° and %s.' % data[-1])
else:
m, tmp = spl
try:
m = float(m)
except ValueError:
- raise forms.ValidationError(u'Minutes must be an number. It\'s %s.' % m)
+ raise forms.ValidationError('Minutes must be a number. It\'s %s.' % m)
if len(tmp) == 0:
s = 0
else:
if tmp[-1] != '"':
- raise forms.ValidationError(u'You must use the " character between seconds and %s.' % data[-1])
+ raise forms.ValidationError('You must use the " character between seconds and %s.' % data[-1])
s = tmp[:-1]
try:
s = float(s)
except ValueError:
- raise forms.ValidationError(u'Seconds must be an number. It\'s %s.' % s)
+ raise forms.ValidationError('Seconds must be a number. It\'s %s.' % s)
data = side * ( d + m / 60 + s / 3600)
if data < -90 or data > 90:
- raise forms.ValidationError(u'%s in not in -90..90 range' % data)
+ raise forms.ValidationError('%s is not in the -90..90 range' % data)
return int(data * AIS_LATLON_SCALE)
def clean_longitude(self):
#TODO: use ntools.clean_latitude
data = self.cleaned_data['longitude']
- data = data.replace(u"''", u'"') # common mistake
- data = data.replace(u' ', u'') # remove spaces
- sides = u'WE'
+ data = data.replace("''", '"') # common mistake
+ data = data.replace(' ', '') # remove spaces
+ sides = 'WE'
if not data:
return AIS_LON_NOT_AVAILABLE
tmp, side = data[:-1], data[-1]
elif side == sides[1]:
side = 1
else:
- raise forms.ValidationError(u'Last character must be either %s or %s.' % (sides[0], sides[1]))
- spl = tmp.split(u'°')
+ raise forms.ValidationError('Last character must be either %s or %s.' % (sides[0], sides[1]))
+ spl = tmp.split('°')
if len(spl) == 1:
- raise forms.ValidationError(u'You need to use the ° character.')
+ raise forms.ValidationError('You need to use the ° character.')
d, tmp = spl
try:
d = float(d)
except ValueError:
- raise forms.ValidationError(u'Degrees must be an number. It\'s %s.' % d)
- spl = tmp.split(u"'", 1)
+ raise forms.ValidationError('Degrees must be a number. It\'s %s.' % d)
+ spl = tmp.split("'", 1)
if len(spl) == 1:
# no ' sign: ok only if there is nothing but the side after °
# we don't accept seconds if there is no minutes:
if len(tmp) == 0:
m = s = 0
else:
- raise forms.ValidationError(u'You must use the \' character between ° and %s.' % data[-1])
+ raise forms.ValidationError('You must use the \' character between ° and %s.' % data[-1])
else:
m, tmp = spl
try:
m = float(m)
except ValueError:
- raise forms.ValidationError(u'Minutes must be an number. It\'s %s.' % m)
+ raise forms.ValidationError('Minutes must be a number. It\'s %s.' % m)
if len(tmp) == 0:
s = 0
else:
if tmp[-1] != '"':
- raise forms.ValidationError(u'You must use the " character between seconds and %s.' % data[-1])
+ raise forms.ValidationError('You must use the " character between seconds and %s.' % data[-1])
s = tmp[:-1]
try:
s = float(s)
except ValueError:
- raise forms.ValidationError(u'Seconds must be an number. It\'s %s.' % s)
+ raise forms.ValidationError('Seconds must be a number. It\'s %s.' % s)
data = side * ( d + m / 60 + s / 3600)
if data < -180 or data > 180:
- raise forms.ValidationError(u'%s in not in -180..180 range' % data)
+ raise forms.ValidationError('%s is not in the -180..180 range' % data)
return int(data * AIS_LATLON_SCALE)
def clean_cog(self):
return int(data * AIS_COG_SCALE)
def clean_heading(self):
- #raise forms.ValidationError(u'clean_heading called')
+ #raise forms.ValidationError('clean_heading called')
data = self.cleaned_data['heading']
if data is None:
return AIS_NO_HEADING
if form.is_valid():
data = form.cleaned_data
source = 'U' + struct.pack('<I', request.user.id)[0:3]
- result = u''
+ result = ''
if data['imo'] != 0 \
or data['name'] != '' \
or data['callsign'] != '' \
return HttpResponse('Not fully implemented: '+repr(data) + '<br>' + result)
else: # GET
form = VesselManualInputForm()
- return render_to_response('vessel_manual_input.html', {'form': form, 'nmea': nmea}, RequestContext(request))
+ return render(request, 'vessel_manual_input.html', {'form': form, 'nmea': nmea})
@http_authenticate(auth, 'ais')
display_options = 0
if request.user.flag_allowhidden:
display_options = KML_DISPLAYOPT_SHOWHIDDEN
- value = kml_to_kmz(format_fleet_lastpos([ strmmsi ], document_name=strmmsi, display_options=display_options).encode('utf-8'))
- response = HttpResponse(value, mimetype="application/vnd.google-earth.kmz")
+ value = kml_to_kmz(format_fleet_lastpos([ strmmsi ], document_name=strmmsi, display_options=display_options))
+ response = HttpResponse(value, content_type="application/vnd.google-earth.kmz")
response['Content-Disposition'] = 'attachment; filename=%s.kmz' % strmmsi
return response
command, extension = form.get_cmdext()
if request.user.flag_allowhidden:
- command += u' --show-hidden-ships'
- command += u' ' + strmmsi
+ command += ' --show-hidden-ships'
+ command += ' ' + strmmsi
job = Job()
job.queue_time = datetime.utcnow()
- job.friendly_filename = u'%s.%s' % (strmmsi, extension)
+ job.friendly_filename = '%s.%s' % (strmmsi, extension)
job.user = request.user
job.command = command
job.save()
return HttpResponseRedirect('/job/%s/download' % job.id)
else: # GET
form = HistoryForm(initial=initial)
- strmmsi = strmmsi.encode('utf-8')
nmea = Nmea.new_from_lastinfo(strmmsi)
- return render_to_response('vessel_history.html', {'nmea': nmea, 'form':form}, RequestContext(request))
+ return render(request, 'vessel_history.html', {'nmea': nmea, 'form':form})
@http_authenticate(auth, 'ais')
def fleets(request):
fleetusers = request.user.fleetuser_set.order_by('fleet')
- return render_to_response('fleets.html', {'fleetusers':fleetusers}, RequestContext(request))
+ return render(request, 'fleets.html', {'fleetusers':fleetusers})
@http_authenticate(auth, 'ais')
fleet = get_object_or_404(Fleet, name=fleetname)
if not FleetUser.objects.filter(fleet=fleet.id, user=request.user.id).all():
return HttpResponseForbidden('<h1>Forbidden</h1>')
- return render_to_response('fleet.html', {'fleet':fleet}, RequestContext(request))
+ return render(request, 'fleet.html', {'fleet':fleet})
class FleetEditForm(forms.Form):
def clean_name(self):
name = self.cleaned_data['name']
- if name == u'add':
+ if name == 'add':
raise forms.ValidationError("Sorry that name is reserved. Try another one.")
if name != self.initial.get('name', None):
if Fleet.objects.filter(name=name).count():
raise forms.ValidationError("Sorry that fleet name is already in use. Try another one.")
- return remove_decoration(name).replace(u' ', u'_')
+ return remove_decoration(name).replace(' ', '_')
@http_authenticate(auth, 'ais')
return HttpResponseRedirect('/fleet/%s/' % fleet.name)
else: # GET
form = FleetEditForm(initial=initial)
- return render_to_response('fleet_edit.html', {'fleet':fleet, 'form':form}, RequestContext(request))
+ return render(request,'fleet_edit.html', {'fleet':fleet, 'form':form})
@http_authenticate(auth, 'ais')
if not FleetUser.objects.filter(fleet=fleet, user=request.user.id).all():
return HttpResponseForbidden('<h1>Forbidden</h1>')
vessels = fleet.vessel.all()
- return render_to_response('fleet_vessels.html', {'fleet':fleet, 'vessels': vessels}, RequestContext(request))
+ return render(request, 'fleet_vessels.html', {'fleet':fleet, 'vessels': vessels})
@http_authenticate(auth, 'ais')
fv = FleetVessel.objects.get(fleet=fleet, vessel=vessel)
except FleetVessel.DoesNotExist:
FleetVessel(fleet=fleet, vessel=vessel).save()
- request.user.info(u'Vessel %s added to fleet %s.' % (vessel.name, fleetname))
+ request.user.info('Vessel %s added to fleet %s.' % (vessel.name, fleetname))
else:
request.user.error('Vessel with MMSI %s is already in that fleet' % strmmsi)
return HttpResponseRedirect('/fleet/%s/list' % fleetname)
class FleetAddVessel(forms.Form):
- mmsi = forms.CharField(help_text=u'Enter one MMSI per line', required=False, widget=forms.Textarea)
+ mmsi = forms.CharField(help_text='Enter one MMSI per line', required=False, widget=forms.Textarea)
#name = forms.CharField(max_length=20, required=False)
#imo = forms.IntegerField(required=False)
#callsign = forms.CharField(max_length=7, required=False)
else: # GET
form = FleetAddVessel()
- return render_to_response('fleet_vessel_add.html', {'form': form, 'fleet': fleet}, RequestContext(request))
+ return render(request, 'fleet_vessel_add.html', {'form': form, 'fleet': fleet})
@http_authenticate(auth, 'ais')
try:
fv = FleetVessel.objects.get(fleet=fleet, vessel=vessel)
fv.delete()
- request.user.info(u'Vessel %s removed from fleet %s.' % (vessel.name, fleetname))
+ request.user.info('Vessel %s removed from fleet %s.' % (vessel.name, fleetname))
except FleetVessel.DoesNotExist:
- request.user.error(u'Vessel %s is not in fleet %s.' % (vessel.name, fleetname))
+ request.user.error('Vessel %s is not in fleet %s.' % (vessel.name, fleetname))
return HttpResponseRedirect('/fleet/%s/list' % fleetname)
try:
user = User.objects.get(login=userlogin)
except User.DoesNotExist:
- request.user.error(u'User %s does not exist.' % userlogin)
+ request.user.error('User %s does not exist.' % userlogin)
else:
- if action == u'add':
+ if action == 'add':
try:
fu = FleetUser.objects.get(fleet=fleet, user=user)
- request.user.error(u'User %s already has access.' % user.login)
+ request.user.error('User %s already has access.' % user.login)
except FleetUser.DoesNotExist:
FleetUser(fleet=fleet, user=user).save()
#TODO log
- request.user.info(u'Granted access to user %s.' % user.login)
- elif action == u'revoke':
+ request.user.info('Granted access to user %s.' % user.login)
+ elif action == 'revoke':
try:
fu = FleetUser.objects.get(fleet=fleet, user=user)
fu.delete()
#TODO log
- request.user.info(u'Revoked access to user %s.' % user.login)
+ request.user.info('Revoked access to user %s.' % user.login)
if FleetUser.objects.filter(fleet=fleet).count() == 0:
fleet.delete()
- request.user.info(u"Deleted fleet %s (no more users)." % fleet.name)
+ request.user.info("Deleted fleet %s (no more users)." % fleet.name)
return HttpResponseRedirect('/fleet/')
except FleetUser.DoesNotExist:
- request.user.error(u'User %s didn\'t have access.' % user.login)
+ request.user.error('User %s didn\'t have access.' % user.login)
else:
- request.user.error(u'Unknown action %s' % action)
+ request.user.error('Unknown action %s' % action)
fleetusers = fleet.fleetuser_set.order_by('user__name')
otherusers = User.objects.exclude(id__in=[fu.user.id for fu in fleetusers]).order_by('name')
- return render_to_response('fleet_users.html', {'fleet':fleet, 'fleetusers': fleetusers, 'otherusers': otherusers}, RequestContext(request))
+ return render(request, 'fleet_users.html', {'fleet':fleet, 'fleetusers': fleetusers, 'otherusers': otherusers})
@http_authenticate(auth, 'ais')
command, extension = form.get_cmdext()
if request.user.flag_allowhidden:
- command += u' --show-hidden-ships'
- command += u' @' + fleetname
+ command += ' --show-hidden-ships'
+ command += ' @' + fleetname
job = Job()
job.queue_time = datetime.utcnow()
- job.friendly_filename = u'%s.%s' % (fleetname, extension)
+ job.friendly_filename = '%s.%s' % (fleetname, extension)
job.user = request.user
job.command = command
job.save()
return HttpResponseRedirect('/job/%s/download' % job.id)
else: # GET
form = HistoryForm(initial=initial)
- return render_to_response('fleet_history.html', {'fleet': fleet, 'form':form}, RequestContext(request))
+ return render(request, 'fleet_history.html', {'fleet': fleet, 'form':form})
@http_authenticate(auth, 'ais')
def jobs_index(request):
- show_archive = request.REQUEST.has_key('archive')
+ show_archive = 'archive' in request.GET
if show_archive:
jobs = request.user.job_set.all()
else:
jobs = request.user.job_set.filter(archive_time__isnull=True)
- response = render_to_response('jobs.html', {'jobs': jobs, 'queue_size': Job.queue_size(), 'archive': show_archive }, RequestContext(request))
+ response = render(request, 'jobs.html', {'jobs': jobs, 'queue_size': Job.queue_size(), 'archive': show_archive })
response['Refresh'] = 15
return response
job = get_object_or_404(Job, id=jobid)
if job.user != request.user:
return HttpResponseForbidden('403 Forbidden')
- response = render_to_response('job.html', {'job': job, 'queue_size': Job.queue_size()}, RequestContext(request))
+ response = render(request, 'job.html', {'job': job, 'queue_size': Job.queue_size()})
if not job.finish_time:
response['Refresh'] = 5
elif not job.archive_time:
job = get_object_or_404(Job, id=jobid)
if job.user != request.user:
return HttpResponseForbidden('403 Forbidden')
- log = u''
+ log = ''
try:
- log = file(jobrunner.RESULT_DIR+unicode(jobid)+'.log').read()
+ log = open(jobrunner.RESULT_DIR+str(jobid)+'.log').read()
except IOError as err:
if err.errno != 2: # No such file
raise
- return render_to_response('job_log.html', {'job': job, 'log': log}, RequestContext(request))
+ return render(request, 'job_log.html', {'job': job, 'log': log})
@http_authenticate(auth, 'ais')
users = User.objects.order_by('name')
for user in users:
user.admin_ok = user.is_admin_by(request.user.id)
- if request.REQUEST.has_key('showtree'):
+ if 'showtree' in request.GET:
local_users = {}
for user in users:
user.children = []
else:
local_users[user.father_id].children.append(user)
assert root
- return render_to_response('users_tree.html', {'root': root, 'auser': request.user.id}, RequestContext(request))
+ return render(request, 'users_tree.html', {'root': root, 'auser': request.user.id})
else:
- return render_to_response('users.html', {'users':users}, RequestContext(request))
+ return render(request, 'users.html', {'users':users})
phone_re = re.compile('^\\+.+')
if new_login != self.old_login:
if User.objects.filter(login=new_login).count():
raise forms.ValidationError("Sorry that login is already in use. Try another one.")
- if new_login == u'add':
+ if new_login == 'add':
raise forms.ValidationError("Sorry that login is reserved. Try another one.")
return new_login
def user_detail(request, login):
user = get_object_or_404(User, login=login)
user.admin_ok = user.is_admin_by(request.user.id)
- return render_to_response('user_detail.html', {'auser': user}, RequestContext(request))
+ return render(request, 'user_detail.html', {'auser': user})
@http_authenticate(auth, 'ais')
def user_edit(request, login):
else: # GET
form = UserEditForm(request.user.flag_allowhidden, initial=initial)
- return render_to_response('user_edit.html', {'form':form, 'auser': user}, RequestContext(request))
+ return render(request, 'user_edit.html', {'form':form, 'auser': user})
class ChangePasswordForm(forms.Form):
return HttpResponseRedirect('/user/')
else: # GET
form = ChangePasswordForm()
- return render_to_response('user_change_password.html', {'form':form, 'auser':user}, RequestContext(request))
+ return render(request, 'user_change_password.html', {'form':form, 'auser':user})
@http_authenticate(auth, 'ais')
- if request.REQUEST.get('confirm', None):
+ if request.POST.get('confirm', request.GET.get('confirm', None)):
user.delete()
return HttpResponseRedirect('/user/')
- return render_to_response('user_delete.html', {'form':None, 'auser':user}, RequestContext(request))
+ return render(request, 'user_delete.html', {'form':None, 'auser':user})
def logout(request):
- response = render_to_response('logout.html', {}, RequestContext(request))
+ response = render(request, 'logout.html', {})
return response
periods = ({
})
class StatsSelectorForm(forms.Form):
- peers = forms.ChoiceField(choices=[('', 'All')] + [(id4,source['name']) for id4,source in peers_get_config().iteritems()])
+ peers = forms.ChoiceField(choices=[('', 'All')] + [(id4,source['name']) for id4,source in peers_get_config().items()])
types = forms.ChoiceField(choices=[('', 'All'), ('bytes', 'Bandwidth'), ('counts', "Packet'izer")], initial='counts')
periods = forms.ChoiceField(choices=[(period['name_tiny'], period['name_long']) for period in periods] + [('', 'All')], initial='2d')
ACTIVE_MINUTES = 5
peers_config = peers_get_config()
peers_display = []
- for id4, peer in peers_config.iteritems():
+ for id4, peer in peers_config.items():
peer['id4'] = id4
peer['id2'] = id4[2:]
peer['active'] = is_id4_active(id4, 60*ACTIVE_MINUTES)
form = StatsSelectorForm()
- response = render_to_response('sources_index.html', {'sources':peers_display, 'active_minutes': ACTIVE_MINUTES, 'form': form}, RequestContext(request))
+ response = render(request, 'sources_index.html', {'sources':peers_display, 'active_minutes': ACTIVE_MINUTES, 'form': form})
response['Refresh'] = 60*ACTIVE_MINUTES
return response
@http_authenticate(auth, 'ais')
def sources_stats(request):
- filter_peers = request.REQUEST.get('peers', None)
+ if request.method == 'POST':
+ request_vars = request.POST
+ else:
+ request_vars = request.GET
+
+ filter_peers = request_vars.get('peers', None)
if filter_peers:
- filter_peers = filter_peers.split(u',')
+ filter_peers = filter_peers.split(',')
peers_config = peers_get_config()
peers_display = []
- for id4, peer in peers_config.iteritems():
+ for id4, peer in peers_config.items():
if filter_peers:
- if unicode(id4) not in filter_peers:
+ if id4 not in filter_peers:
continue
peer['id4'] = id4
peers_display.append(peer)
peers_display = sorted(peers_display, key=lambda k: k['id4'])
- filter_types = request.REQUEST.get('types', None)
+ filter_types = request_vars.get('types', None)
if filter_types:
- filter_types = filter_types.split(u',')
+ filter_types = filter_types.split(',')
else:
filter_types = [ 'bytes', 'counts' ]
logging.error('filter_types=%s', filter_types)
- filter_periods = request.REQUEST.get('periods', None)
+ filter_periods = request_vars.get('periods', None)
if filter_periods:
display_periods = []
- for period_name in filter_periods.split(u','):
+ for period_name in filter_periods.split(','):
for period in periods:
if period['name_tiny'] == period_name:
display_periods.append(period)
'LINE:packets#FF0000:input packets', \
'LINE:lines#00FF00:AIVDM lines'
rrdtool.graph(*args)
- return render_to_response('sources.html', {'sources':peers_display, 'show_bytes': 'bytes' in filter_types, 'show_counts': 'counts' in filter_types, 'periods': display_periods}, RequestContext(request))
+ return render(request, 'sources.html', {'sources':peers_display, 'show_bytes': 'bytes' in filter_types, 'show_counts': 'counts' in filter_types, 'periods': display_periods})
@http_authenticate(auth, 'ais')
page = 1
npages = int((News.objects.count() + NEWS_PER_PAGE - 1) // NEWS_PER_PAGE)
news = News.objects.order_by('-created')[(page - 1) * NEWS_PER_PAGE : page * NEWS_PER_PAGE]
- return render_to_response('news.html', {'news':news, 'page': page, 'npages': npages}, RequestContext(request))
+ return render(request, 'news.html', {'news':news, 'page': page, 'npages': npages})
@http_authenticate(auth, 'ais')
def news_atom(request):
row = cursor.fetchone()
last_update = row[0]
- return render_to_response('news.atom', {'news':news, 'last_update':last_update}, RequestContext(request), mimetype='application/atom+xml')
+ return render(request, 'news.atom', {'news':news, 'last_update':last_update}, content_type='application/atom+xml')
-# -*- encoding: utf-8 -*-
-
from django import forms
class AisCalendarWidget(forms.TextInput):
--- /dev/null
+"""
+WSGI config for ais project.
+
+It exposes the WSGI callable as a module-level variable named ``application``.
+
+For more information on this file, see
+https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
+"""
+
+import os
+
+from django.core.wsgi import get_wsgi_application
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ais.djais.settings")
+
+application = get_wsgi_application()
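
Note: for a quick local smoke test of the WSGI callable before Apache/mod_wsgi is put in front of it (a sketch; assumes the new file is importable as ais.djais.wsgi and that the database settings resolve):

    from wsgiref.simple_server import make_server

    from ais.djais.wsgi import application   # module path assumed

    # Localhost only; not how the site is served in production.
    with make_server('127.0.0.1', 8000, application) as httpd:
        print('Test server on http://127.0.0.1:8000/')
        httpd.serve_forever()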
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
"""
Library for distances over Earth.
"""
-from __future__ import division
import math
-__all__ = [ 'EARTH_RADIUS_NM', 'AIS_ANGLE_TO_RADIAN',
+__all__ = ['EARTH_RADIUS_NM', 'AIS_ANGLE_TO_RADIAN',
'latlon_to_xyz_rad', 'latlon_to_xyz_deg', 'latlon_to_xyz_ais',
- 'dist3_xyz', 'dist3_latlong_ais' ]
+ 'dist3_xyz', 'dist3_latlong_ais']
# earth mean radius: 6371 km
# nautical mile mean size: 1.8523 km
def __example__():
eq_af = latlon_to_xyz_deg(0, 0) # equator, africa
- print eq_af
+ print(eq_af)
eq_in = latlon_to_xyz_deg(0, 90) # equator indian ocean
- print eq_in
+ print(eq_in)
north = latlon_to_xyz_deg(90, 0) # north pole
- print north
+ print(north)
south = latlon_to_xyz_deg(-90, 0) # south pole
- print south
- print "distance poles = ", dist3_xyz(north, south)
+ print(south)
+ print("distance poles = ", dist3_xyz(north, south))
paris = latlon_to_xyz_deg(48.+51./60, 2.+21./60)
lemans = latlon_to_xyz_deg(48.+1./60, 0.+11./60)
- print "distance paris/lemans = ", dist3_xyz(paris, lemans), "NM"
+ print("distance paris/lemans = ", dist3_xyz(paris, lemans), "NM")
if __name__ == '__main__':
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
-from __future__ import division
import sys
import os
import logging
-#!/usr/bin/env python
-from __future__ import division
+#!/usr/bin/env python3
+
from ais.html_parser import *
if __name__ == '__main__':
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
-from __future__ import division
import sys, os, urllib2, time
from pprint import pprint
from datetime import datetime, date
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
-from __future__ import division
import urllib2, StringIO, gzip, time, random
#from urlgrabber.keepalive import HTTPHandler
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
-from __future__ import division
import sys
from optparse import OptionParser
from datetime import datetime
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
-from __future__ import division
-import urllib2, StringIO, gzip, time, random, os
-#from urlgrabber.keepalive import HTTPHandler
+import gzip
+import os
+import random
+import sys
+import time
+import urllib.request
from datetime import datetime
-#from shipplotter_parselogs import parse_file
from ais.ntools import read_cfg
-__config_filename__ = '/etc/ais/shipplotter'
+# from shipplotter_parselogs import parse_file
-#Warning: keep-alive handler does mixes with User-Agent!
-#keepalive_handler = HTTPHandler()
-#opener = urllib2.build_opener(keepalive_handler)
-#urllib2.install_opener(opener)
-request = urllib2.Request('http://www.coaa.co.uk/shipinfo.php')
-request.add_header('User-Agent', 'ShipPlotter')
-request.add_header('Accept-encoding', 'gzip, deflate')
-request.add_header('Cache-Control', 'no-cache')
+__config_filename = '/etc/ais/shipplotter'
+# Warning: the keep-alive handler interferes with the User-Agent header!
+# keepalive_handler = HTTPHandler()
+# opener = urllib2.build_opener(keepalive_handler)
+# urllib2.install_opener(opener)
-def get_httpdata():
- cfg = read_cfg(__config_filename__)
+request = urllib.request.Request(
+ 'http://www.coaa.co.uk/shipinfo.php',
+ headers={
+ 'User-Agent': 'ShipPlotter',
+ 'Accept-encoding': 'gzip, deflate',
+ 'Cache-Control': 'no-cache'
+ }
+ )
+
+
+def get_httpdata(debug_headers=False):
+ """
+ Returns the raw data from the COAA web site.
+ Warning: this is NOT utf-8-safe data.
+ """
+ cfg = read_cfg(__config_filename)
reg = cfg['reg']
extracode = cfg['extracode']
# If sharing 1 + 2 + 1->2, Fwd(1)=10, Fwd(2)=0
# If sharing 1 + 2, Fwd(1)=0, Fwd(2)=0
- mouset = random.randint(1,60)
+ mouset = random.randint(1, 60)
tt = int(time.time())
-
+
# tt must be UTC
- uo = urllib2.urlopen(request, 'Lines=0&Reg=' + reg + '&Extracode=' + extracode + '&Uponly=0&Mouset='+str(mouset)+'&LatN=161.549709&LatS=-161.548334&LonE=+266.288827&LonW=-245.287049&Fwd=0&Ver=12.4.3&Tt='+str(tt))
+ uo = urllib.request.urlopen(
+ request,
+ bytes(
+ 'Lines=0&Reg=' + reg
+ + '&Extracode=' + extracode
+ + '&Uponly=0&Mouset=' + str(mouset)
+ + '&LatN=161.549709&LatS=-161.548334'
+ + '&LonE=+266.288827&LonW=-245.287049'
+ + '&Fwd=0&Ver=12.4.3'
+ + '&Tt=' + str(tt),
+ 'utf-8')
+ )
+ if debug_headers:
+ print(uo.info(), file=sys.stderr)
httpdata = uo.read()
uo.close()
- if uo.headers.get('Content-Encoding')=='gzip':
- compressedstream = StringIO.StringIO(httpdata)
- gzipper = gzip.GzipFile(fileobj=compressedstream)
- httpdata = gzipper.read()
+ if uo.headers.get('Content-Encoding') == 'gzip':
+ httpdata = gzip.decompress(httpdata)
return httpdata
+
def save_httpdata(httpdata):
now = datetime.utcnow()
- foldername = 'shipplotter/'+now.strftime('%Y%m%d')
+ foldername = '/var/lib/ais/shipplotter/'+now.strftime('%Y%m%d')
try:
os.mkdir(foldername)
except OSError as oserr:
if oserr.errno != 17:
raise
- # else folder allrady exists, perfect
+ # else folder already exists, perfect
filename = foldername+'/'+now.strftime('%H%M%S')
file = open(filename, 'wb')
file.write(httpdata)
return filename
+def main():
+ import argparse
+ parser = argparse.ArgumentParser(
+ description="Download shipplotter data, "
+ "save it in /var/lib/ais/shipplotter/YYYYMMDD/",
+ epilog="Registration is required. See {}."
+ .format(__config_filename))
+ parser.add_argument('--debug-headers',
+ action='store_true',
+ default=False,
+ help='Print headers on stderr')
+ args = parser.parse_args()
+ # for i in range(10):
+ # data = get_httpdata()
+ # filename = save_httpdata(data)
+ # print filename
+ # parse_file(options.dbpath, filename)
+ # print 'parsed ok'
+ # time.sleep(60)
+ data = get_httpdata(args.debug_headers)
+ save_httpdata(data)
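+    # Each run leaves one snapshot under /var/lib/ais/shipplotter/YYYYMMDD/HHMMSS;
+    # presumably the shipplotter_parselogs importer (commented import above) reads
+    # them into the database afterwards.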
+ # keepalive_handler.close_all()
+
+
if __name__ == '__main__':
- from optparse import OptionParser
- parser = OptionParser()
- #parser.add_option('--db-path', help="set db path. default=%default", action='store', dest='dbpath', default='db')
- options, args = parser.parse_args()
- #for i in range(10):
- # data = get_httpdata()
- # filename = save_httpdata(data)
- # print filename
- # parse_file(options.dbpath, filename)
- # print 'parsed ok'
- # time.sleep(60)
- data = get_httpdata()
- print data
- #keepalive_handler.close_all()
+ main()
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
-from __future__ import division
-import sys
-from optparse import OptionParser
-from pprint import pprint
import logging
+import sys
+from pprint import pformat, pprint
-from ais.common import add_nmea1, add_nmea5_partial, AIS_LATLON_SCALE, AIS_LAT_NOT_AVAILABLE, AIS_LON_NOT_AVAILABLE, AIS_SOG_SCALE, AIS_SOG_NOT_AVAILABLE, AIS_COG_SCALE, AIS_COG_NOT_AVAILABLE, AIS_NO_HEADING, AIS_ROT_NOT_AVAILABLE, DBPATH
-from ais.ntools import clean_alnum, clean_ais_charset
+from ais.common import (AIS_COG_NOT_AVAILABLE, AIS_COG_SCALE,
+ AIS_LAT_NOT_AVAILABLE, AIS_LATLON_SCALE,
+ AIS_LON_NOT_AVAILABLE, AIS_NO_HEADING,
+ AIS_ROT_NOT_AVAILABLE, AIS_SOG_NOT_AVAILABLE,
+ AIS_SOG_SCALE, DBPATH, add_nmea1, add_nmea5_partial)
+from ais.ntools import clean_ais_charset, clean_alnum
LINE_STRUCT = (
- ('mmsi', 9, None ),
- ('timestamp', 10, int ),
- ('status', 2, int ),
- ('type', -1, int ), # some MRCC wronglyfull send type over 99
- ('latitude', 10, float ),
- ('longitude', 11, float ),
- ('sog', -1, float ),
- ('cog', 5, float ),
- ('heading', 3, int ),
- ('draught', 4, float ),
- ('length', 3, int ),
- ('width', -1, int ),
- ('name', 20, None ),
- ('callsign', 7, None ),
- ('destination', 20, None ),
- ('eta', 11, str.strip ),
- ('userid1', 1, None ),
- ('imo', 7, int ),
- ('dim_bow', 3, int ),
- ('dim_port', 2, int ),
- ('userid2', 1, None ),
+ ('mmsi', 9, None ),
+ ('timestamp', 10, int ),
+ ('status', 2, int ),
+    ('type', -1, int ), # some MRCC wrongfully send type over 99
+ ('latitude', 10, float ),
+ ('longitude', 11, float ),
+ ('sog', -1, float ),
+ ('cog', 5, float ),
+ ('heading', 3, int ),
+ ('draught', 4, float ),
+ ('length', 3, int ),
+ ('width', -1, int ),
+ ('name', 20, None ),
+ ('callsign', 7, None ),
+ ('destination', 20, None ),
+ ('eta', 11, str.strip),
+ ('userid1', 1, None ),
+ ('imo', 7, int ),
+ ('dim_bow', 3, int ),
+ ('dim_port', 2, int ),
+ ('userid2', 1, None ),
)
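+# Each LINE_STRUCT entry is (field name, width in characters, converter); a width
+# of -1 marks a variable-width field that runs up to the next space, and fields
+# are separated by single spaces (see parse_log_line below).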
+
def parse_log_line(line):
info = {}
- #pprint (LINE_STRUCT)
+ # pprint (LINE_STRUCT)
pos = 0
for ik, klp in enumerate(LINE_STRUCT):
- k, l, parser = klp
- if ik:
+ k, ln, parser = klp
+ if ik > 0:
if line[pos-1] != ' ':
- logging.error('when parsing. Key=%s. Pos=%s. Expected a space.', k, pos)
+ logging.error(
+                'Error when parsing. Key=%s. Pos=%s. Expected a space.', k, pos)
logging.error('%s', line)
logging.error(' '*(pos-1)+'^')
- #print >> sys.stderr, 'Error when parsing. Key='+`k`+'. Pos='+`pos`+'. Expected a space.\n'+line+'\n'+' '*(pos-1)+'^'
return None
- if l == -1:
+ if ln == -1:
# compute length
- l = line.find(' ', pos)
- if l == -1:
- l = len(line)-pos # Not tested
+ ln = line.find(' ', pos)
+ if ln == -1:
+ ln = len(line)-pos # Not tested
else:
- l -= pos
- v = line[pos:pos+l]
+ ln -= pos
+ v = line[pos:pos+ln]
if parser:
v = parser(v)
info[k] = v
- pos += l+1
+ pos += ln+1
if len(line)+1 != pos:
- # TODO: loggin
- print >> sys.stderr, 'Error when parsing "%(line)s". Found extra characters at end of line' % locals()
+ logging.error(
+            'When parsing "%s": found extra characters at end of line',
+ line)
return info
-
+
+
def parse_file(filename, dry_run):
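+    """
+    Parse one downloaded ShipPlotter snapshot line by line and, unless dry_run
+    is set, feed each record into the database.
+    """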
monthes = 'Jan,Feb,Mar,Apr,May,Jun,Jul,Aug,Sep,Oct,Nov,Dec'.split(',')
- f = file(filename)
- for line in f.read().split('\n'):
- while line and line[-1] in '\r\n\0':
- line = line[:-1]
- if len(line)==0:
- continue # ignore blank lines
- #print line
+ f = open(filename, 'rb')
+ iline = 0 # Line number
+ while True:
+ iline += 1
+ logging.debug('Processing line %s', iline)
+ line = f.readline()
+        if not line:  # b'\n' for empty lines, b'' at EOF
+ break
+ line = line.rstrip(b'\r\n\0')
+ if len(line) == 0:
+ continue # ignore blank lines
+ try:
+ line = str(line, 'utf-8') # may crash if encoding is incorrect
+ except UnicodeDecodeError as e:
+ logging.warning('Line %s, %s when decoding %s', iline, e, line)
+ continue # ignore the whole line
+ logging.debug('Parsing line: %s', repr(line))
info = parse_log_line(line)
if not info:
- continue # something went wrong, line skiped
+            continue  # something went wrong, line skipped
- #pprint(info)
+ logging.debug(
+ 'Parser raw output: %s',
+ pformat(info, width=1500, compact=True))
mmsi = info['mmsi']
timestamp = info['timestamp']
status = info['status']
dim_stern = info['length'] - dim_bow
dim_port = info['dim_port']
dim_starboard = info['width'] - dim_port
- if dim_stern < 0 or dim_stern > 255 or dim_starboard < 0 or dim_starboard > 255:
+ if (dim_stern < 0 or dim_stern > 255
+ or dim_starboard < 0 or dim_starboard > 255):
dim_bow = dim_stern = dim_port = dim_starboard = 0
name = clean_ais_charset(info['name'])
if name.startswith(str(mmsi)):
- #print >> sys.stderr, 'Ignoring shipname', name
- name = '' # we don't wants rescue name, only real ones
+ logging.debug('Ignoring shipname %s', name)
+            name = ''  # we don't want rescue names, only real ones
callsign = info['callsign']
if callsign == 'unknown':
callsign = ''
else:
callsign = clean_alnum(callsign)
-
+
destination = info['destination']
if destination in ('unknown', '....................'):
info['destination'] = ''
else:
destination = clean_ais_charset(destination)
-
+
eta = info['eta']
eta_M = eta_D = 0
eta_h = 24
# This should not happen, but it does
eta_h = 24
eta_m = 60
-
+
source = 'SP'+info['userid1']+info['userid2']
imo = info['imo']
- #for i in (mmsi, timestamp, imo, name, callsign, type_, dim_bow, dim_stern, dim_port, dim_starboard, eta_M, eta_D, eta_h, eta_m, draught, destination, source):
- # print repr(i),
- #print
+        # FIXME: info doesn't have the cleaned values!
+        logging.debug(
+            'Cleaned: %s',
+            pformat(info, width=1500, compact=True))
if not dry_run:
if not add_nmea1(mmsi, timestamp, status, AIS_ROT_NOT_AVAILABLE, sog, latitude, longitude, cog, heading, source):
def main():
+ import argparse
global DBPATH
- parser = OptionParser('%prog [options] filename [filename2]...')
- parser.add_option('-d', '--debug',
- action='store_true', dest='debug', default=False,
- help="debug mode")
- parser.add_option('--print-filename', help="prints each file name before it's processed", action='store_true', dest='print_filename', default=False)
- parser.add_option('--db-path', help="set db path. default=%default", action='store', dest='dbpath', default=DBPATH)
- parser.add_option('--dry-run',
- action='store_true', dest='dry_run', default=False,
- help="don't actually write anything to the db")
- (options, args) = parser.parse_args()
-
- DBPATH = options.dbpath
-
- if options.debug:
+ parser = argparse.ArgumentParser(
+ description='Inject data downloaded from COAA shipplotter into the '
+ 'database.')
+ parser.add_argument('-d', '--debug',
+ default=False,
+ action='store_true',
+ help='debug mode')
+ parser.add_argument('--print-filename',
+ default=False,
+ action='store_true',
+ help="prints each file name before it's processed")
+ parser.add_argument('--db-path',
+ default=DBPATH,
+ help="set db path. default=%(default)s")
+ parser.add_argument('--dry-run',
+ default=False,
+ action='store_true',
+ help="don't actually write anything to the db")
+ parser.add_argument('filename',
+ nargs='+')
+ args = parser.parse_args()
+
+ DBPATH = args.db_path
+
+ if args.debug:
loglevel = logging.DEBUG
else:
loglevel = logging.INFO
- logging.basicConfig(level=loglevel, format='%(asctime)s %(levelname)s %(message)s')
+ logging.basicConfig(level=loglevel,
+ format='%(asctime)s %(levelname)s %(message)s')
- for filename in args:
- if options.print_filename:
+ for filename in args.filename:
+ if args.print_filename:
logging.info('Processing %s', filename)
- parse_file(filename, options.dry_run)
+ parse_file(filename, args.dry_run)
+
if __name__ == '__main__':
main()
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
-from __future__ import division
import sys
from ais.ntools import read_cfg
from ais.common import add_nmea1, AIS_STATUS_NOT_AVAILABLE, AIS_ROT_NOT_AVAILABLE, AIS_SOG_NOT_AVAILABLE, AIS_LATLON_SCALE, AIS_COG_NOT_AVAILABLE, AIS_NO_HEADING
-#!/usr/bin/env python
+#!/usr/bin/env python3
-from __future__ import division
-import sys
import logging
+import sys
+from ctypes import (Structure, Union, byref, c_buffer, c_char, c_int, c_uint,
+ cdll)
from time import time as get_timestamp
-from ctypes import cdll, Structure, Union, c_buffer, c_int, c_uint, c_char, \
- byref
-from ais.ntools import mmsi_to_strmmsi
+
from ais.common import add_nmea1, add_nmea5_full
+from ais.ntools import mmsi_to_strmmsi
# difference between the unix time and the real time,
# because of leap seconds:
LEAP_SECONDS = 15
# How much time can our local clock be wrong?
-LOCAL_CLOCK_FUZZINESS = 2 # seconds
+LOCAL_CLOCK_FUZZINESS = 2 # seconds
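+# AIS type 1/2/3 reports only carry the UTC second of the position fix; the code
+# below rebuilds a full timestamp from the local clock, so it has to allow for the
+# leap-second offset and a small amount of local clock drift.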
__gpsd__ = cdll.LoadLibrary('libgps.so')
try:
__gpsd__.aivdm_decode
except AttributeError:
- #logging.warning("Shared library gps doesn't export aivdm_decode, trying gpsd lib.")
+ logging.warning("Shared library gps doesn't export aivdm_decode, trying gpsd lib.")
__gpsd__ = cdll.LoadLibrary('libgpsd.so.0')
__gpsd__.aivdm_decode
try:
__gpsd__.gpsd_report_set_level(1)
except AttributeError:
- #logging.error("Your version of gpsd doesn't support verbose level reduction.")
+ # logging.error("Your version of gpsd doesn't support verbose level reduction.")
pass
class AivdmProcessor:
def __init__(self, is_db_save):
- self.ais_context = c_buffer('\000' * 8196) #(4+4+91*4+92+2048+21+4+1024))
+        self.ais_context = c_buffer(b'\000' * 8196)  # (4+4+91*4+92+2048+21+4+1024)
self.ais_data = GpsdAisData()
self.is_db_save = is_db_save
return None
if not __gpsd__.aivdm_decode(line, len(line)+1, byref(self.ais_context), byref(self.ais_data)):
- return True # incomplete or invalid packet: always forwards TODO
+ return True # incomplete or invalid packet: always forwards TODO
if self.ais_data.type_ in (1, 2, 3):
- #strdata = cast(byref(self.ais_data), POINTER(c_ubyte*28))
- #raw28 = strdata.contents
- #logging.debug('raw28= %s', ''.join(['%02x' % x for x in raw28]))
- #d_type, d_repeat, d_mmsi, d_status, d_turn, d_speed = \
- # struct.unpack('III4xIiI', raw28)
- #logging.debug('raw= %s %s %s X000000X %s %s %s', d_type, d_repeat, d_mmsi, d_status, d_turn, d_speed)
- #logging.debug('align = %s', alignment(GpsdAisData))
- #logging.debug('TYPE %s: mmsi:%s status:%s turn:%s speed:%s %s %s %s %s %s %s %s %s %s',
- # self.ais_data.type_,
- # self.ais_data.mmsi,
- # self.ais_data.type1.status,
- # self.ais_data.type1.turn,
- # self.ais_data.type1.speed,
- # self.ais_data.type1.accuracy,
- # self.ais_data.type1.lon,
- # self.ais_data.type1.lat,
- # self.ais_data.type1.course,
- # self.ais_data.type1.heading,
- # self.ais_data.type1.second,
- # self.ais_data.type1.maneuver,
- # self.ais_data.type1.raim,
- # self.ais_data.type1.radio
- # )
+ # strdata = cast(byref(self.ais_data), POINTER(c_ubyte*28))
+ # raw28 = strdata.contents
+ # logging.debug('raw28= %s', ''.join(['%02x' % x for x in raw28]))
+ # d_type, d_repeat, d_mmsi, d_status, d_turn, d_speed = \
+ # struct.unpack('III4xIiI', raw28)
+ # logging.debug('raw= %s %s %s X000000X %s %s %s', d_type, d_repeat, d_mmsi, d_status, d_turn, d_speed)
+ # logging.debug('align = %s', alignment(GpsdAisData))
+ # logging.debug('TYPE %s: mmsi:%s status:%s turn:%s speed:%s %s %s %s %s %s %s %s %s %s',
+ # self.ais_data.type_,
+ # self.ais_data.mmsi,
+ # self.ais_data.type1.status,
+ # self.ais_data.type1.turn,
+ # self.ais_data.type1.speed,
+ # self.ais_data.type1.accuracy,
+ # self.ais_data.type1.lon,
+ # self.ais_data.type1.lat,
+ # self.ais_data.type1.course,
+ # self.ais_data.type1.heading,
+ # self.ais_data.type1.second,
+ # self.ais_data.type1.maneuver,
+ # self.ais_data.type1.raim,
+ # self.ais_data.type1.radio
+ # )
second = self.ais_data.type1.second
if second < 60:
expected_second = timestamp % 60
delta_seconds -= 60
logging.debug('DELTA_TIME=%s',
delta_seconds)
- #logging.debug('SECOND_RECEIVED=%s SECOND_EXPECTED=%s DELTA_TIME=%s TIMESTAMP=%s TIMESTAMP_REFERENCE=%s',
- # second, expected_second, delta_seconds,
- # timestamp, timestamp - delta_seconds)
+ # logging.debug('SECOND_RECEIVED=%s SECOND_EXPECTED=%s DELTA_TIME=%s TIMESTAMP=%s TIMESTAMP_REFERENCE=%s',
+ # second, expected_second, delta_seconds,
+ # timestamp, timestamp - delta_seconds)
timestamp -= delta_seconds
- #else:
- #logging.warning('Received second=%s in AIS type %s message, not applying deltas',
- # second, self.ais_data.type_)
+ # else:
+ # logging.warning('Received second=%s in AIS type %s message, not applying deltas',
+ # second, self.ais_data.type_)
if self.is_db_save:
add_nmea1(mmsi_to_strmmsi(self.ais_data.mmsi),
timestamp,
return self.ais_data
elif self.ais_data.type_ == 5:
- #logging.debug('TYPE 5: %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s',
- # self.ais_data.mmsi,
- # self.ais_data.type5.ais_version,
- # self.ais_data.type5.imo,
- # self.ais_data.type5.callsign,
- # self.ais_data.type5.shipname,
- # self.ais_data.type5.shiptype,
- # self.ais_data.type5.to_bow,
- # self.ais_data.type5.to_stern,
- # self.ais_data.type5.to_port,
- # self.ais_data.type5.to_starboard,
- # self.ais_data.type5.epfd,
- # self.ais_data.type5.month,
- # self.ais_data.type5.day,
- # self.ais_data.type5.hour,
- # self.ais_data.type5.minute,
- # self.ais_data.type5.draught,
- # self.ais_data.type5.destination,
- # self.ais_data.type5.dte
- # )
+ # logging.debug('TYPE 5: %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s %s',
+ # self.ais_data.mmsi,
+ # self.ais_data.type5.ais_version,
+ # self.ais_data.type5.imo,
+ # self.ais_data.type5.callsign,
+ # self.ais_data.type5.shipname,
+ # self.ais_data.type5.shiptype,
+ # self.ais_data.type5.to_bow,
+ # self.ais_data.type5.to_stern,
+ # self.ais_data.type5.to_port,
+ # self.ais_data.type5.to_starboard,
+ # self.ais_data.type5.epfd,
+ # self.ais_data.type5.month,
+ # self.ais_data.type5.day,
+ # self.ais_data.type5.hour,
+ # self.ais_data.type5.minute,
+ # self.ais_data.type5.draught,
+ # self.ais_data.type5.destination,
+ # self.ais_data.type5.dte
+ # )
if self.is_db_save:
add_nmea5_full(mmsi_to_strmmsi(self.ais_data.mmsi),
timestamp,
self.ais_data.type5.to_stern,
self.ais_data.type5.to_port,
self.ais_data.type5.to_starboard,
- #self.ais_data.type5.epfd,
+ # self.ais_data.type5.epfd,
self.ais_data.type5.month,
self.ais_data.type5.day,
self.ais_data.type5.hour,
self.ais_data.type5.minute,
self.ais_data.type5.draught,
self.ais_data.type5.destination,
- #self.ais_data.type5.dte
+ # self.ais_data.type5.dte
id4)
- return self.ais_data # Forward that line
+ return self.ais_data # Forward that line
# TODO
- return self.ais_data # Forward that line
+ return self.ais_data # Forward that line
-#!/usr/bin/env python
-# -*- encoding: utf-8 -*-
+#!/usr/bin/env python3
-from __future__ import division
import sys, htmlentities
from optparse import OptionParser
-# -*- coding: utf-8 -*-
'''
AIS input basic functions
'''
-from __future__ import division
import os.path
import logging
from datetime import datetime, timedelta
-# -*- coding: utf-8 -*-
'''
Peers definition
'''
-from __future__ import division
import logging
import pprint
from ais.ntools import str_split_column_ipv6, strmmsi_to_mmsi
if __source_normalized__:
return SOURCES
- config_file = file(CONFIG_FILENAME)
+ config_file = open(CONFIG_FILENAME)
for lineno_e, line in enumerate(config_file.readlines()):
lineno = lineno_e + 1
#logging.debug('considering line #%s: %s', lineno, repr(line))
logging.debug('SOURCES \n%s', pprint.pformat(SOURCES))
logging.debug('SOURCES (raw):\n%s', pprint.pformat(SOURCES))
- for id4, settings in SOURCES.iteritems():
+ for id4, settings in SOURCES.items():
assert len(id4) == 4, \
'Invalid ID %s in SOURCES. It should be 4 characters long.'
if __hidden_mmsi__ is None:
__hidden_mmsi__ = []
try:
- lines = file(HIDDENMMSI_FILENAME).read().split('\n')
+ lines = open(HIDDENMMSI_FILENAME).read().split('\n')
except IOError as err:
if err.errno == 2: # No such file or directory
logging.info('No hidden ship')
-# -*- coding: utf-8 -*-
'''
UDP out peers module
'''
-from __future__ import division
import logging
from time import time as get_timestamp, sleep
import socket
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
+#!/usr/bin/python3
'''
This is the main runner for AIS inputs.
'''
-from __future__ import division
#import os
import sys
import logging, logging.handlers
from daemon import DaemonContext
#import lockfile
if options.log_stderr:
- stderr = file('/var/log/ais/daemon.stderr', 'w+', 0664)
+        stderr = open('/var/log/ais/daemon.stderr', 'w+', 0o664)
else:
stderr = None
#pidfile = lockfile.FileLock('/var/run/ais/input.pid')
- with DaemonContext(stdout=stderr, stderr=stderr, umask=002):#, pidfile=pidfile):
+ with DaemonContext(stdout=stderr, stderr=stderr, umask=0o002):#, pidfile=pidfile):
mainloop(options, args)
else:
mainloop(options, args)
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
'''
Module for receiving AIVDM data from serial port.
'''
-from __future__ import division
import serial
import logging
-# -*- coding: utf-8 -*-
'''
Source statistics module
'''
-from __future__ import division
import os
from time import time as get_timestamp
import logging
-import rrdtool
+#import rrdtool FIXME
__all__ = [
'STATS_RATE', 'STATS_DIR',
-#!/usr/bin/env python
+#!/usr/bin/env python3
'''
Module for receiving AIVDM data from outbound TCP connection.
'''
-from __future__ import division
import logging
import socket
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
+#!/usr/bin/python3
'''
Module for receiving AIVDM data from UDP.
'''
-from __future__ import division
import logging
import socket
-# -*- coding: utf-8 -*-
'''
AIS virtual bases for Channels and Services.
'''
# A source has an id4, hence stats and a logger, but no buffer
# A channel has a buffer, and is associated with a source
-from __future__ import division
import sys
__all__ = [
-#!/usr/bin/env python
+#!/usr/bin/env python3
-from __future__ import division
-import sys
+import logging
import os
import os.path
-import time
-import logging
-import subprocess
import socket
+import subprocess
+import sys
+import time
from random import SystemRandom
-from ais.db import *
+from ais.db import *
from ais.djais.settings import AIS_BASE_URL, NOTIFICATION_EMAIL
-__all__ = [ \
+__all__ = [
'wakeup_daemon',
'DAEMON_WAKEUP_ERROR',
'make_unique_job_id',
SOCK_FILENAME = '/var/run/ais/jobrunner.wakeup'
RESULT_DIR = '/var/lib/ais/jobs/'
-ARCHIVE_EXPIRE = '1 day' # postgres interval format
+ARCHIVE_EXPIRE = '1 day' # postgres interval format
+
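+# Jobs are queued in the "job" table by addjob(); any process can then wake the
+# daemon by sending an empty datagram to SOCK_FILENAME (see wakeup_daemon below).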
def wakeup_daemon():
client = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
try:
client.connect(SOCK_FILENAME)
- client.send('')
+ client.send(b'')
return True
- except:
+ except Exception as e:
+ logging.warning(e)
return False
+
DAEMON_WAKEUP_ERROR = """
Your job has been queued, but there was an error contacting the job
scheduler. Please report the error.
"""
+
def make_unique_job_id():
def make_id():
rnd = SystemRandom()
- source = u'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
- result = u''
+ source = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
+ result = ''
for i in range(8):
result += source[int(rnd.random()*len(source))]
return result
- return make_id() # TODO check it's unique
+ return make_id() # TODO check it's unique
def addjob(user_id, command, friendly_filename, notify=None):
jobid = make_unique_job_id()
- sqlexec(u'INSERT INTO job (id, user_id, command, friendly_filename, notify) VALUES (%(id)s, %(user_id)s, %(cmd)s, %(friendly_filename)s, %(notify)s)', {
+ sqlexec('INSERT INTO job (id, user_id, command, friendly_filename, notify) VALUES (%(id)s, %(user_id)s, %(cmd)s, %(friendly_filename)s, %(notify)s)', {
'id': jobid,
'user_id': user_id,
'cmd': command,
def jobid_ext_to_filenames(jobid, friendly_filename):
extension = os.path.splitext(friendly_filename)[-1]
return RESULT_DIR + jobid + extension, RESULT_DIR + jobid + '.log'
-
+
+
def startup_clean():
- sqlexec(u'SELECT id, pid, start_time, finish_time FROM job WHERE pid IS NOT NULL OR (start_time IS NOT NULL AND finish_time IS NULL)')
+ sqlexec('SELECT id, pid, start_time, finish_time FROM job WHERE pid IS NOT NULL OR (start_time IS NOT NULL AND finish_time IS NULL)')
while True:
row = get_common_cursor().fetchone()
if not row:
break
logging.error('Startup: Job %s is supposed to be running: pid=%s, start_time=%s, finish_time=%s', *row)
# TODO: kill pid ?
-
- sqlexec(u'UPDATE job SET pid=NULL, start_time=NULL WHERE pid IS NOT NULL OR (start_time IS NOT NULL AND finish_time IS NULL)')
+
+ sqlexec('UPDATE job SET pid=NULL, start_time=NULL WHERE pid IS NOT NULL OR (start_time IS NOT NULL AND finish_time IS NULL)')
dbcommit()
import smtplib
server = smtplib.SMTP('localhost')
server.set_debuglevel(1)
- message = "From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n%s" \
- % (fromaddr, toaddr, subject, message)
+ message = (
+ "From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n%s"
+ % (fromaddr, toaddr, subject, message))
try:
server.sendmail(fromaddr, toaddr, message)
except:
"""
# remove jobs archived for more than 1 day
deleted_jobs = []
- sqlexec(u"SELECT id, friendly_filename FROM job WHERE archive_time < now() AT TIME ZONE 'GMT' - '%s'::interval" % ARCHIVE_EXPIRE)
+ sqlexec("SELECT id, friendly_filename FROM job WHERE archive_time < now() AT TIME ZONE 'GMT' - '%s'::interval" % ARCHIVE_EXPIRE)
while True:
row = get_common_cursor().fetchone()
if row is None:
deleted_jobs.append(row[0])
for jobid in deleted_jobs:
- sqlexec(u'DELETE FROM job WHERE id=%(id)s', {'id':jobid})
+ sqlexec('DELETE FROM job WHERE id=%(id)s', {'id': jobid})
dbcommit()
- sqlexec(u'SELECT id, command, friendly_filename, user_id FROM job WHERE start_time IS NULL ORDER BY queue_time LIMIT 1')
+ sqlexec('SELECT id, command, friendly_filename, user_id FROM job WHERE start_time IS NULL ORDER BY queue_time LIMIT 1')
row = get_common_cursor().fetchone()
if row is None:
- dbcommit() # Do not leave a transaction open
+ dbcommit() # Do not leave a transaction open
logging.debug('Queue is empty.')
return False
jobid, command, friendly_filename, user_id = row
- command = 'python -m ais.' + command
+ command = 'python3 -m ais.' + command
logging.info('Starting job %s: %s', jobid, command)
- sqlexec(u"UPDATE job SET start_time=now() AT TIME ZONE 'GMT' WHERE id=%(jobid)s", {'jobid': jobid})
+ sqlexec("UPDATE job SET start_time=now() AT TIME ZONE 'GMT' WHERE id=%(jobid)s", {'jobid': jobid})
dbcommit()
filename, logfilename = jobid_ext_to_filenames(jobid, friendly_filename)
- output = file(filename, 'wb')
- logfile = file(logfilename, 'wb')
+ output = open(filename, 'wb')
+ logfile = open(logfilename, 'wb')
p = subprocess.Popen(command, stdout=output, stderr=logfile, shell=True)
logging.debug('System process id = %s', p.pid)
- sqlexec(u'UPDATE job SET pid=' + unicode(p.pid) + ' WHERE id=%(jobid)s', {'jobid': jobid})
+ sqlexec('UPDATE job SET pid=' + str(p.pid) + ' WHERE id=%(jobid)s', {'jobid': jobid})
dbcommit()
-
+
returncode = p.wait()
- sqlexec(u"UPDATE job SET pid=NULL, finish_time=now() AT TIME ZONE 'GMT', result=" + unicode(returncode) + " WHERE id=%(jobid)s", {'jobid': jobid})
+ sqlexec("UPDATE job SET pid=NULL, finish_time=now() AT TIME ZONE 'GMT', result=" + str(returncode) + " WHERE id=%(jobid)s",
+ {'jobid': jobid})
dbcommit()
logging.info('Job complete: result=%s', returncode)
- sqlexec(u'SELECT notify FROM job where id=%(jobid)s', {'jobid': jobid})
+ sqlexec('SELECT notify FROM job where id=%(jobid)s', {'jobid': jobid})
row = get_common_cursor().fetchone()
if row:
notify = row[0]
- if notify == u'W': # Web
- sqlexec(u"INSERT INTO user_message (user_id, user_message_category_id, txt) VALUES(%(user_id)s, 'info', %(msg)s)", {'user_id':user_id, 'msg':('Your <a href="/job/%(jobid)s/download">job %(jobid)s</a> is complete.' % {'jobid': jobid}) })
- sqlexec(u'UPDATE job SET notify=NULL WHERE id=%(jobid)s', {'jobid': jobid})
+ if notify == 'W': # Web
+ sqlexec("INSERT INTO user_message (user_id, user_message_category_id, txt) VALUES(%(user_id)s, 'info', %(msg)s)",
+ {'user_id': user_id,
+ 'msg': ('Your <a href="/job/%(jobid)s/download">job %(jobid)s</a> is complete.'
+ % {'jobid': jobid})})
+ sqlexec('UPDATE job SET notify=NULL WHERE id=%(jobid)s',
+ {'jobid': jobid})
dbcommit()
- elif notify == u'M': # Notification
- sqlexec('SELECT email FROM "user" WHERE id=%(user_id)s', {'user_id':user_id})
+ elif notify == 'M': # Notification
+ sqlexec('SELECT email FROM "user" WHERE id=%(user_id)s',
+ {'user_id': user_id})
row = get_common_cursor().fetchone()
email = None
if row is not None:
if not foreground:
from daemon import DaemonContext
- stderr = file('/var/log/ais/jobrunner', 'w+', 0664)
- #ais_gid = grp.getgrnam('ais').gr_gid
- dctx = DaemonContext(stdout=stderr, stderr=stderr, umask=002)
- # todo: pidfile= with import lockfile (squeeze)
+ stderr = open('/var/log/ais/jobrunner', 'w+', 0o664)
+ # ais_gid = grp.getgrnam('ais').gr_gid
+ dctx = DaemonContext(stdout=stderr, stderr=stderr, umask=0o002)
+ # TODO: pidfile= with import lockfile (squeeze)
dctx.open()
if os.path.exists(SOCK_FILENAME):
if not os.path.exists(run_dir):
os.makedirs(run_dir)
server.bind(SOCK_FILENAME)
- os.chmod(SOCK_FILENAME, 0777) # anyone can wake up the daemon
- #TODO: set receive queue size to 0 or 1 byte
-
- #TODO: don't run startup_clean if another daemon is runing, that is using pid file
+ os.chmod(SOCK_FILENAME, 0o777) # anyone can wake up the daemon
+ # TODO: set receive queue size to 0 or 1 byte
+
+    # TODO: don't run startup_clean if another daemon is running (check via a pid file)
startup_clean()
while True:
if not runjob():
- server.recv(1024) # blocks
+ server.recv(1024) # blocks
+
-
def main():
- from optparse import OptionParser
+ import argparse
+
+ parser = argparse.ArgumentParser(
+        description='Run queued AIS search jobs.')
- parser = OptionParser(usage='%prog [options] [userid command friendlyname]')
- parser.add_option('--foreground',
- action='store_true', dest='foreground', default=False,
+ parser.add_argument(
+ '--foreground',
+ default=False, action='store_true',
help='runs daemon in foreground.')
- parser.add_option('--nice',
- action='store', type='int', dest='nice', default=5,
- help='set scheduling nice value. Default = %default')
- parser.add_option('-d', '--debug',
+ parser.add_argument(
+ '--nice',
+ type=int, default=5,
+ help='set scheduling nice value. Default = %(default)s')
+ parser.add_argument(
+ '-d', '--debug',
action='store_true', dest='debug', default=False,
help='debug mode')
- parser.add_option('--debug-sql',
- action='store_true', dest='debug_sql', default=False,
+ parser.add_argument(
+ '--debug-sql',
+ action='store_true', default=False,
help='prints sql statements. Implies --debug')
-
- parser.add_option('--notification-code',
+ parser.add_argument(
+ '--notification-code',
action='store', dest='notify', default='W',
+ choices='WM',
help='Job complete notification method: W=web M=mail')
- options, args = parser.parse_args()
-
- if options.debug_sql:
+
+ parser.add_argument(
+ 'user_id', nargs='?',
+ help='user id')
+ parser.add_argument(
+ 'command', nargs='?',
+ help='command to run. Typically starts with "common" or'
+ ' "show_targets_ships".'
+             ' See "python3 -m ais.common -h" and'
+             ' "python3 -m ais.show_targets_ships -h" for help.'
+ )
+ parser.add_argument(
+ 'friendly_name', nargs='?',
+ help='Friendly name of the result file. Might end with .kmz or .csv')
+
+ args = parser.parse_args()
+
+ if args.debug_sql:
sql_setdebug(True)
-    options.debug = True
+        args.debug = True
- if options.debug:
+ if args.debug:
loglevel = logging.DEBUG
else:
loglevel = logging.INFO
logging.basicConfig(level=loglevel, format='%(asctime)s %(levelname)s %(message)s')
- if args:
- assert len(args)==3, 'There must be either no arguments (daemon) or 3 of them'
- user_id = int(args[0])
- command = args[1]
- friendly_name = args[2]
- jobid = addjob(user_id, command, friendly_name, notify=options.notify)
- print >> sys.stderr, 'Job', jobid, 'queued'
- sys.exit(0)
-
+ if args.user_id:
+ if args.friendly_name is None:
+ parser.error('There must be either no arguments (daemon) or 3 of them')
+ jobid = addjob(args.user_id, args.command, args.friendly_name, notify=args.notify)
+ print('Job', jobid, 'queued', file=sys.stderr)
+ else:
+ daemon(foreground=args.foreground)
- daemon(foreground = options.foreground)
if __name__ == '__main__':
main()
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
#
# Country file generator
#
-# taken from http://www.itu.int/cgi-bin/htsh/glad/cga_mids.sh?lng=E
-#
-
-from __future__ import division
+# taken from https://www.itu.int/en/ITU-R/terrestrial/fmd/Pages/mid.aspx
+# (previously http://www.itu.int/cgi-bin/htsh/glad/cga_mids.sh?lng=E )
-s = '''
+s = '''
201 Albania (Republic of)
202 Andorra (Principality of)
203 Austria
-204 Azores
+204 Portugal - Azores
205 Belgium
206 Belarus (Republic of)
207 Bulgaria (Republic of)
208 Vatican City State
-209, 210 Cyprus (Republic of)
+209 Cyprus (Republic of)
+210 Cyprus (Republic of)
211 Germany (Federal Republic of)
212 Cyprus (Republic of)
213 Georgia
215 Malta
216 Armenia (Republic of)
218 Germany (Federal Republic of)
-219, 220 Denmark
-224, 225 Spain
-226, 227, 228 France
+219 Denmark
+220 Denmark
+224 Spain
+225 Spain
+226 France
+227 France
+228 France
+229 Malta
230 Finland
-231 Faroe Islands
-232, 233, 234, 235 United Kingdom
-236 Gibraltar
+231 Denmark - Faroe Islands
+232 United Kingdom of Great Britain and Northern Ireland
+233 United Kingdom of Great Britain and Northern Ireland
+234 United Kingdom of Great Britain and Northern Ireland
+235 United Kingdom of Great Britain and Northern Ireland
+236 United Kingdom of Great Britain and Northern Ireland - Gibraltar
237 Greece
238 Croatia (Republic of)
-239, 240 Greece
+239 Greece
+240 Greece
+241 Greece
242 Morocco (Kingdom of)
-243 Hungary (Republic of)
-244, 245, 246 Netherlands (Kingdom of the)
+243 Hungary
+244 Netherlands (Kingdom of the)
+245 Netherlands (Kingdom of the)
+246 Netherlands (Kingdom of the)
247 Italy
-248, 249 Malta
+248 Malta
+249 Malta
250 Ireland
251 Iceland
252 Liechtenstein (Principality of)
253 Luxembourg
254 Monaco (Principality of)
-255 Madeira
+255 Portugal - Madeira
256 Malta
-257, 258, 259 Norway
+257 Norway
+258 Norway
+259 Norway
261 Poland (Republic of)
262 Montenegro
263 Portugal
264 Romania
-265, 266 Sweden
+265 Sweden
+266 Sweden
267 Slovak Republic
268 San Marino (Republic of)
269 Switzerland (Confederation of)
271 Turkey
272 Ukraine
273 Russian Federation
-274 The Former Yugoslav Republic of Macedonia
+274 Republic of North Macedonia
275 Latvia (Republic of)
276 Estonia (Republic of)
277 Lithuania (Republic of)
278 Slovenia (Republic of)
279 Serbia (Republic of)
-301 Anguilla
-303 Alaska (State of)
-304, 305 Antigua and Barbuda
-306 Netherlands Antilles
-307 Aruba
-308, 309 Bahamas (Commonwealth of the)
-310 Bermuda
+301 United Kingdom of Great Britain and Northern Ireland - Anguilla
+303 United States of America - Alaska (State of)
+304 Antigua and Barbuda
+305 Antigua and Barbuda
+306 Netherlands (Kingdom of the) - Bonaire, Sint Eustatius and Saba
+306 Netherlands (Kingdom of the) - Curaçao
+306 Netherlands (Kingdom of the) - Sint Maarten (Dutch part)
+307 Netherlands (Kingdom of the) - Aruba
+308 Bahamas (Commonwealth of the)
+309 Bahamas (Commonwealth of the)
+310 United Kingdom of Great Britain and Northern Ireland - Bermuda
311 Bahamas (Commonwealth of the)
312 Belize
314 Barbados
316 Canada
-319 Cayman Islands
+319 United Kingdom of Great Britain and Northern Ireland - Cayman Islands
321 Costa Rica
323 Cuba
325 Dominica (Commonwealth of)
327 Dominican Republic
-329 Guadeloupe (French Department of)
+329 France - Guadeloupe (French Department of)
330 Grenada
-331 Greenland
+331 Denmark - Greenland
332 Guatemala (Republic of)
334 Honduras (Republic of)
336 Haiti (Republic of)
341 Saint Kitts and Nevis (Federation of)
343 Saint Lucia
345 Mexico
-347 Martinique (French Department of)
-348 Montserrat
+347 France - Martinique (French Department of)
+348 United Kingdom of Great Britain and Northern Ireland - Montserrat
350 Nicaragua
-351, 352, 353, 354, 355, 356, 357 Panama (Republic of)
-358 Puerto Rico
+351 Panama (Republic of)
+352 Panama (Republic of)
+353 Panama (Republic of)
+354 Panama (Republic of)
+355 Panama (Republic of)
+356 Panama (Republic of)
+357 Panama (Republic of)
+358 United States of America - Puerto Rico
359 El Salvador (Republic of)
-361 Saint Pierre and Miquelon (Territorial Collectivity of)
+361 France - Saint Pierre and Miquelon (Territorial Collectivity of)
362 Trinidad and Tobago
-364 Turks and Caicos Islands
-366, 367, 368, 369 United States of America
-370, 371, 372 Panama (Republic of)
-375, 376, 377 Saint Vincent and the Grenadines
-378 British Virgin Islands
-379 United States Virgin Islands
+364 United Kingdom of Great Britain and Northern Ireland - Turks and Caicos Islands
+366 United States of America
+367 United States of America
+368 United States of America
+369 United States of America
+370 Panama (Republic of)
+371 Panama (Republic of)
+372 Panama (Republic of)
+373 Panama (Republic of)
+374 Panama (Republic of)
+375 Saint Vincent and the Grenadines
+376 Saint Vincent and the Grenadines
+377 Saint Vincent and the Grenadines
+378 United Kingdom of Great Britain and Northern Ireland - British Virgin Islands
+379 United States of America - United States Virgin Islands
401 Afghanistan
403 Saudi Arabia (Kingdom of)
405 Bangladesh (People's Republic of)
408 Bahrain (Kingdom of)
410 Bhutan (Kingdom of)
-412, 413 China (People's Republic of)
-416 Taiwan (Province of China)
+412 China (People's Republic of)
+413 China (People's Republic of)
+414 China (People's Republic of)
+416 China (People's Republic of) - Taiwan (Province of China)
417 Sri Lanka (Democratic Socialist Republic of)
419 India (Republic of)
422 Iran (Islamic Republic of)
-423 Azerbaijani Republic
+423 Azerbaijan (Republic of)
425 Iraq (Republic of)
428 Israel (State of)
-431, 432 Japan
+431 Japan
+432 Japan
434 Turkmenistan
436 Kazakhstan (Republic of)
437 Uzbekistan (Republic of)
438 Jordan (Hashemite Kingdom of)
-440, 441 Korea (Republic of)
-443 Palestine (In accordance with Resolution 99 Rev. Antalya, 2006)
+440 Korea (Republic of)
+441 Korea (Republic of)
+443 State of Palestine (In accordance with Resolution 99 Rev. Guadalajara, 2010)
445 Democratic People's Republic of Korea
447 Kuwait (State of)
450 Lebanon
451 Kyrgyz Republic
-453 Macao (Special Administrative Region of China)
+453 China (People's Republic of) - Macao (Special Administrative Region of China)
455 Maldives (Republic of)
457 Mongolia
459 Nepal (Federal Democratic Republic of)
466 Qatar (State of)
468 Syrian Arab Republic
470 United Arab Emirates
-473, 475 Yemen (Republic of)
-477 Hong Kong (Special Administrative Region of China)
+471 United Arab Emirates
+472 Tajikistan (Republic of)
+473 Yemen (Republic of)
+475 Yemen (Republic of)
+477 China (People's Republic of) - Hong Kong (Special Administrative Region of China)
478 Bosnia and Herzegovina
-501 Adelie Land
+501 France - Adelie Land
503 Australia
506 Myanmar (Union of)
508 Brunei Darussalam
510 Micronesia (Federated States of)
511 Palau (Republic of)
512 New Zealand
-514, 515 Cambodia (Kingdom of)
-516 Christmas Island (Indian Ocean)
-518 Cook Islands
+514 Cambodia (Kingdom of)
+515 Cambodia (Kingdom of)
+516 Australia - Christmas Island (Indian Ocean)
+518 New Zealand - Cook Islands
520 Fiji (Republic of)
-523 Cocos (Keeling) Islands
+523 Australia - Cocos (Keeling) Islands
525 Indonesia (Republic of)
529 Kiribati (Republic of)
531 Lao People's Democratic Republic
533 Malaysia
-536 Northern Mariana Islands (Commonwealth of the)
+536 United States of America - Northern Mariana Islands (Commonwealth of the)
538 Marshall Islands (Republic of the)
-540 New Caledonia
-542 Niue
+540 France - New Caledonia
+542 New Zealand - Niue
544 Nauru (Republic of)
-546 French Polynesia
+546 France - French Polynesia
548 Philippines (Republic of the)
+550 Timor-Leste (Democratic Republic of)
553 Papua New Guinea
-555 Pitcairn Island
+555 United Kingdom of Great Britain and Northern Ireland - Pitcairn Island
557 Solomon Islands
-559 American Samoa
+559 United States of America - American Samoa
561 Samoa (Independent State of)
-563, 564, 565 Singapore (Republic of)
+563 Singapore (Republic of)
+564 Singapore (Republic of)
+565 Singapore (Republic of)
+566 Singapore (Republic of)
567 Thailand
570 Tonga (Kingdom of)
572 Tuvalu
574 Viet Nam (Socialist Republic of)
576 Vanuatu (Republic of)
-578 Wallis and Futuna Islands
+577 Vanuatu (Republic of)
+578 France - Wallis and Futuna Islands
601 South Africa (Republic of)
603 Angola (Republic of)
605 Algeria (People's Democratic Republic of)
-607 Saint Paul and Amsterdam Islands
-608 Ascension Island
+607 France - Saint Paul and Amsterdam Islands
+608 United Kingdom of Great Britain and Northern Ireland - Ascension Island
609 Burundi (Republic of)
610 Benin (Republic of)
611 Botswana (Republic of)
613 Cameroon (Republic of)
615 Congo (Republic of the)
616 Comoros (Union of the)
-617 Cape Verde (Republic of)
-618 Crozet Archipelago
+617 Cabo Verde (Republic of)
+618 France - Crozet Archipelago
619 Côte d'Ivoire (Republic of)
+620 Comoros (Union of the)
621 Djibouti (Republic of)
622 Egypt (Arab Republic of)
624 Ethiopia (Federal Democratic Republic of)
632 Guinea (Republic of)
633 Burkina Faso
634 Kenya (Republic of)
-635 Kerguelen Islands
-636, 637 Liberia (Republic of)
-642 Socialist People's Libyan Arab Jamahiriya
+635 France - Kerguelen Islands
+636 Liberia (Republic of)
+637 Liberia (Republic of)
+638 South Sudan (Republic of)
+642 Libya
644 Lesotho (Kingdom of)
645 Mauritius (Republic of)
647 Madagascar (Republic of)
656 Niger (Republic of the)
657 Nigeria (Federal Republic of)
659 Namibia (Republic of)
-660 Reunion (French Department of)
+660 France - Reunion (French Department of)
661 Rwanda (Republic of)
662 Sudan (Republic of the)
663 Senegal (Republic of)
664 Seychelles (Republic of)
-665 Saint Helena
-666 Somali Democratic Republic
+665 United Kingdom of Great Britain and Northern Ireland - Saint Helena
+666 Somalia (Federal Republic of)
667 Sierra Leone
668 Sao Tome and Principe (Democratic Republic of)
-669 Swaziland (Kingdom of)
+669 Eswatini (Kingdom of)
670 Chad (Republic of)
671 Togolese Republic
672 Tunisia
725 Chile
730 Colombia (Republic of)
735 Ecuador
-740 Falkland Islands (Malvinas)
-745 Guiana (French Department of)
+740 United Kingdom of Great Britain and Northern Ireland - Falkland Islands (Malvinas)
+745 France - Guiana (French Department of)
750 Guyana
755 Paraguay (Republic of)
760 Peru
pos = line.find('(')
if pos != -1:
line = line[:pos]
- codes = []
- while True:
- codes.append(line[:3])
- line = line[3:]
- if line[0:2] == ', ':
- line = line[2:]
- else:
- break
- line = line.strip()
- for code in codes:
- #print code+':', repr(line)+','
- print code+'\t'+line
-
+ line = line.strip()
+ mid = line[:3]
+ country = line[4:]
+ print(mid+':', repr(country)+',')
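+    # e.g. the entry "201 Albania (Republic of)" prints as: 201: 'Albania',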
+    # print(mid+'\t'+country)
-# -*- coding: utf-8 -*-
-#
-#from __future__ import division
-#
-#MESSAGE_TYPES = {
-# 1: 'Position Report Class A',
-# 2: 'Position Report Class A (Assigned schedule)',
-# 3: 'Position Report Class A (Response to interrogation)',
-# 4: 'Base Station Report',
-# 5: 'Ship and Voyage data',
-# 6: 'Addressed Binary Message',
-# 7: 'Binary Acknowledge',
-# 8: 'Binary Broadcast Message',
-# 9: 'Standard SAR Aircraft Position Report',
-# 10: 'UTC and Date Inquiry',
-# 11: 'UTC and Date Response',
-# 12: 'Addressed Safety Related Message',
-# 13: 'Safety Related Acknowledge',
-# 14: 'Safety Related Broadcast Message',
-# 15: 'Interrogation',
-# 16: 'Assigned Mode Command',
-# 17: 'GNSS Binary Broadcast Message',
-# 18: 'GNSS Binary Broadcast Message',
-# 19: 'Extended Class B Equipment Position Report',
-# 20: 'Data Link Management',
-# 21: 'Aids-to-navigation Report',
-# 22: 'Channel Management',
-# 23: 'Group Assignment Command',
-# 24: 'Class B CS Static Data Report',
-# 25: 'Binary Message, Single Slot',
-# 26: 'Binary Message, Multiple Slot',
-#}
+# MESSAGE_TYPES = {
+# 1: 'Position Report Class A',
+# 2: 'Position Report Class A (Assigned schedule)',
+# 3: 'Position Report Class A (Response to interrogation)',
+# 4: 'Base Station Report',
+# 5: 'Ship and Voyage data',
+# 6: 'Addressed Binary Message',
+# 7: 'Binary Acknowledge',
+# 8: 'Binary Broadcast Message',
+# 9: 'Standard SAR Aircraft Position Report',
+# 10: 'UTC and Date Inquiry',
+# 11: 'UTC and Date Response',
+# 12: 'Addressed Safety Related Message',
+# 13: 'Safety Related Acknowledge',
+# 14: 'Safety Related Broadcast Message',
+# 15: 'Interrogation',
+# 16: 'Assigned Mode Command',
+# 17: 'GNSS Binary Broadcast Message',
+# 18: 'GNSS Binary Broadcast Message',
+# 19: 'Extended Class B Equipment Position Report',
+# 20: 'Data Link Management',
+# 21: 'Aids-to-navigation Report',
+# 22: 'Channel Management',
+# 23: 'Group Assignment Command',
+# 24: 'Class B CS Static Data Report',
+# 25: 'Binary Message, Single Slot',
+# 26: 'Binary Message, Multiple Slot',
+# }
-#def log_stderr(txt):
-# print >> sys.stderr, txt
-#
-#
-#def ais_packet_reader(source, debug_discarded=False):
-# """
-# Reads raw packets from a source,
-# Check the CRC discarding invalid ones
-# Assemble fragments, taking care of channels A/B multiplexing
-# Yield armored packets
-# """
-#
-# if not debug_discarded:
-# log = lambda(x): None
-# else:
-# log = log_stderr
-# payload_fragments = {} # channel -> payload
-# payload_last_fragid = {} # channel -> last_frag_id
-#
-# for line in source:
-# while True:
-# if len(line)==0:
-# continue # skip empty lines
-# if line[-1] in '\r\n\0':
-# line = line[:-1]
-# else:
-# break
-#
-# #print "Read", line
-# if line[-3] != '*':
-# log('ERROR: no checksum found in line: '+repr(line))
-# continue
-# data,checksum = line.split('*', 1)
-#
-# crc = 0
-# for c in data[1:]:
-# crc ^= ord(c)
-# crc = '%02X'%crc
-# if crc != checksum:
-# log('ERROR: checksum failure in line: '+repr(line)+'. Recomputed CRC is '+crc)
-# continue
-#
-# tokens = data.split(',')
-# if tokens[0]!='!AIVDM':
-# log('ERROR: Ignoring line not starting with !AIVDM: '+repr(line))
-# continue
-# try:
-# aivdmid, frag_count, frag_id, seq_id, channel, armored_payload, nfillbits = tokens
-# except ValueError:
-# log('ERROR: Ignoring line without 6 comas: '+repr(line))
-# continue
-#
-# try:
-# frag_count = int(frag_count)
-# except ValueError:
-# log('ERROR: fragment count is not a number: '+repr(line))
-# continue
-#
-# try:
-# frag_id = int(frag_id)
-# except ValueError:
-# log('ERROR: fragment id is not a number: '+repr(line))
-# continue
-#
-# if channel not in 'AB':
-# log('WARNING: Invalid AIS channel.')
-# continue
-#
-# if not payload_fragments.has_key(channel):
-# payload_fragments[channel] = ''
-# payload_last_fragid[channel] = 0
-#
-# if frag_id != payload_last_fragid[channel]+1:
-# log('ERROR: Lost AIS data fragments. Ignoring previous paylaod for channel '+channel)
-# payload_fragments[channel] = ''
-# payload_last_fragid[channel] = 0
-#
-# payload_fragments[channel] += armored_payload
-# payload_last_fragid[channel] = frag_id
-#
-# if frag_id == frag_count:
-# yield payload_fragments[channel]
-# payload_fragments[channel] = ''
-# payload_last_fragid[channel] = 0
-# # see http://meeting.helcom.fi/c/document_library/get_file?folderId=75444&name=DLFE-30368.pdf
-#
-#
-#
-#def decode(armored_payload):
-# #print 'decoding', armored_payload
-#
-# # decode payload
-# payload_lenght = len(armored_payload)*6
-# #print "\tpayload length", payload_lenght, "bits"
-# payload = ''
-# nexchar = None
-# next_offset = 0
-# for c in armored_payload:
-# i = ord(c) - 48
-# if i>=40:
-# i -= 8
-# if next_offset == 0:
-# nextchar = (i<<2)
-# next_offset = 6
-# elif next_offset == 6:
-# payload += chr(nextchar+(i>>4))
-# nextchar = (i&0xf) << 4
-# next_offset = 4
-# elif next_offset == 4:
-# payload += chr(nextchar+(i>>2))
-# nextchar = (i&0x3) << 6
-# next_offset = 2
-# elif next_offset == 2:
-# payload += chr(nextchar+(i))
-# nextchar = None
-# next_offset = 0
-# #print 'Added raw "%s": %02X'% (c, i)
-# #print 'unarmoared_payload:',
-# #for c in payload:
-# # print '%02X'%ord(c),
-# #print
-# #print 'next_offset: ', next_offset
-# #if nextchar is not None:
-# # print 'nextchar: %02X'% nextchar
-# if nextchar is not None:
-# payload += chr(nextchar)
-#
-#
-# def getbit(offset):
-# return (ord(payload[offset/8])>>(7-offset%8)) & 1
-#
-# def getbits(bitoffset, cnt, signed=False):
-# res = 0L
-# negative = False
-# while cnt:
-# bit = getbit(bitoffset)
-# if signed:
-# negative = bit==1
-# signed = False
-# continue
-# res = res<<1 | bit
-# cnt -= 1
-# bitoffset += 1
-# if negative:
-# return -res
-# return res
-#
-# def getbits_binstr(bitoffset, cnt):
-# result = ''
-# while cnt:
-# if getbit(bitoffset):
-# result += '1'
-# else:
-# result += '0'
-# cnt -= 1
-# return result
-#
-#
-# def ais_6_to_8bits(data):
-# result = ''
-# while data:
-# i = data & 0x3f
-# data >>= 6
-# if i<32:
-# i+=64
-# result = chr(i)+result
-# return result
-#
-# def ais_strip(txt):
-# # remove '@' at the end of text
-# while len(txt) and txt[-1] in '@ ':
-# txt = txt[:-1]
-# return txt
-#
-#
-# decoded = {}
-# #decoded['raw_armored'] = armored_payload
-# #decoded['raw_unarmored'] = ' '.join([ '%02x'%ord(c) for c in payload])
-# message_type = getbits(0,6)
-# decoded['message_type'] = str(message_type)+'-'+MESSAGE_TYPES.get(message_type, 'Unknown')
-# if message_type in (1,2,3):
-# if payload_lenght>37:
-# decoded['mmsi'] = getbits(8,30)
-# if payload_lenght>41:
-# status = getbits(38,4)
-# decoded['status'] = str(status)+'-'+STATUS_CODES.get(status, 'Unknown')
-# if payload_lenght>49:
-# rateofturn = getbits(42, 8, True)
-# if rateofturn != -128:
-# decoded['rate_of_turn'] = rateofturn
-# if payload_lenght>59:
-# sog = getbits(50, 10) # speed over ground
-# if sog != 1023:
-# decoded['speed'] =sog/10.
-# if payload_lenght>115:
-# position_accuracy = getbit(60)
-# longitude = getbits(61,28, True)
-# latitude = getbits(89, 27, True)
-# if longitude != 0x6791AC0 and latitude != 0x6791AC0:
-# latitude = latitude/10000.
-# longitude = longitude/10000.
-# decoded['latitude'] = latitude/60.
-# decoded['longitude'] = longitude/60.
-# if payload_lenght>127:
-# cog = getbits(116,12)
-# decoded['course'] = cog/10.
-# if payload_lenght>136:
-# heading = getbits(128,9)
-# if heading!=511:
-# decoded['heading'] = heading
-# if payload_lenght>142:
-# seconds = getbits(136,6)
-# decoded['seconds'] = seconds
-# elif message_type == 4:
-# if payload_lenght>37:
-# decoded['mmsi'] = getbits(8,30)
-# if payload_lenght>77:
-# dy = getbits(38,14)
-# dm = getbits(52,4)
-# dd = getbits(56,5)
-# th = getbits(61,5)
-# tm = getbits(66,6)
-# ts = getbits(71,6)
-# fulldate = ''
-# if dy==0:
-# fulldate+='????'
-# else:
-# fulldate+='%04d'%+dy
-# if dm==0:
-# fulldate+='-??'
-# else:
-# fulldate+='-%02d'%dm
-# if dd==0:
-# fulldate+='-??'
-# else:
-# fulldate+='-%02d'%dd
-# if th==24:
-# fulldate+=' ??'
-# else:
-# fulldate+=' %02d'%th
-# if tm==60:
-# fulldate+=':??'
-# else:
-# fulldate+=':%02d'%tm
-# if ts==60:
-# fulldate+=':??'
-# else:
-# fulldate+=':%02d'%ts
-# decoded['datetime'] = fulldate
-# elif message_type == 5:
-# if payload_lenght>37:
-# decoded['mmsi'] = getbits(8,30)
-# if payload_lenght>39:
-# ais_version = getbits(38,2)
-# if ais_version != 0:
-# log_stderr("ERROR: unknown AIS version "+str(ais_version))
-# if payload_lenght>69:
-# imo = getbits(40,30)
-# decoded['imo'] = imo
-# if payload_lenght>111:
-# callsign = getbits(70,42)
-# decoded['callsign'] = ais_strip(ais_6_to_8bits(callsign))
-# if payload_lenght>231:
-# vesselname = getbits(112,120)
-# decoded['vesselname'] = ais_strip(ais_6_to_8bits(vesselname))
-# if payload_lenght>239:
-# vesseltype = getbits(232,8)
-# decoded['vesseltype'] = str(vesseltype)+'-'+SHIP_TYPES.get(vesseltype, 'Unknown')
-# if payload_lenght>257:
-# dim_bow = getbits(240,9)
-# decoded['dim_bow'] = dim_bow
-# decoded['length'] = dim_bow + getbits(249,9)
-# if payload_lenght>269:
-# dim_port = getbits(258,6)
-# decoded['dim_port'] = dim_port
-# decoded['width'] = dim_port + getbits(264,6)
-# if payload_lenght>293:
-# decoded['raw_eta'] = getbits_binstr(274, 20)
-# eta_month = getbits(274,4)
-# eta_day = getbits(278,5)
-# eta_hour = getbits(283,5)
-# eta_min = getbits(288,6)
-# if True: #eta_day!=0 and eta_month!=0:
-# decoded['eta'] = '%02d-%02d'%(eta_month,eta_day)
-# if eta_min<60 and eta_hour<24:
-# decoded['eta'] += ' %02d:%02d'%(eta_hour,eta_min)
-# else:
-# decoded['eta'] += ' ??:??'
-# if payload_lenght>301:
-# decoded['draught'] = getbits(293,9)/10.
-# if payload_lenght>421:
-# decoded['destination'] = ais_strip(ais_6_to_8bits(getbits(302,120)))
-# return decoded
-
+# def log_stderr(txt):
+# print >> sys.stderr, txt
+#
+#
+# def ais_packet_reader(source, debug_discarded=False):
+# """
+# Reads raw packets from a source,
+# Check the CRC discarding invalid ones
+# Assemble fragments, taking care of channels A/B multiplexing
+# Yield armored packets
+# """
+#
+# if not debug_discarded:
+# log = lambda(x): None
+# else:
+# log = log_stderr
+# payload_fragments = {} # channel -> payload
+# payload_last_fragid = {} # channel -> last_frag_id
+#
+# for line in source:
+# while True:
+# if len(line)==0:
+# continue # skip empty lines
+# if line[-1] in '\r\n\0':
+# line = line[:-1]
+# else:
+# break
+#
+# #print "Read", line
+# if line[-3] != '*':
+# log('ERROR: no checksum found in line: '+repr(line))
+# continue
+# data,checksum = line.split('*', 1)
+#
+# crc = 0
+# for c in data[1:]:
+# crc ^= ord(c)
+# crc = '%02X'%crc
+# if crc != checksum:
+# log('ERROR: checksum failure in line: '+repr(line)+'. Recomputed CRC is '+crc)
+# continue
+#
+# tokens = data.split(',')
+# if tokens[0]!='!AIVDM':
+# log('ERROR: Ignoring line not starting with !AIVDM: '+repr(line))
+# continue
+# try:
+# aivdmid, frag_count, frag_id, seq_id, channel, armored_payload, nfillbits = tokens
+# except ValueError:
+# log('ERROR: Ignoring line without 6 comas: '+repr(line))
+# continue
+#
+# try:
+# frag_count = int(frag_count)
+# except ValueError:
+# log('ERROR: fragment count is not a number: '+repr(line))
+# continue
+#
+# try:
+# frag_id = int(frag_id)
+# except ValueError:
+# log('ERROR: fragment id is not a number: '+repr(line))
+# continue
+#
+# if channel not in 'AB':
+# log('WARNING: Invalid AIS channel.')
+# continue
+#
+# if not payload_fragments.has_key(channel):
+# payload_fragments[channel] = ''
+# payload_last_fragid[channel] = 0
+#
+# if frag_id != payload_last_fragid[channel]+1:
+# log('ERROR: Lost AIS data fragments. Ignoring previous paylaod for channel '+channel)
+# payload_fragments[channel] = ''
+# payload_last_fragid[channel] = 0
+#
+# payload_fragments[channel] += armored_payload
+# payload_last_fragid[channel] = frag_id
+#
+# if frag_id == frag_count:
+# yield payload_fragments[channel]
+# payload_fragments[channel] = ''
+# payload_last_fragid[channel] = 0
+# # see http://meeting.helcom.fi/c/document_library/get_file?folderId=75444&name=DLFE-30368.pdf
+#
+#
+#
+# def decode(armored_payload):
+# #print 'decoding', armored_payload
+#
+# # decode payload
+# payload_lenght = len(armored_payload)*6
+# #print "\tpayload length", payload_lenght, "bits"
+# payload = ''
+# nexchar = None
+# next_offset = 0
+# for c in armored_payload:
+# i = ord(c) - 48
+# if i>=40:
+# i -= 8
+# if next_offset == 0:
+# nextchar = (i<<2)
+# next_offset = 6
+# elif next_offset == 6:
+# payload += chr(nextchar+(i>>4))
+# nextchar = (i&0xf) << 4
+# next_offset = 4
+# elif next_offset == 4:
+# payload += chr(nextchar+(i>>2))
+# nextchar = (i&0x3) << 6
+# next_offset = 2
+# elif next_offset == 2:
+# payload += chr(nextchar+(i))
+# nextchar = None
+# next_offset = 0
+# #print 'Added raw "%s": %02X'% (c, i)
+# #print 'unarmoared_payload:',
+# #for c in payload:
+# # print '%02X'%ord(c),
+# #print
+# #print 'next_offset: ', next_offset
+# #if nextchar is not None:
+# # print 'nextchar: %02X'% nextchar
+# if nextchar is not None:
+# payload += chr(nextchar)
+#
+#
+# def getbit(offset):
+# return (ord(payload[offset/8])>>(7-offset%8)) & 1
+#
+# def getbits(bitoffset, cnt, signed=False):
+# res = 0L
+# negative = False
+# while cnt:
+# bit = getbit(bitoffset)
+# if signed:
+# negative = bit==1
+# signed = False
+# continue
+# res = res<<1 | bit
+# cnt -= 1
+# bitoffset += 1
+# if negative:
+# return -res
+# return res
+#
+# def getbits_binstr(bitoffset, cnt):
+# result = ''
+# while cnt:
+# if getbit(bitoffset):
+# result += '1'
+# else:
+# result += '0'
+# cnt -= 1
+# return result
+#
+#
+# def ais_6_to_8bits(data):
+# result = ''
+# while data:
+# i = data & 0x3f
+# data >>= 6
+# if i<32:
+# i+=64
+# result = chr(i)+result
+# return result
+#
+# def ais_strip(txt):
+# # remove '@' at the end of text
+# while len(txt) and txt[-1] in '@ ':
+# txt = txt[:-1]
+# return txt
+#
+#
+# decoded = {}
+# #decoded['raw_armored'] = armored_payload
+# #decoded['raw_unarmored'] = ' '.join([ '%02x'%ord(c) for c in payload])
+# message_type = getbits(0,6)
+# decoded['message_type'] = str(message_type)+'-'+MESSAGE_TYPES.get(message_type, 'Unknown')
+# if message_type in (1,2,3):
+# if payload_lenght>37:
+# decoded['mmsi'] = getbits(8,30)
+# if payload_lenght>41:
+# status = getbits(38,4)
+# decoded['status'] = str(status)+'-'+STATUS_CODES.get(status, 'Unknown')
+# if payload_lenght>49:
+# rateofturn = getbits(42, 8, True)
+# if rateofturn != -128:
+# decoded['rate_of_turn'] = rateofturn
+# if payload_lenght>59:
+# sog = getbits(50, 10) # speed over ground
+# if sog != 1023:
+# decoded['speed'] =sog/10.
+# if payload_lenght>115:
+# position_accuracy = getbit(60)
+# longitude = getbits(61,28, True)
+# latitude = getbits(89, 27, True)
+# if longitude != 0x6791AC0 and latitude != 0x6791AC0:
+# latitude = latitude/10000.
+# longitude = longitude/10000.
+# decoded['latitude'] = latitude/60.
+# decoded['longitude'] = longitude/60.
+# if payload_lenght>127:
+# cog = getbits(116,12)
+# decoded['course'] = cog/10.
+# if payload_lenght>136:
+# heading = getbits(128,9)
+# if heading!=511:
+# decoded['heading'] = heading
+# if payload_lenght>142:
+# seconds = getbits(136,6)
+# decoded['seconds'] = seconds
+# elif message_type == 4:
+# if payload_lenght>37:
+# decoded['mmsi'] = getbits(8,30)
+# if payload_lenght>77:
+# dy = getbits(38,14)
+# dm = getbits(52,4)
+# dd = getbits(56,5)
+# th = getbits(61,5)
+# tm = getbits(66,6)
+# ts = getbits(71,6)
+# fulldate = ''
+# if dy==0:
+# fulldate+='????'
+# else:
+# fulldate+='%04d'%+dy
+# if dm==0:
+# fulldate+='-??'
+# else:
+# fulldate+='-%02d'%dm
+# if dd==0:
+# fulldate+='-??'
+# else:
+# fulldate+='-%02d'%dd
+# if th==24:
+# fulldate+=' ??'
+# else:
+# fulldate+=' %02d'%th
+# if tm==60:
+# fulldate+=':??'
+# else:
+# fulldate+=':%02d'%tm
+# if ts==60:
+# fulldate+=':??'
+# else:
+# fulldate+=':%02d'%ts
+# decoded['datetime'] = fulldate
+# elif message_type == 5:
+# if payload_lenght>37:
+# decoded['mmsi'] = getbits(8,30)
+# if payload_lenght>39:
+# ais_version = getbits(38,2)
+# if ais_version != 0:
+# log_stderr("ERROR: unknown AIS version "+str(ais_version))
+# if payload_lenght>69:
+# imo = getbits(40,30)
+# decoded['imo'] = imo
+# if payload_lenght>111:
+# callsign = getbits(70,42)
+# decoded['callsign'] = ais_strip(ais_6_to_8bits(callsign))
+# if payload_lenght>231:
+# vesselname = getbits(112,120)
+# decoded['vesselname'] = ais_strip(ais_6_to_8bits(vesselname))
+# if payload_lenght>239:
+# vesseltype = getbits(232,8)
+# decoded['vesseltype'] = str(vesseltype)+'-'+SHIP_TYPES.get(vesseltype, 'Unknown')
+# if payload_lenght>257:
+# dim_bow = getbits(240,9)
+# decoded['dim_bow'] = dim_bow
+# decoded['length'] = dim_bow + getbits(249,9)
+# if payload_lenght>269:
+# dim_port = getbits(258,6)
+# decoded['dim_port'] = dim_port
+# decoded['width'] = dim_port + getbits(264,6)
+# if payload_lenght>293:
+# decoded['raw_eta'] = getbits_binstr(274, 20)
+# eta_month = getbits(274,4)
+# eta_day = getbits(278,5)
+# eta_hour = getbits(283,5)
+# eta_min = getbits(288,6)
+# if True: #eta_day!=0 and eta_month!=0:
+# decoded['eta'] = '%02d-%02d'%(eta_month,eta_day)
+# if eta_min<60 and eta_hour<24:
+# decoded['eta'] += ' %02d:%02d'%(eta_hour,eta_min)
+# else:
+# decoded['eta'] += ' ??:??'
+# if payload_lenght>301:
+# decoded['draught'] = getbits(293,9)/10.
+# if payload_lenght>421:
+# decoded['destination'] = ais_strip(ais_6_to_8bits(getbits(302,120)))
+# return decoded
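# For reference, a minimal Python 3 sketch of the technique the commented-out decoder
# above relies on: each character of the armored NMEA payload carries 6 bits, and the
# individual fields are read as bit slices. This is only an illustration (signed fields
# are assumed to be two's complement, per the AIS specification); it is not part of the
# ais package API.

def unarmor(armored_payload):
    """Turn the 6-bit ASCII armored payload into a string of '0'/'1' characters."""
    bits = ''
    for ch in armored_payload:
        value = ord(ch) - 48
        if value > 40:
            value -= 8
        bits += format(value, '06b')
    return bits

def getbits(bits, offset, count, signed=False):
    """Read an integer from the bit string, optionally as two's complement."""
    value = int(bits[offset:offset + count], 2)
    if signed and bits[offset] == '1':
        value -= 1 << count
    return value

# Example with a sample payload taken from public AIVDM documentation:
#   bits = unarmor('177KQJ5000G?tO`K>RA1wUbN0TKH')
#   getbits(bits, 0, 6)   # -> 1          (message type: position report)
#   getbits(bits, 8, 30)  # -> 477553000  (MMSI)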
-#def print_nice_message(message):
-# if message.get('message_type', ' ')[:2] in ('1-', '2-', '3-'):
-# #print '\t', message
-# #print '*'*10, 'Position', '*'*10
-# print "%09d"%message.get('mmsi', 0),
-# print message.get('status', ''),
-# print message.get('rate_of_turn', ''),
-# print message.get('speed', ''),
-# print message.get('latitude', ''),
-# print message.get('longitude', ''),
-# print str(message.get('course', ''))+'°',
-# print str(message.get('heading', ''))+'°',
-# print str(message.get('seconds', ''))+'s',
-# print
-# elif message.get('message_type', ' ')[:2] == '4-':
-# print "%09d"%message.get('mmsi', 0),
-# print "Base station",
-# print message.get('datetime', '')
-# elif message.get('message_type', ' ')[:2] == '5-':
-# #print '*'*10, 'Info', '*'*10
-# print "%09d"%message.get('mmsi', 0),
-# print '(%d)'%message.get('imo', 0),
-# print '(%s)'%message.get('callsign', ''),
-# print message.get('vesselname', ''),
-# print message.get('vesseltype', ''),
-# print '->',
-# print message.get('eta', ''),
-# print message.get('destination', ''),
-# print message.get('length', ''),
-# print message.get('width', ''),
-# if 'draught' in message:
-# print '%.1f' % message['draught']
-# print message.get('dim_bow', ''),
-# print message.get('dim_port', ''),
-# print
-# else:
-# print "********* Message ", message.get('message_type', ' ')
-#
-#
-#
-#if __name__ == '__main__':
-# parser = OptionParser()
-# parser.add_option('--in', '-i', help="input AIS data file. Default to stdin not implemented yet.", action='store', type='str', dest='filename')
-# parser.add_option('-u', '--udp-port', help="listen on UDP port for AIS data. Example 4158.", action='store', type='int', dest='udp')
-# parser.add_option('--max-count', help="maximum number of datagram treated", action='store', type='int', dest='max_count')
-# #parser.add_option('--debug-fragments', help="prints fragments errors on etderr.", action='store_true', dest='debug_fragments', default=False)
-# #parser.add_option('--debug-dumpraw', help="prints incoming packets as they come in.", action='store_true', dest='debug_dumppackets', default=False)
-# (options, args) = parser.parse_args()
-#
-# if options.filename and options.udp:
-# print >> sys.stderr, "Can't have both file and udp as input"
-# sys.exit(1)
-# if options.filename:
-# source = logliner(options.filename)
-# elif options.udp:
-# source = udpliner(options.udp)
-# else:
-# print >> sys.stderr, "Must have either file or udp as input"
-# sys.exit(1)
-# if options.debug_dumppackets:
-# source = dumpsource(source)
-#
-#
-# count = 0
-#
-# for data in ais_packet_reader(source, options.debug_fragments):
-# data = decode(data)
-# #print_nice_message(data)
-# count += 1
-# if count==options.max_count:
-# break
-#
+
+# def print_nice_message(message):
+# if message.get('message_type', ' ')[:2] in ('1-', '2-', '3-'):
+# #print '\t', message
+# #print '*'*10, 'Position', '*'*10
+# print "%09d"%message.get('mmsi', 0),
+# print message.get('status', ''),
+# print message.get('rate_of_turn', ''),
+# print message.get('speed', ''),
+# print message.get('latitude', ''),
+# print message.get('longitude', ''),
+# print str(message.get('course', ''))+'°',
+# print str(message.get('heading', ''))+'°',
+# print str(message.get('seconds', ''))+'s',
+# print
+# elif message.get('message_type', ' ')[:2] == '4-':
+# print "%09d"%message.get('mmsi', 0),
+# print "Base station",
+# print message.get('datetime', '')
+# elif message.get('message_type', ' ')[:2] == '5-':
+# #print '*'*10, 'Info', '*'*10
+# print "%09d"%message.get('mmsi', 0),
+# print '(%d)'%message.get('imo', 0),
+# print '(%s)'%message.get('callsign', ''),
+# print message.get('vesselname', ''),
+# print message.get('vesseltype', ''),
+# print '->',
+# print message.get('eta', ''),
+# print message.get('destination', ''),
+# print message.get('length', ''),
+# print message.get('width', ''),
+# if 'draught' in message:
+# print '%.1f' % message['draught']
+# print message.get('dim_bow', ''),
+# print message.get('dim_port', ''),
+# print
+# else:
+# print "********* Message ", message.get('message_type', ' ')
+#
+#
+#
+# if __name__ == '__main__':
+# parser = OptionParser()
+# parser.add_option('--in', '-i', help="input AIS data file. Default to stdin not implemented yet.", action='store', type='str', dest='filename')
+# parser.add_option('-u', '--udp-port', help="listen on UDP port for AIS data. Example 4158.", action='store', type='int', dest='udp')
+# parser.add_option('--max-count', help="maximum number of datagram treated", action='store', type='int', dest='max_count')
+# #parser.add_option('--debug-fragments', help="prints fragments errors on etderr.", action='store_true', dest='debug_fragments', default=False)
+# #parser.add_option('--debug-dumpraw', help="prints incoming packets as they come in.", action='store_true', dest='debug_dumppackets', default=False)
+# (options, args) = parser.parse_args()
+#
+# if options.filename and options.udp:
+# print >> sys.stderr, "Can't have both file and udp as input"
+# sys.exit(1)
+# if options.filename:
+# source = logliner(options.filename)
+# elif options.udp:
+# source = udpliner(options.udp)
+# else:
+# print >> sys.stderr, "Must have either file or udp as input"
+# sys.exit(1)
+# if options.debug_dumppackets:
+# source = dumpsource(source)
+#
+#
+# count = 0
+#
+# for data in ais_packet_reader(source, options.debug_fragments):
+# data = decode(data)
+# #print_nice_message(data)
+# count += 1
+# if count==options.max_count:
+# break
+#
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
'''
Time functions.
'''
-from __future__ import division
from datetime import datetime, date
import calendar
import logging
-# -*- coding: utf-8 -*-
-from __future__ import division
-
import os
import calendar
import logging
and returns the resulting dictionary
'''
cfg = {}
- for line in file(filename).readlines():
+ for line in open(filename).readlines():
line = line.rstrip('\r\n\0')
- line = unicode(line, 'utf-8')
- if line.startswith(u'#'):
- continue # skip comments
- spl = line.split(u'=', 1)
+ if line.startswith('#'):
+ continue # skip comments
+ spl = line.split('=', 1)
if len(spl) == 2:
cfg[spl[0]] = spl[1]
else:
def open_with_mkdirs(filename, mode):
try:
- return file(filename, mode)
+ return open(filename, mode)
except IOError as ioerr:
logging.warning("file(%s,%s): errno %s %s", filename, mode, ioerr.errno, ioerr.strerror)
# FIXME only if doesn't exists ...
#print 'Creating directory', os.path.dirname(filename)
os.makedirs(os.path.dirname(filename))
- return file(filename, mode)
+ return open(filename, mode)
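# Illustration of the key=value format expected by the config reader earlier in this
# file. The keys shown are hypothetical placeholders, not the real configuration names:
#
#   # /etc/ais/config (hypothetical example)
#   udp_port=4158
#   log_dir=/var/lib/ais/log
#
# Lines starting with '#' are skipped, and everything after the first '=' is kept as
# the value (the line is split with split('=', 1)).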
# log file source
for line in source:
while line and line[-1] in '\r\n\0':
line = line[:-1]
- print "INPUT", line
+ print("INPUT", line)
yield line
def xml_escape(txt):
-# -*- coding: utf-8 -*-
-
-from __future__ import division
import subprocess
__proc_keys__ = (
}
__clk_tck__ = None
+
+
def get_clk_tck():
# clocks per second:
- #= 100 -> 1 tick = 1/100 seconds
+ # = 100 -> 1 tick = 1/100 seconds
global __clk_tck__
if __clk_tck__ is not None:
return __clk_tck__
-
+
# require libc-bin to be installed. Priority is required anyways
process = subprocess.Popen(['getconf', 'CLK_TCK'], stdout=subprocess.PIPE)
output = process.communicate()[0]
__clk_tck__ = int(output)
return __clk_tck__
+
class Stat(dict):
def __init__(self, processid):
dict.__init__(self)
if not nice_state:
return state
return "%s (%s)" % (state, nice_state)
-
+
def nice_utime(self):
return self['utime'] / get_clk_tck()
def nice_stime(self):
return self['stime'] / get_clk_tck()
+
if __name__ == '__main__':
- print "clock granularity is %s" % get_clk_tck()
+ print("clock granularity is %s" % get_clk_tck())
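# Hedged illustration of how the Stat helpers above turn raw /proc values into seconds:
# utime and stime in /proc/<pid>/stat are expressed in clock ticks (fields 14 and 15
# per proc(5)), so dividing by CLK_TCK, as nice_utime/nice_stime do, yields CPU seconds.
# The naive split() below is fine as long as the command name contains no spaces:
#
#   with open('/proc/self/stat') as statfile:
#       fields = statfile.read().split()
#   print('user %.2fs, system %.2fs'
#         % (int(fields[13]) / get_clk_tck(), int(fields[14]) / get_clk_tck()))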
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
-from __future__ import division
import sys
from datetime import datetime, timedelta
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python3
-from __future__ import division
-import sys
+import copy
import logging
+import sys
import zipfile
-from StringIO import StringIO # TODO use python 2.6 io.BufferedWrite(sys.stdout, )
-from datetime import datetime, timedelta, time
-import copy
+from datetime import datetime, time, timedelta
+from io import BytesIO
-from ais.ntools import strmmsi_to_mmsi, mmsi_to_strmmsi
-from ais.common import *
from ais.area import load_area_from_kml_polygon
-from ais.ntools import datetime_to_timestamp, xml_escape, LatLonFormatError, clean_latitude, clean_longitude
-
-from ais.inputs.config import get_hidden_mmsi
+from ais.common import *
from ais.djais.settings import AIS_BASE_URL
+from ais.inputs.config import get_hidden_mmsi
+from ais.ntools import (LatLonFormatError, clean_latitude, clean_longitude,
+ datetime_to_timestamp, mmsi_to_strmmsi,
+ strmmsi_to_mmsi, xml_escape)
-__all__ = [ 'format_fleet_lastpos', 'format_boat_intime', 'STYLE', 'KML_DISPLAYOPT_NONAMES', 'KML_DISPLAYOPT_HISTORICAL', 'KML_DISPLAYOPT_SOURCES', 'KML_DISPLAYOPT_SHOWHIDDEN', 'kml_to_kmz' ]
+__all__ = ['format_fleet_lastpos', 'format_boat_intime', 'STYLE', 'KML_DISPLAYOPT_NONAMES', 'KML_DISPLAYOPT_HISTORICAL', 'KML_DISPLAYOPT_SOURCES', 'KML_DISPLAYOPT_SHOWHIDDEN', 'kml_to_kmz']
-KML_DISPLAYOPT_NONAMES = 1 # don't print ship name
-KML_DISPLAYOPT_HISTORICAL = 2 # never show ship track as lost
-KML_DISPLAYOPT_SOURCES = 4 # display sources
-KML_DISPLAYOPT_SHOWHIDDEN = 8 # show hidden ships
+KML_DISPLAYOPT_NONAMES = 1 # don't print ship name
+KML_DISPLAYOPT_HISTORICAL = 2 # never show ship track as lost
+KML_DISPLAYOPT_SOURCES = 4 # display sources
+KML_DISPLAYOPT_SHOWHIDDEN = 8 # show hidden ships
LOST_PERIOD = timedelta(1)
-KML_HEADER = u'''\
+KML_HEADER = '''\
<?xml version="1.0" encoding="UTF-8"?>
<kml xmlns="http://www.opengis.net/kml/2.2"
xmlns:gx="http://www.google.com/kml/ext/2.2">
<Document>
'''
-KML_FOOTER = u'''\
+KML_FOOTER = '''\
</Document>
</kml>
'''
-
class Style:
"""
"""
def __init__(self):
self.label_size = 0.7
- self.icon_size = 0.5 # 0.2
+ self.icon_size = 0.5 # 0.2
self.used_icons = set()
def _format_style(self, stylename, icon, heading=None, color=None):
color format is google styled: aabbggrr
example ffea00ff for purple
"""
- result = u''
+ result = ''
if heading is not None:
stylename += '-' + str(heading)
result += '<Style id="%s">\n' % stylename
result += '</Style>\n'
self.used_icons.add(icon)
return result
-
+
def make_header(self):
- raise NotImplementedError # abstract class
-
+ raise NotImplementedError # abstract class
+
def get_style_name(self, nmea, is_lost):
'''
Returns the name of the style based on nmea data
and whether the ship was seen recently or not.
'''
- raise NotImplementedError # abstract class
+ raise NotImplementedError # abstract class
class FishersStyle(Style):
def make_header(self):
white = None
- green = 'ff86fd5f' # '5f-fd-86'
- yellow = 'ff86eeff' #'ff-ee-86'
- red = 'ff5865fc' #'fc-65-58'
- result = u''
+ green = 'ff86fd5f' # '5f-fd-86'
+ yellow = 'ff86eeff' # 'ff-ee-86'
+ red = 'ff5865fc' # 'fc-65-58'
+ result = ''
result += self._format_style('landstation', 'capital_small.png')
result += self._format_style('base-stop', 'boat-stop.png', color=white)
result += self._format_style('fisher-stop', 'boat-stop.png', color=red)
result += self._format_style('tug-stop', 'boat-stop.png', color=green)
result += self._format_style('auth-stop', 'boat-stop.png', color=yellow)
-
- for heading in [ None ] + range(0, 360, 10):
+
+ for heading in [None] + list(range(0, 360, 10)):
result += self._format_style('base', 'boat.png', color=white, heading=heading)
result += self._format_style('fisher', 'boat.png', color=red, heading=heading)
result += self._format_style('tug', 'boat.png', color=green, heading=heading)
result += self._format_style('fisher-lost', 'boat-invis.png', color=red, heading=heading)
result += self._format_style('tug-lost', 'boat-invis.png', color=green, heading=heading)
result += self._format_style('auth-lost', 'boat-invis.png', color=yellow, heading=heading)
-
+
return result
def get_style_name(self, nmea, is_lost):
'''
if nmea.strmmsi.startswith('00') and not nmea.strmmsi.startswith('000'):
return 'landstation'
-
- if nmea.type == 30: # Fisher ship
+
+ if nmea.type == 30: # Fisher ship
stylename = 'fisher'
- elif nmea.type in (31, 32, 52): # Towing or Tug
+ elif nmea.type in (31, 32, 52): # Towing or Tug
stylename = 'tug'
- elif nmea.type in (35, 53, 55): # Authority
+ elif nmea.type in (35, 53, 55): # Authority
stylename = 'auth'
else:
stylename = 'base'
- if (nmea.status in (1, 5, 6) and (nmea.sog == AIS_SOG_NOT_AVAILABLE or nmea.sog<0.5*AIS_SOG_SCALE)) \
- or nmea.sog<1*AIS_SOG_SCALE:
+ if (nmea.status in (1, 5, 6) and (nmea.sog == AIS_SOG_NOT_AVAILABLE or nmea.sog < 0.5 * AIS_SOG_SCALE)) \
+ or nmea.sog < 1 * AIS_SOG_SCALE:
stylename += '-stop'
else:
if is_lost:
stylename += '-lost'
-
+
if nmea.cog != AIS_COG_NOT_AVAILABLE:
- course = int(nmea.cog/10.) # ais format correction
- course = (course+5)//10*10 % 360 # go to neareast 10°
+ course = int(nmea.cog / 10) # ais format correction
+            course = (course + 5) // 10 * 10 % 360  # go to nearest 10°
stylename += '-%d' % course
elif nmea.heading != AIS_NO_HEADING:
- course = (nmea.heading+5)//10*10 % 360 # go to neareast 10°
+            course = (nmea.heading + 5) // 10 * 10 % 360  # go to nearest 10°
stylename += '-%d' % course
return stylename
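    # Worked example of the rounding above: cog = 2367 (236.7° in AIS tenths of a
    # degree) gives course = 236, then (236 + 5) // 10 * 10 % 360 = 240, so the
    # '-240' variant of the style is selected.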
class PelagosStyle(Style):
def make_header(self):
white = None
- green = 'ff86fd5f' # '5f-fd-86'
- yellow = 'ff86eeff' #'ff-ee-86'
- pink = 'ffff00ea' #'ea-00-ff'
- red = 'ff5865fc' #'fc-65-58'
+ green = 'ff86fd5f' # '5f-fd-86'
+ yellow = 'ff86eeff' # 'ff-ee-86'
+ pink = 'ffff00ea' # 'ea-00-ff'
+ red = 'ff5865fc' # 'fc-65-58'
- result = u''
+ result = ''
result += self._format_style('landstation', 'capital_small.png')
result += self._format_style('base-stop', 'boat-stop.png', color=white)
result += self._format_style('hsc-stop', 'boat-stop.png', color=pink)
result += self._format_style('hazarda-stop', 'boat-stop.png', color=red)
- for heading in [ None ] + range(0, 360, 10):
+ for heading in [None] + list(range(0, 360, 10)):
result += self._format_style('base', 'boat.png', color=white, heading=heading)
result += self._format_style('cargo', 'boat.png', color=green, heading=heading)
result += self._format_style('tanker', 'boat.png', color=yellow, heading=heading)
result += self._format_style('tanker-lost', 'boat-invis.png', color=yellow, heading=heading)
result += self._format_style('hsc-lost', 'boat-invis.png', color=pink, heading=heading)
result += self._format_style('hazarda-lost', 'boat-invis.png', color=red, heading=heading)
-
+
return result
def get_style_name(self, nmea, is_lost):
'''
if (nmea.strmmsi.startswith('00') and not nmea.strmmsi.startswith('000')):
return 'landstation'
-
- if nmea.type in (41, 61, 71, 81): # major hazard materials
+
+ if nmea.type in (41, 61, 71, 81): # major hazard materials
stylename = 'hazarda'
elif nmea.type >= 70 and nmea.type <= 79:
stylename = 'cargo'
else:
stylename = 'base'
- if (nmea.status in (1, 5, 6) and (nmea.sog == AIS_SOG_NOT_AVAILABLE or nmea.sog<0.5*AIS_SOG_SCALE)) \
- or nmea.sog<1*AIS_SOG_SCALE:
+ if (nmea.status in (1, 5, 6) and (nmea.sog == AIS_SOG_NOT_AVAILABLE or nmea.sog < 0.5 * AIS_SOG_SCALE)) \
+ or nmea.sog < 1 * AIS_SOG_SCALE:
stylename += '-stop'
else:
if is_lost:
stylename += '-lost'
-
+
if nmea.cog != AIS_COG_NOT_AVAILABLE:
- course = int(nmea.cog/10.) # ais format correction
- course = (course+5)//10*10 % 360 # go to neareast 10°
+ course = int(nmea.cog / 10) # ais format correction
+            course = (course + 5) // 10 * 10 % 360  # go to nearest 10°
stylename += '-%d' % course
elif nmea.heading != AIS_NO_HEADING:
- course = (nmea.heading+5)//10*10 % 360 # go to neareast 10°
+            course = (nmea.heading + 5) // 10 * 10 % 360  # go to nearest 10°
stylename += '-%d' % course
return stylename
+
STYLE = FishersStyle()
timestamp_5, imo, name, callsign, type_, dim_bow, dim_stern, dim_port, dim_starboard, eta_M, eta_D, eta_h, eta_m, draught, destination, source_5 = Nmea5.to_values(nmea)
if latitude == AIS_LAT_NOT_AVAILABLE or longitude == AIS_LON_NOT_AVAILABLE:
- return u''
+ return ''
- result = u''
+ result = ''
- if timeinfo is not None and timeinfo != True:
+ if timeinfo is not None and timeinfo is not True:
if not isinstance(timeinfo, datetime):
timeinfo = datetime.utcfromtimestamp(timeinfo)
-
- result += u'<Placemark>\n'
+
+ result += '<Placemark>\n'
if not (display_options & KML_DISPLAYOPT_NONAMES):
- result += u'<name>' + xml_escape(nmea.get_title()) + u'</name>\n'
+ result += '<name>' + xml_escape(nmea.get_title()) + '</name>\n'
- result += u'<description><![CDATA[\n'
+ result += '<description><![CDATA[\n'
if display_options & KML_DISPLAYOPT_NONAMES:
- result += u'Vessel name: ' + xml_escape(nmea.get_name()) + u'<br>\n'
-
+ result += 'Vessel name: ' + xml_escape(nmea.get_name()) + '<br>\n'
+
dt_1 = datetime.utcfromtimestamp(timestamp_1)
if display_options & KML_DISPLAYOPT_HISTORICAL:
- result += u'%s GMT<br>\n' % dt_1.strftime('%Y-%m-%d %H:%M:%S')
+ result += '%s GMT<br>\n' % dt_1.strftime('%Y-%m-%d %H:%M:%S')
is_lost = None
else:
if timeinfo is None:
- is_lost = dt_1 < datetime.utcnow()-LOST_PERIOD
+ is_lost = dt_1 < datetime.utcnow() - LOST_PERIOD
if is_lost:
- result += u'Tack <b>lost</b> since %s GMT<br>\n' % dt_1.strftime('%Y-%m-%d %H:%M:%S')
+                result += 'Track <b>lost</b> since %s GMT<br>\n' % dt_1.strftime('%Y-%m-%d %H:%M:%S')
else:
- result += u'Last seen %s GMT<br>\n' % dt_1.strftime('%Y-%m-%d %H:%M:%S')
- else: # timeinfo is not None
- if timeinfo == True:
+ result += 'Last seen %s GMT<br>\n' % dt_1.strftime('%Y-%m-%d %H:%M:%S')
+ else: # timeinfo is not None
+ if timeinfo is True:
is_lost = None
else:
is_lost = timeinfo > dt_1 + LOST_PERIOD
if not mmsi.isdigit():
- result += u'NO MMSI<br>\n'
+ result += 'NO MMSI<br>\n'
is_land_station = False
else:
- result += u'MMSI: %s ' % mmsi
- ref_mmsi = str(mmsi) # FIXME not needed
+ result += 'MMSI: %s ' % mmsi
+ ref_mmsi = str(mmsi) # FIXME not needed
is_land_station = ref_mmsi.startswith('00') and not ref_mmsi.startswith('000')
if is_land_station:
ref_mmsi = ref_mmsi[2:]
- result += u'('+COUNTRIES_MID.get(int(ref_mmsi[:3]), u'fake')+u')<br>\n'
- if not is_land_station :
+ result += '(' + COUNTRIES_MID.get(int(ref_mmsi[:3]), 'fake') + ')<br>\n'
+ if not is_land_station:
if imo:
- #result += u'IMO<a href="http://www.xvas.it/SPECIAL/VTship.php?imo=%(imo)s&mode=CK">%(imo)s</a><br>\n' % { 'imo': imo }
- result += u'IMO: %s<br>\n' % imo
+ # result += 'IMO<a href="http://www.xvas.it/SPECIAL/VTship.php?imo=%(imo)s&mode=CK">%(imo)s</a><br>\n' % { 'imo': imo }
+ result += 'IMO: %s<br>\n' % imo
else:
- result += u'no known IMO<br>\n'
+ result += 'no known IMO<br>\n'
callsign = nmea.get_callsign(default=None)
if callsign is not None:
- result += u'Callsign: %s<br>\n' % xml_escape(callsign)
+ result += 'Callsign: %s<br>\n' % xml_escape(callsign)
if type_:
- result += u'Type: %s<br>\n' % SHIP_TYPES.get(type_, 'unknown')
+ result += 'Type: %s<br>\n' % SHIP_TYPES.get(type_, 'unknown')
if status != AIS_STATUS_NOT_AVAILABLE:
- result += u'Status: %s<br>\n' % STATUS_CODES.get(status, 'unknown')
+ result += 'Status: %s<br>\n' % STATUS_CODES.get(status, 'unknown')
if cog != AIS_COG_NOT_AVAILABLE:
- result += u'Course: %.01f°<br>\n' % (cog/10.)
+ result += 'Course: %.01f°<br>\n' % (cog / 10)
if heading != AIS_NO_HEADING:
- result += u'Heading: %d°<br>\n' % heading
+ result += 'Heading: %d°<br>\n' % heading
if sog != AIS_SOG_NOT_AVAILABLE:
if sog != AIS_SOG_FAST_MOVER:
- result += u'Speed: %.01f kts<br>\n' % (sog/AIS_SOG_SCALE)
+ result += 'Speed: %.01f kts<br>\n' % (sog / AIS_SOG_SCALE)
else:
- result += u'Speed: more that than 102.2 kts<br>\n'
+            result += 'Speed: more than 102.2 kts<br>\n'
length = nmea.get_length()
width = nmea.get_width()
if length or width or draught:
- result += u'Size: %dx%d' % (length, width)
+ result += 'Size: %dx%d' % (length, width)
if draught:
- result += u'/%.01f' % (draught/10.)
- result += u'm<br>\n'
+ result += '/%.01f' % (draught / 10)
+ result += 'm<br>\n'
destination = nmea.get_destination(default=None)
if destination:
- result += u'Destination: %s<br>\n' % xml_escape(destination)
+ result += 'Destination: %s<br>\n' % xml_escape(destination)
eta = nmea.get_eta_str(default=None)
if eta is not None:
- result += u'ETA: %s<br>\n' % eta
+ result += 'ETA: %s<br>\n' % eta
if (display_options & KML_DISPLAYOPT_SOURCES) and (source_1 or source_5):
- result += u'Source: '
+ result += 'Source: '
if source_1:
result += Nmea.format_source(source_1)
if source_5 and source_1 != source_5:
- result += u', '+ Nmea.format_source(source_5)
- result += u'<br>\n'
- result += u'<a href="' + AIS_BASE_URL + u'/vessel/%(mmsi)s/">More...</a>' \
+ result += ', ' + Nmea.format_source(source_5)
+ result += '<br>\n'
+ result += '<a href="' + AIS_BASE_URL + '/vessel/%(mmsi)s/">More...</a>' \
% {'mmsi': mmsi}
- result += u']]>\n'
- result += u'</description>\n'
+ result += ']]>\n'
+ result += '</description>\n'
- result += u'<styleUrl>#%s</styleUrl>\n' \
+ result += '<styleUrl>#%s</styleUrl>\n' \
% STYLE.get_style_name(nmea, is_lost)
- result += u'<Point>\n'
- result += u'<coordinates>%s,%s</coordinates>' \
- % (longitude/AIS_LATLON_SCALE, latitude/AIS_LATLON_SCALE)
- result += u'</Point>\n'
+ result += '<Point>\n'
+ result += '<coordinates>%s,%s</coordinates>' \
+ % (longitude / AIS_LATLON_SCALE, latitude / AIS_LATLON_SCALE)
+ result += '</Point>\n'
if timeinfo is not None:
- #result += u'<TimeStamp><when>%s</when></TimeStamp>\n' \
- # % (dt_1.strftime('%Y-%m-%dT%H:%M:%SZ'))
- result += u'<gx:TimeSpan><begin>%s</begin>' \
+ # result += '<TimeStamp><when>%s</when></TimeStamp>\n' \
+ # % (dt_1.strftime('%Y-%m-%dT%H:%M:%SZ'))
+ result += '<gx:TimeSpan><begin>%s</begin>' \
% dt_1.strftime('%Y-%m-%dT%H:%M:%SZ')
- if timeinfo != True:
- result += u'<end>%s</end>' \
+ if timeinfo is not True:
+ result += '<end>%s</end>' \
% timeinfo.strftime('%Y-%m-%dT%H:%M:%SZ')
- result += u'</gx:TimeSpan>\n'
- result += u'</Placemark>\n'
+ result += '</gx:TimeSpan>\n'
+ result += '</Placemark>\n'
return result
-
-
def format_fleet_lastpos(mmsi_iterator, document_name=None, display_options=0):
- result = u''
+ result = ''
result += KML_HEADER
if document_name is None:
document_name = 'AIS database'
- result += u'<name>%s</name>\n' % document_name
-
+ result += '<name>%s</name>\n' % document_name
+
result += STYLE.make_header()
long_ago = datetime_to_timestamp(datetime.utcnow() - timedelta(90))
for mmsi in mmsi_iterator:
nmea = Nmea.new_from_lastinfo(mmsi)
if not (display_options & KML_DISPLAYOPT_SHOWHIDDEN) and strmmsi_to_mmsi(mmsi) in get_hidden_mmsi():
- result += u'<Placemark>\n'
- result += u'<name>' + xml_escape(nmea.get_title()) + u'</name>\n'
- result += u'<description>Sorry, access to the position of that ship is restricted. It is not available for you.</description>\n'
- result += u'</Placemark>\n'
+ result += '<Placemark>\n'
+ result += '<name>' + xml_escape(nmea.get_title()) + '</name>\n'
+ result += '<description>Sorry, access to the position of that ship is restricted. It is not available for you.</description>\n'
+ result += '</Placemark>\n'
continue
if nmea.get_last_timestamp() < long_ago:
continue
- result += format_boat_data(nmea, display_options = display_options | KML_DISPLAYOPT_SOURCES)
+ result += format_boat_data(nmea, display_options=display_options | KML_DISPLAYOPT_SOURCES)
result += KML_FOOTER
return result
def format_boat_intime_section(nmea_iterator, kml_displayopt=0):
- result = u''
+ result = ''
last_nmea = None
for nmea in nmea_iterator:
if last_nmea is None:
timeinfo = datetime.utcfromtimestamp(last_nmea.timestamp_1)
result += format_boat_data(nmea, timeinfo,
- kml_displayopt|KML_DISPLAYOPT_HISTORICAL)
+ kml_displayopt | KML_DISPLAYOPT_HISTORICAL)
# make a copy because nmea will be patched with new data:
last_nmea = copy.copy(nmea)
if not result:
- result += u'<description>Vessel not found</description>'
+ result += '<description>Vessel not found</description>'
return result
def format_boat_intime(nmea_iterator):
- result = u''
+ result = ''
result += KML_HEADER
result += STYLE.make_header()
result += format_boat_intime_section(nmea_iterator)
return result
-def format_boat_track_section(nmea_iterator, name=u''):
+def format_boat_track_section(nmea_iterator, name=''):
strcoordinates = '<Placemark>\n<LineString>\n<coordinates>\n'
segment_length = 0
for nmea in nmea_iterator:
- if name == u'':
+ if name == '':
name = nmea.get_title()
if nmea.longitude != AIS_LON_NOT_AVAILABLE and nmea.latitude != AIS_LAT_NOT_AVAILABLE:
if segment_length > 65000:
                logging.debug('Line is too long. Splitting.')
strcoordinates += ' %.8f,%.8f' \
- % (nmea.longitude/AIS_LATLON_SCALE,
- nmea.latitude/AIS_LATLON_SCALE)
+ % (nmea.longitude / AIS_LATLON_SCALE,
+ nmea.latitude / AIS_LATLON_SCALE)
strcoordinates += '</coordinates>\n</LineString>\n</Placemark>\n<Placemark>\n<LineString>\n<coordinates>\n'
segment_length = 0
else:
segment_length += 1
strcoordinates += ' %.8f,%.8f' \
- % (nmea.longitude/AIS_LATLON_SCALE,
- nmea.latitude/AIS_LATLON_SCALE)
+ % (nmea.longitude / AIS_LATLON_SCALE,
+ nmea.latitude / AIS_LATLON_SCALE)
strcoordinates += '</coordinates>\n</LineString></Placemark>\n'
- result = u''
- result += u'<name>%s track</name>\n' % name
- if len(strcoordinates)>39+2*(1+12+1+11)+42+1:
- result += unicode(strcoordinates)
+ result = ''
+ result += '<name>%s track</name>\n' % name
+ if len(strcoordinates) > 39 + 2 * (1 + 12 + 1 + 11) + 42 + 1:
+ result += strcoordinates
else:
- result += u'<description>No data available</description>\n'
+ result += '<description>No data available</description>\n'
return result
def kml_to_kmz(inputstr):
- if isinstance(inputstr, unicode):
+    if isinstance(inputstr, str):
inputstr = inputstr.encode('utf-8')
- output = StringIO()
+ output = BytesIO()
kmz = zipfile.ZipFile(output, 'w')
kmz.writestr('doc.kml', inputstr)
for iconname in STYLE.used_icons:
- kmz.write('/usr/share/ais/kmz_icons/'+iconname, iconname)
+ kmz.write('/usr/share/ais/kmz_icons/' + iconname, iconname)
kmz.close()
return output.getvalue()
-
+
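# Hypothetical usage sketch of the two helpers above (the MMSI and the output path are
# made-up examples, and a populated AIS database is assumed):
#
#   kml = format_fleet_lastpos(['227123456'], document_name='Demo fleet')
#   with open('/tmp/fleet.kmz', 'wb') as out:
#       out.write(kml_to_kmz(kml))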
def main():
global DBPATH, STYLE
parser.add_option('--filter-speedcheck',
action='store', type='int', dest='speedcheck', default=200, metavar='KNOTS',
- help='Eliminate erroneaous positions from results,'
+        help='Eliminate erroneous positions from results,'
' based on impossible speed.')
parser.add_option('--filter-type',
parser.add_option('--label-size',
action='store', type='float', dest='label_size', metavar='SCALE', default=0.7,
help='Set label scale. Default = %default')
-
+
parser.add_option('--no-names',
action='store_const', const=KML_DISPLAYOPT_NONAMES,
dest='kml_displayopt_noname', default=0,
options, args = parser.parse_args()
-
if options.help_types:
-        keys = SHIP_TYPES.keys()
-        keys.sort()
-        for k in keys:
+        for k in sorted(SHIP_TYPES.keys()):
- print k, SHIP_TYPES[k]
+ print(k, SHIP_TYPES[k])
sys.exit(0)
DBPATH = options.db
# Ships selections
#
- if len(args)==0:
- print >> sys.stderr, "No ship to process"
+ if len(args) == 0:
+ print("No ship to process", file=sys.stderr)
sys.exit(1)
target_mmsi_iterator = []
if options.sdt_start:
# remove non digit characters
- options.sdt_start = "".join([ c for c in options.sdt_start if c.isdigit()])
- if len(options.sdt_start)==14:
+ options.sdt_start = "".join([c for c in options.sdt_start if c.isdigit()])
+ if len(options.sdt_start) == 14:
options.sdt_start = datetime.strptime(options.sdt_start, '%Y%m%d%H%M%S')
- elif len(options.sdt_start)==8:
+ elif len(options.sdt_start) == 8:
options.sdt_start = datetime.strptime(options.sdt_start, '%Y%m%d')
else:
- print >> sys.stderr, "Invalid format for --start option"
+ print("Invalid format for --start option", file=sys.stderr)
sys.exit(1)
if options.sdt_end:
# remove non digit characters
- options.sdt_end = "".join([ c for c in options.sdt_end if c.isdigit()])
- if len(options.sdt_end)==14:
+ options.sdt_end = "".join([c for c in options.sdt_end if c.isdigit()])
+ if len(options.sdt_end) == 14:
options.sdt_end = datetime.strptime(options.sdt_end, '%Y%m%d%H%M%S')
- elif len(options.sdt_end)==8:
+ elif len(options.sdt_end) == 8:
options.sdt_end = datetime.strptime(options.sdt_end, '%Y%m%d')
options.sdt_end = datetime.combine(options.sdt_end.date(), time(23, 59, 59))
else:
- print >> sys.stderr, "Invalid format for --end option"
+ print("Invalid format for --end option", file=sys.stderr)
sys.exit(1)
-
+
if options.sdt_duration:
# remove spaces
options.sdt_duration = options.sdt_duration.replace(' ', '')
duration_unit = 60
elif options.sdt_duration[-1] == 'H':
options.sdt_duration = options.sdt_duration[:-1]
- duration_unit = 60*60
+ duration_unit = 60 * 60
elif options.sdt_duration[-1] == 'D':
options.sdt_duration = options.sdt_duration[:-1]
- duration_unit = 24*60*60
+ duration_unit = 24 * 60 * 60
elif options.sdt_duration[-1] == 'W':
options.sdt_duration = options.sdt_duration[:-1]
- duration_unit = 7*24*60*60
+ duration_unit = 7 * 24 * 60 * 60
else:
duration_unit = 1
try:
-            options.sdt_duration = long(options.sdt_duration)
+            options.sdt_duration = int(options.sdt_duration)
except ValueError:
- print >> sys.stderr, "Can't parse duration"
+ print("Can't parse duration", file=sys.stderr)
sys.exit(1)
options.sdt_duration = timedelta(0, options.sdt_duration * duration_unit)
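        # Hedged reading of the suffix handling above (the minutes branch is elided
        # here but assumed to use 'M'): '--duration 90M' -> timedelta(0, 90 * 60),
        # '--duration 2D' -> timedelta(0, 2 * 24 * 60 * 60), and a bare number such
        # as '--duration 3600' is taken as seconds.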
if options.sdt_start or options.sdt_duration or options.granularity is not None or options.max_count:
# Time period is enabled (note that date_end only defaults to one day archives ending then)
if not options.sdt_start and not options.sdt_end and not options.sdt_duration:
- options.sdt_duration = timedelta(1) # One day
+ options.sdt_duration = timedelta(1) # One day
# continue without else
if not options.sdt_start and not options.sdt_end and options.sdt_duration:
dt_end = datetime.utcnow()
dt_start = dt_end - options.sdt_duration
- #elif not options.sdt_start and options.sdt_end and not options.sdt_duration:
+ # elif not options.sdt_start and options.sdt_end and not options.sdt_duration:
# never reached
elif not options.sdt_start and options.sdt_end and options.sdt_duration:
dt_end = options.sdt_end
dt_end = options.sdt_end
else:
assert options.sdt_start and options.sdt_end and options.sdt_duration, 'Internal error'
- print >> sys.stderr, "You can't have all 3 --start --end and --duration"
+ print("You can't have all 3 --start --end and --duration", file=sys.stderr)
sys.exit(1)
if options.granularity is None:
options.granularity = 600
options.max_count = 1
if options.granularity is None:
options.granularity = 600
-
+
logging.debug('--start is %s', dt_start)
logging.debug('--end is %s', dt_end)
#
filters = []
-
+
if options.filter_knownposition:
filters.append(filter_knownposition)
if options.speedcheck != 0:
- maxmps = options.speedcheck / 3600. # from knots to NM per seconds
+        maxmps = options.speedcheck / 3600  # from knots to NM per second
filters.append(lambda nmea: filter_speedcheck(nmea, maxmps))
if options.area_file:
-            lat = clean_latitude(unicode(options.close_to[0], 'utf-8'))
-            lon = clean_longitude(unicode(options.close_to[1], 'utf-8'))
+            lat = clean_latitude(options.close_to[0])
+            lon = clean_longitude(options.close_to[1])
except LatLonFormatError as err:
- print >> sys.stderr, err.args
+ print(err.args, file=sys.stderr)
sys.exit(1)
miles = float(options.close_to[2])
filters.append(lambda nmea: filter_close_to(nmea, lat, lon, miles))
-            lat = clean_latitude(unicode(options.far_from[0], 'utf-8'))
-            lon = clean_longitude(unicode(options.far_from[1], 'utf-8'))
+            lat = clean_latitude(options.far_from[0])
+            lon = clean_longitude(options.far_from[1])
except LatLonFormatError as err:
- print >> sys.stderr, err.args
+ print(err.args, file=sys.stderr)
sys.exit(1)
miles = float(options.far_from[2])
filters.append(lambda nmea: filter_far_from(nmea, lat, lon, miles))
-
+
if options.sog_le:
filters.append(lambda nmea: filter_sog_le(nmea, float(options.sog_le)))
if options.type_list:
def filter_type(nmea):
- #print nmea.type, repr(options.type_list), nmea.type in options.type_list
- #print repr(nmea.get_dump_row())
+ # print(nmea.type, repr(options.type_list), nmea.type in options.type_list)
+ # print(repr(nmea.get_dump_row()))
return nmea.type in options.type_list
filters.append(filter_type)
#
if options.format == 'positions':
- result = u''
+ result = ''
if not options.output_innerkml:
result += KML_HEADER
result += STYLE.make_header()
for mmsi in target_mmsi_iterator:
nmea_generator = NmeaFeeder(mmsi, dt_end, dt_start, filters, granularity=options.granularity, max_count=options.max_count)
for nmea in nmea_generator:
- result += format_boat_data(nmea, None, kml_displayopt|KML_DISPLAYOPT_HISTORICAL)
+ result += format_boat_data(nmea, None, kml_displayopt | KML_DISPLAYOPT_HISTORICAL)
if not options.output_innerkml:
result += KML_FOOTER
- elif options.format=='animation':
- result = u''
+ elif options.format == 'animation':
+ result = ''
if not options.output_innerkml:
result += KML_HEADER
result += STYLE.make_header()
nmea_generator = NmeaFeeder(mmsi, dt_end, dt_start, filters, granularity=options.granularity, max_count=options.max_count)
result += '<Folder>\n'
# TODO
- #result += u'<name>' + xml_escape(nmea.get_title()) + u'</name>\n'
- result += format_boat_intime_section(nmea_generator, kml_displayopt|KML_DISPLAYOPT_HISTORICAL)
+ # result += '<name>' + xml_escape(nmea.get_title()) + '</name>\n'
+ result += format_boat_intime_section(nmea_generator, kml_displayopt | KML_DISPLAYOPT_HISTORICAL)
result += '</Folder>\n'
if not options.output_innerkml:
result += KML_FOOTER
-
- elif options.format=='track':
- result = u''
+
+ elif options.format == 'track':
+ result = ''
if not options.output_innerkml:
result += KML_HEADER
# don't call STYLE.make_header since there is no icons
for mmsi in target_mmsi_iterator:
nmea_generator = NmeaFeeder(mmsi, dt_end, dt_start, filters, granularity=options.granularity, max_count=options.max_count)
- #result += '<Folder>\n'
+ # result += '<Folder>\n'
result += format_boat_track_section(nmea_generator)
- #result += '</Folder>\n'
+ # result += '</Folder>\n'
if not options.output_innerkml:
result += KML_FOOTER
else:
- print >> sys.stderr, 'Unknown output format'
+ print('Unknown output format', file=sys.stderr)
sys.exit(1)
-
+
result = result.encode('utf-8')
if not options.output_kml:
result = kml_to_kmz(result)
-    print result
+    sys.stdout.buffer.write(result)
+
if __name__ == '__main__':
main()
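# Hedged command-line sketch for the exporter above. The module path (ais.kml) and the
# MMSI are assumptions; the option names are taken from the parser in main():
#
#   python3 -m ais.kml --start 20240101 --duration 2D --filter-speedcheck 60 \
#       227123456 > fleet.kmz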
#!/usr/bin/env bash
umask 002
-sleep 26
+sleep 26.$RANDOM
set -e
-LOGPATH=/var/lib/ais/shipplotter/`date -u +%Y%m%d`
-LOGFILE=$LOGPATH/`date -u +%H%M%S`
-mkdir -p $LOGPATH
-python -m ais.extras.shipplotter_coaa > $LOGFILE
-python -m ais.extras.shipplotter_parselogs $LOGFILE
+#LOGPATH=/var/lib/ais/shipplotter/`date -u +%Y%m%d`
+#LOGFILE=$LOGPATH/`date -u +%H%M%S`
+#mkdir -p $LOGPATH
+python3 -m ais.extras.shipplotter_coaa
+#python3 -m ais.extras.shipplotter_parselogs $LOGFILE
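# The fractional random sleep above suggests the script is meant to be started from
# cron; a hypothetical /etc/cron.d entry (schedule and path are illustrative only):
#   */2 * * * * ais /etc/ais/shipplotter.sh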
log_daemon_msg "Starting AIS acquisition"
[ -e /var/run/ais ] || mkdir -p /var/run/ais && chown ais:ais /var/run/ais
- if start-stop-daemon --start --quiet --background --exec /usr/bin/python --user ais --pidfile $INPUTPIDFILE --chuid ais:ais --umask 002 --make-pidfile -- -m ais.inputs.run $RUN_PARAMS
+ if start-stop-daemon --start --quiet --background --exec /usr/bin/python3 --user ais --pidfile $INPUTPIDFILE --chuid ais:ais --umask 002 --make-pidfile -- -m ais.inputs.run $RUN_PARAMS
then
log_success_msg ais.inputs.run
else
log_daemon_msg "Starting AIS job runner"
if [ -r /etc/ais/database ]
then
- if start-stop-daemon --start --quiet --background --exec /usr/bin/python --user ais --pidfile $RUNNERPIDFILE --chuid ais:ais --umask 002 --make-pidfile -- -m ais.jobrunner --debug
+ if start-stop-daemon --start --quiet --background --exec /usr/bin/python3 --user ais --pidfile $RUNNERPIDFILE --chuid ais:ais --umask 002 --make-pidfile -- -m ais.jobrunner --debug
then
log_success_msg ais.jobrunner
else
RUN_PARAMS="$RUN_PARAMS --foreground"
log_daemon_msg "Stopping AIS acquisition"
- if start-stop-daemon --stop --quiet --exec /usr/bin/python --user ais --pidfile $INPUTPIDFILE -- -m ais.inputs.run $RUN_PARAMS
+ if start-stop-daemon --stop --quiet --exec /usr/bin/python3 --user ais --pidfile $INPUTPIDFILE -- -m ais.inputs.run $RUN_PARAMS
then
log_success_msg ais.inputs.run
else
log_daemon_msg "Stopping AIS job runner"
if [ -r /etc/ais/database ]
then
- if start-stop-daemon --start --quiet --background --exec /usr/bin/python --user ais --pidfile $RUNNERPIDFILE --chuid ais:ais --umask 002 --make-pidfile -- -m ais.jobrunner --debug
+        if start-stop-daemon --stop --quiet --exec /usr/bin/python3 --user ais --pidfile $RUNNERPIDFILE -- -m ais.jobrunner --debug
then
log_success_msg ais.jobrunner
else
d_start
;;
*)
- log_success_msg "Usage: ais.sh {start|stop}"
+ log_success_msg "Usage: $(basename $0) {start|stop|restart}"
exit 1
;;
esac