Change the Apache umask in /etc/apache2/envvars from 022 to 002 so that new folders are group writable.
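For reference, the edited line in /etc/apache2/envvars would look roughly like this (a minimal sketch, assuming a stock Debian/Ubuntu layout where a umask line is added or edited near the end of the file):

# umask for files and directories created by Apache worker processes
# was: umask 022
umask 002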
+a2enmod headers rewrite
start_time = models.DateTimeField(blank=True, null=True)
finish_time = models.DateTimeField(blank=True, null=True)
command = models.TextField()
- extension = models.CharField(max_length=8)
+ friendly_filename = models.CharField(max_length=255)
pid = models.IntegerField(blank=True, null=True)
result = models.IntegerField(blank=True, null=True)
archive_time = models.DateTimeField(blank=True, null=True)
#Normalize 403 errors
import os
+import os.path
from datetime import *
import re
from time import time as get_timestamp
job = Job()
job.user = request.user
job.command = command
- job.extension = u'kmz'
+ job.friendly_filename = u'%s.kmz' % strmmsi
job.save()
#request.user.info('Request queued as job %s' % job.id)
if not jobrunner.wakeup_daemon():
job = Job()
job.user = request.user
job.command = command
- job.extension = u'kmz'
+ job.friendly_filename = u'%s.kmz' % strmmsi
job.save()
#request.user.info('Request queued as job %s' % job.id)
if not jobrunner.wakeup_daemon():
job = Job()
job.user = request.user
job.command = command
- job.extension = u'csv'
+ job.friendly_filename = u'%s.csv' % strmmsi
job.save()
#request.user.info('Request queued as job %s' % job.id)
if not jobrunner.wakeup_daemon():
if not job.archive_time:
job.archive_time = datetime.utcnow()
job.save()
- return HttpResponseRedirect('/job_result/%s.%s' % (job.id, job.extension))
+ extension = os.path.splitext(job.friendly_filename)[-1]
+ return HttpResponseRedirect('/job_result/%s%s/%s' % (job.id, extension, job.friendly_filename))
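Note that os.path.splitext() returns the extension with its leading dot, which is why the redirect format string is '%s%s/%s' rather than '%s.%s/%s'. A minimal illustration, using a made-up MMSI and job id:

import os.path
friendly_filename = u'245678000.kmz'                  # hypothetical job.friendly_filename
extension = os.path.splitext(friendly_filename)[-1]   # '.kmz', dot included
print('/job_result/%s%s/%s' % (42, extension, friendly_filename))
# -> /job_result/42.kmz/245678000.kmz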
@http_authenticate(auth, 'ais')
def users(request):
import sys
import os
+import os.path
import time
import logging
import subprocess
logging.debug('Job %s is running: pid=%s', row[0], row[1])
return 1
- sqlexec(u'SELECT id, command, extension, user_id FROM job WHERE start_time IS NULL ORDER BY queue_time LIMIT 1')
+ sqlexec(u'SELECT id, command, friendly_filename, user_id FROM job WHERE start_time IS NULL ORDER BY queue_time LIMIT 1')
row = get_common_cursor().fetchone()
if row is None:
logging.debug('Queue is empty.')
return 0
- jobid, command, extension, user_id = row
+ jobid, command, friendly_filename, user_id = row
logging.info('Starting job %s: %s', jobid, command)
sqlexec(u'UPDATE job SET start_time=now() WHERE id=%(jobid)s', {'jobid': jobid})
dbcommit()
- output = file('/var/lib/ais/jobs/'+unicode(jobid)+'.'+extension, 'wb')
+ extension = os.path.splitext(friendly_filename)[-1]
+ output = file('/var/lib/ais/jobs/'+unicode(jobid)+extension, 'wb')
p = subprocess.Popen(command, stdout=output, shell=True)
logging.debug('System process id = %s', p.pid)
sqlexec(u'UPDATE job SET pid=' + unicode(p.pid) + ' WHERE id=%(jobid)s', {'jobid': jobid})
start_time timestamp without time zone,
finish_time timestamp without time zone,
command text NOT NULL,
- extension character varying(8) NOT NULL,
pid integer,
result integer,
- archive_time timestamp without time zone
+ archive_time timestamp without time zone,
+ friendly_filename character varying(255)
);
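For databases created with the previous schema, the same change can be applied in place; a sketch for PostgreSQL, assuming the table is named job and that old rows are backfilled as '<id>.<extension>' to match the filenames the daemon writes (the backfill naming is an assumption):

ALTER TABLE job ADD COLUMN friendly_filename character varying(255);
UPDATE job SET friendly_filename = id::text || '.' || extension;
ALTER TABLE job DROP COLUMN extension;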