#!/usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# arch-tag: Simple patch queue manager for tla
# Copyright © 2003,2004 Colin Walters <walters@verbum.org>
# Copyright © 2003 Walter Landry

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

# Some junk to try finding Python 2.3, if "python" on this system
# is too old.
import os,sys
if sys.hexversion >= 0x2030000:
    # Already running under >= 2.3; nothing to do.
    pass
else:
    # Try re-exec'ing ourselves under $PYTHON first, then a literal
    # "python2.3"; a successful execvp never returns.
    if os.getenv('PYTHON'):
        try:
            os.execvp(os.getenv('PYTHON'), [os.getenv('PYTHON')] + sys.argv)
        except:
            1  # no-op: exec failed, fall through to the next candidate
    try:
        os.execvp('python2.3', ['python2.3'] + sys.argv)
    except:
        1  # no-op: exec failed, report the error below
    sys.stderr.write("This program requires Python 2.3\n")
    sys.exit(1)

import string, stat, re, glob, getopt, time, traceback, gzip, getpass, popen2
import smtplib, email
import logging, logging.handlers
from ConfigParser import *

class GPGSigVerifierException(Exception):
    """Raised for setup/usage problems in GPGSigVerifier."""
    def __init__(self, value):
        self._value = value
    def __str__(self):
        # repr() instead of the deprecated backtick syntax (same result).
        return repr(self._value)

class GPGSigVerificationFailure(Exception):
    """Raised when gpgv rejects a signature.

    value: human-readable failure description.
    output: list of output lines captured from gpgv.
    """
    def __init__(self, value, output):
        self._value = value
        self._output = output
    def __str__(self):
        # repr() instead of the deprecated backtick syntax (same result).
        return repr(self._value)

    def getOutput(self):
        """Return the captured gpgv output lines."""
        return self._output

class GPGSigVerifier:
    """Verifies signed files by forking gpgv against a fixed set of keyrings."""
    def __init__(self, keyrings, gpgv=None):
        """keyrings: list of keyring paths; gpgv: gpgv binary (default /usr/bin/gpgv)."""
        self._keyrings = keyrings
        if gpgv is None:
            gpgv = '/usr/bin/gpgv'
        self._gpgv = gpgv

    def verify(self, filename, sigfilename=None):
        """Run gpgv on filename (optionally with detached sigfilename).

        Returns gpgv's combined stdout/stderr lines on success; raises
        GPGSigVerificationFailure if gpgv exits non-zero or dies.
        """
        (stdin, stdout) = os.pipe()
        pid = os.fork()
        if pid == 0:
            # Child: route stdout, stderr and --status-fd down the pipe, then exec.
            os.close(stdin)
            os.dup2(stdout, 1)
            os.dup2(stdout, 2)
            args = [self._gpgv]
            args.append('--status-fd=2')
            for keyring in self._keyrings:
                args.append('--keyring')
                args.append(keyring)
            if sigfilename:
                args.append(sigfilename)
            args.append(filename)
            os.execvp(self._gpgv, args)
            # exec failed.  Use os._exit: the original called os.exit(),
            # which does not exist, and sys.exit would run parent cleanup.
            os._exit(1)
        os.close(stdout)
        output = os.fdopen(stdin).readlines()
        (pid, status) = os.waitpid(pid, 0)
        if not (status is None or (os.WIFEXITED(status) and os.WEXITSTATUS(status) == 0)):
            # Pre-bind msg so it is defined even for an unexpected status
            # (the original left it unbound if no WIF* branch matched).
            msg = "gpgv failed with status %r" % (status,)
            if os.WIFEXITED(status):
                msg = "gpgv exited with error code %d" % (os.WEXITSTATUS(status),)
            elif os.WIFSTOPPED(status):
                msg = "gpgv stopped unexpectedly with signal %d" % (os.WSTOPSIG(status),)
            elif os.WIFSIGNALED(status):
                msg = "gpgv died with signal %d" % (os.WTERMSIG(status),)
            raise GPGSigVerificationFailure(msg, output)
        return output

def popen_noshell(cmd, *args):
    """Run cmd with args (no shell), /dev/null as stdin.

    Thin wrapper over popen_noshell_with_input; returns (status, msg, output).
    Uses direct *args call instead of the deprecated apply().
    """
    return popen_noshell_with_input(cmd, None, *args)

def popen_noshell_with_input(cmd, inputfd, *args):
    """Run cmd with args (no shell), feeding it inputfd as stdin (or /dev/null).

    The child's stdout and stderr are captured through a pipe.  Returns
    (status, msg, output): status is the raw waitpid status, msg is a
    human-readable failure description ('' on success) and output is the
    list of captured lines.
    """
    (stdin, stdout) = os.pipe()
    pid = os.fork()
    if pid == 0:
        # Child: wire up stdin, capture stdout+stderr, then exec.
        # (Indentation normalized: the original mixed tabs and spaces here.)
        os.close(stdin)
        if inputfd is None:
            inputfd = os.open('/dev/null', os.O_RDONLY)
        os.dup2(inputfd, 0)
        os.dup2(stdout, 1)
        os.dup2(stdout, 2)
        logger.info("running: " + string.join([cmd] + list(args),' '))
        os.execvp(cmd, [cmd] + list(args))
        # exec failed.  os._exit, not the nonexistent os.exit() the
        # original called, and not sys.exit (would run parent cleanup).
        os._exit(1)
    os.close(stdout)
    output = os.fdopen(stdin).readlines()
    (pid, status) = os.waitpid(pid, 0)
    msg = ''
    if not (status is None or (os.WIFEXITED(status) and os.WEXITSTATUS(status) == 0)):
        if os.WIFEXITED(status):
            msg = "%s exited with error code %d" % (cmd, os.WEXITSTATUS(status),)
        elif os.WIFSTOPPED(status):
            msg = "%s stopped unexpectedly with signal %d" % (cmd, os.WSTOPSIG(status),)
        elif os.WIFSIGNALED(status):
            msg = "%s died with signal %d" % (cmd, os.WTERMSIG(status),)
    return (status, msg, output)

# Built-in defaults; config files and command-line flags override these.
arch_path = 'tla'              # arch client binary (tla or ArX)
arch_impl = None               # TlaHandler/ArXHandler instance, chosen later
gnupatch_path = 'patch'        # GNU patch binary used for "patch" requests
gpgv_path = 'gpgv'             # GPG signature verifier binary
logfile_name = 'arch-pqm.log'  # relative names land under <queuedir>/arch-pqm
configfile_names = ['/etc/arch-pqm.conf', '~/.arch-pqm.conf', '~/.tla-pqm.conf']
default_mail_log_level = logging.ERROR
mail_server = 'localhost'      # SMTP host used for replies
queuedir = None                # where incoming requests are queued (mandatory)
workdir = None                 # scratch checkout area
logdir = None                  # per-request log output
keyring = None                 # GPG keyring used to authenticate senders
mail_reply = 1                 # mail results back to the sender?
verify_sigs = 1                # require valid GPG signatures on requests?
myname = 'Arch Patch Queue Manager'
from_address = None            # mandatory; validated after config parsing
allowed_revisions = {}         # config section name -> per-revision option map
precommit_hook = []            # global fallback pre-commit hook command

class AbstractArchHandler:
    """Interface between PQM requests and a concrete arch implementation.

    Every operation defaults to raising PQMTlaFailure with 'Unsupported
    operation'; subclasses (TlaHandler, ArXHandler) override what they
    actually support.
    """

    # --- archive management -------------------------------------------
    def do_make_archive(self, sender, archive, location):
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def do_register_archive(self, sender, archive, location):
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def do_whereis_archive(self, sender, archive):
        raise PQMTlaFailure(sender, 'Unsupported operation')

    # --- revision caching ---------------------------------------------
    def do_archive_cache(self, sender, fromarchive, fromrevision):
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def do_archive_uncache(self, sender, fromarchive, fromrevision):
        raise PQMTlaFailure(sender, 'Unsupported operation')

    # --- branching and merging ----------------------------------------
    def do_star_merge(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def do_tag(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def do_create_branch(self, sender, to_archive, to_revision):
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def do_create_version(self, sender, archive, revision):
        raise PQMTlaFailure(sender, 'Unsupported operation')

class ArXHandler(AbstractArchHandler):
    """Maps PQM operations onto the ArX command-line client."""

    def do_star_merge(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        source = '%s/%s' % (fromarchive, fromrevision)
        return runtla(sender, 'merge', '--in-place', source)

    def do_archive_cache(self, sender, fromarchive, fromrevision):
        rev = '%s/%s' % (fromarchive, fromrevision)
        return runtla(sender, 'archive-cache', '--add', rev)

    def do_archive_uncache(self, sender, fromarchive, fromrevision):
        rev = '%s/%s' % (fromarchive, fromrevision)
        return runtla(sender, 'archive-cache', '--delete', rev)

    def do_tag(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        source = '%s/%s' % (fromarchive, fromrevision)
        target = '%s/%s' % (to_archive, to_revision)
        return runtla(sender, 'tag', source, target)

    def do_create_branch(self, sender, to_archive, to_revision):
        branch = '%s/%s' % (to_archive, to_revision)
        return runtla(sender, 'fork', '--non-interactive', '--in-place', branch)

    def do_make_archive(self, sender, archive, location):
        return runtla(sender, 'make-archive', '%s' % (archive), '%s' % (location))

    def do_register_archive(self, sender, archive, location):
        return runtla(sender, 'register-archive', '%s' % (archive), '%s' % (location))

    def do_create_version(self, sender, archive, revision):
        version = '%s/%s' % (archive, revision)
        return runtla(sender, 'init-tree', '--non-interactive', version)

    def do_whereis_archive(self, sender, archive):
        return runtla(sender, 'archives', '-l', '%s/' % (archive))

class TlaHandler(AbstractArchHandler):
    """Maps PQM operations onto the tla command-line client."""
    def do_star_merge(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        return runtla(sender, 'star-merge', '%s/%s' % (fromarchive, fromrevision))

    def do_archive_cache(self, sender, fromarchive, fromrevision):
        return runtla(sender, 'cacherev', '%s/%s' % (fromarchive, fromrevision))

    def do_archive_uncache(self, sender, fromarchive, fromrevision):
        return runtla(sender, 'uncacherev', '%s/%s' % (fromarchive, fromrevision))

    def do_tag(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        return runtla(sender, 'tag', '%s/%s' % (fromarchive, fromrevision),
                      '%s/%s' % (to_archive, to_revision))

    def do_create_branch(self, sender, to_archive, to_revision):
        return runtla(sender, 'archive-setup', '%s/%s' % (to_archive, to_revision))

    def do_make_archive(self, sender, archive, location):
        return runtla(sender, 'make-archive', '%s' % (archive), '%s' % (location))

    def do_register_archive(self, sender, archive, location):
        return runtla(sender, 'register-archive', '%s' % (archive), '%s' % (location))

    def do_create_version(self, sender, archive, revision):
        # BUG FIX: the original referenced undefined to_archive/to_revision
        # here (NameError at runtime); use this method's own parameters.
        return runtla(sender, 'archive-setup', '%s/%s' % (archive, revision))

    def do_whereis_archive(self, sender, archive):
        return runtla(sender, 'whereis-archive', '%s' % (archive))


def usage(ecode, ver_only=None):
    print "arch-pqm 0"
    if ver_only:
        sys.exit(ecode)
    print "Usage: arch-pqm [OPTIONS...] [DIRECTORY]"
    print "Options:"
    print "  -v, --verbose\t\tDisplay extra information"
    print "  -q, --quiet\t\tDisplay less information"
    print "  -c, --config=FILE\tParse configuration info from FILE"
    print "  -d, --debug\t\tOutput information to stdout as well as log"
    print "  --no-log\t\tDon't write information to log file"
    print "  -n, --no-act\t\tDon't actually perform changes"
    print "  -r, --read\t\tRead a request from stdin"
    print "  --run\t\tProcess queue"
    print "  --report\t\tPrint patch report (used with --run)"
    print "  --no-verify\t\tDon't verify signatures"
    print "  --queuedir=DIR\t\tPerform first-time configuration"
    print "  --keyring=FILE\t\tUse the specified GPG keyring"
    print "  --help\t\tWhat you're looking at"
    print "  --version\t\tPrint the software version and exit"
    sys.exit(ecode)

# Parse the command line; a parse error prints usage and exits 1.
try:
    opts, args = getopt.getopt(sys.argv[1:], 'vqc:dnrk',
                               ['verbose', 'quiet', 'config=', 'debug', 'no-log',
                                'no-act', 'read', 'run', 'report', 'cron', 'no-verify',
                                'queuedir=', 'keyring=', 'help', 'version', ])
except getopt.GetoptError, e:
    sys.stderr.write("Error reading arguments: %s\n" % e)
    usage(1)
# --help/--version short-circuit before any other processing.
for (key, val) in opts:
    if key == '--help':
        usage(0)
    elif key == '--version':
        usage(0, ver_only=1)
# At most one positional argument (the queue directory) is accepted.
if len(args) > 1:
    sys.stderr.write("Unknown arguments: %s\n" % args[1:])
    usage(1)

logger = logging.getLogger("arch-pqm")

# Run-time flag defaults; adjusted by the option loop below.
loglevel = logging.WARN
no_act = 0
debug_mode = 0
run_mode = 0
read_mode = 0
cron_mode = 0
print_report = 0
no_log = 0
batch_mode = 0
custom_config_files = 0
for key, val in opts:
    if key in ('-v', '--verbose'):
        # Each -v raises verbosity one step: WARN -> INFO -> DEBUG.
        if loglevel == logging.INFO:
            loglevel = logging.DEBUG
        elif loglevel == logging.WARN:
            loglevel = logging.INFO
    elif key in ('-q', '--quiet'):
        # Each -q lowers verbosity one step: WARN -> ERROR -> CRITICAL.
        # BUG FIX: the original compared against WARN in both branches,
        # making CRITICAL unreachable.
        if loglevel == logging.WARN:
            loglevel = logging.ERROR
        elif loglevel == logging.ERROR:
            loglevel = logging.CRITICAL
    elif key in ('-c', '--config'):
        # The first -c replaces the default config file list entirely.
        if not custom_config_files:
            custom_config_files = 1
            configfile_names = []
        configfile_names.append(os.path.abspath(os.path.expanduser(val)))
    elif key in ('-n', '--no-act'):
        no_act = 1
    elif key in ('-d', '--debug'):
        debug_mode = 1
    elif key in ('--queuedir',):
        queuedir = val
    elif key in ('--keyring',):
        # BUG FIX: a duplicate branch above used `key in ('--keyring')` --
        # without the comma that is a *substring* test against the string
        # '--keyring', which falsely matched '-k' and clobbered keyring.
        keyring = val
    elif key in ('--no-log',):
        no_log = 1
    elif key in ('--no-verify',):
        verify_sigs = 0
    elif key in ('-r', '--read'):
        read_mode = 1
    elif key in ('--run',):
        run_mode = 1
    elif key in ('--cron',):
        cron_mode = 1
    elif key in ('--report',):
        print_report = 1

def do_mkdir(name):
    """Create directory `name` unless it already exists; honours no_act."""
    if not os.access(name, os.X_OK):
        try:
            logger.info('Creating directory "%s"' % (name))
        except:
            pass
        if not no_act:
            os.mkdir(name)

def do_rename(source, target):
    """Rename source to target; a dry run under no_act only logs."""
    try:
        logger.debug('Renaming "%s" to "%s"' % (source, target))
    except:
        pass
    if no_act:
        return
    os.rename(source, target)

def do_chmod(name, mode):
    """Change the mode bits of `name`; a dry run under no_act only logs."""
    try:
        logger.info('Changing mode of "%s" to %o' % (name, mode))
    except:
        pass
    if no_act:
        return
    os.chmod(name, mode)

# The logger itself stays at DEBUG so the file handler (attached later)
# can log more detail than the console handler.
logger.setLevel(logging.DEBUG)
stderr_handler = logging.StreamHandler(strm=sys.stderr)
# BUG FIX: the original called setLevel twice on this handler (harmless
# but redundant); set level and formatter once.
stderr_handler.setLevel(loglevel)
stderr_handler.setFormatter(logging.Formatter(fmt="%(name)s [%(thread)d] %(levelname)s: %(message)s"))
logger.addHandler(stderr_handler)

if not (read_mode or run_mode):
    logger.error("Either --read or --run must be specified")
    sys.exit(1)

# Read layered config files; later files in the list override earlier ones.
configp = ConfigParser()
configfile_names = map(lambda x: os.path.abspath(os.path.expanduser(x)), configfile_names)
logger.debug("Reading config files: %s" % (configfile_names,))
configp.read(configfile_names)

# Locate the arch client binary; 'tlapath' is the legacy option name.
if configp.has_option('DEFAULT', 'arch_path'):
    arch_path = configp.get('DEFAULT', 'arch_path')
elif configp.has_option('DEFAULT', 'tlapath'): 
    logger.warn("Option 'tlapath' is deprecated")
    arch_path = configp.get('DEFAULT', 'tlapath')

# NOTE(review): this check looks inverted -- os.access() returning true
# means arch_path IS executable, yet we error out.  A plain `not` fix would
# however start rejecting bare command names like the default 'tla' that
# are resolved via $PATH; confirm the intent before changing it.
if os.access(arch_path, os.X_OK):
    logger.error("Can't execute \"%s\", please fix arch_path" % (arch_path,))
    sys.exit(1)

# Select the handler: an explicit arch_impl setting wins; otherwise sniff
# the client's `--version` output for 'tla ' or 'ArX '.
if configp.has_option('DEFAULT', 'arch_impl'):
    impl = configp.get('DEFAULT', 'arch_impl')
    if impl == 'tla':
        arch_impl = TlaHandler()
    elif impl == 'arx':
        arch_impl = ArXHandler()
    else:
        logger.error("Unknown arch_impl \"%s\"" % (impl,))
        sys.exit(1)
else:
    (status, msg, output) = popen_noshell(arch_path, '--version')
    for line in output:
        if line.find('tla ') >= 0:
            arch_impl = TlaHandler()
            break
        elif line.find('ArX ') >= 0:
            arch_impl = ArXHandler()
            break
    if not arch_impl:
        logger.error("Couldn't determine arch implementation, please set arch_impl")
        sys.exit(1)

if configp.has_option('DEFAULT', 'gpgv_path'):
    gpgv_path = configp.get('DEFAULT', 'gpgv_path')

if configp.has_option('DEFAULT', 'myname'):
    myname = configp.get('DEFAULT', 'myname')
# from_address is mandatory: replies and reports are mailed from it.
if configp.has_option('DEFAULT', 'from_address'):
    from_address = configp.get('DEFAULT', 'from_address')
else:
    logger.error("No from_address specified")
    sys.exit(1)
fromaddr = '%s <%s>' % (myname, from_address)

# Boolean toggles: mail results back to senders / require GPG signatures.
if configp.has_option('DEFAULT', 'mail_reply'):
    mail_reply = configp.getboolean('DEFAULT', 'mail_reply')
if configp.has_option('DEFAULT', 'verify_sigs'):
    verify_sigs = configp.getboolean('DEFAULT', 'verify_sigs')

# Resolve the queue directory.  Precedence: --queuedir flag > config file
# > positional argument.
if not queuedir:
    if configp.has_option('DEFAULT', 'queuedir'):
        queuedir = os.path.expanduser(configp.get('DEFAULT', 'queuedir'))
    elif len(args) > 0:
        queuedir = args[0]
    else:
        logger.error("No queuedir specified on command line or in config files.")
        sys.exit(1)
queuedir = os.path.abspath(queuedir)

if not configp.has_option('DEFAULT', 'dont_set_home'):
    # Point HOME at the queue dir so the arch client reads its per-user
    # state from there.  (Indentation normalized: the original used a
    # literal tab on this line, inconsistent with the file's 4-space style.)
    os.environ['HOME'] = queuedir

# Working directory for checkouts (default: <queuedir>/workdir).
if configp.has_option('DEFAULT', 'workdir'):
    workdir = os.path.abspath(os.path.expanduser(configp.get('DEFAULT', 'workdir')))
else:
    workdir = os.path.join(queuedir, 'workdir')

# Directory for per-request logs (default: <queuedir>/logs).
if configp.has_option('DEFAULT', 'logdir'):
    logdir = os.path.abspath(os.path.expanduser(configp.get('DEFAULT', 'logdir')))
else:
    logdir = os.path.abspath(os.path.join(queuedir, 'logs'))

# A readable GPG keyring is mandatory: it authenticates request senders.
if not keyring:
    if configp.has_option('DEFAULT', 'keyring'):
        keyring = configp.get('DEFAULT', 'keyring')
    else:
        logger.error("No keyring specified on command line or in config files.")
        sys.exit(1)
if not os.access(keyring, os.R_OK):
    logger.error("Couldn't access keyring %s" % (keyring,))
    sys.exit(1)

# Every non-DEFAULT config section names an archive/revision this PQM
# manages; at least one is required.
sects = configp.sections()
if len(sects) > 0:
    for sect in sects:
        logger.info("managing revision: " + sect)
        allowed_revisions[sect] = {}
else:
    logger.error("No revisions to manage!")
    sys.exit(1)

class RevisionOptionHandler:
    """Resolves per-revision options, layering [DEFAULT] under section values.

    Option metadata lives in _optionmap: name -> [type, default], where
    type is one of 'int', 'str', 'bool'.
    """
    def __init__(self, revisions, configp):
        self._configp = configp
        self._revisions = revisions
        self._optionmap = {}
        self._optionmap['precommit_hook'] = ['str', None]

    def get_option_map(self, dist):
        """Fill and return the option dict for config section `dist`.

        The dict stored in the revisions mapping is updated in place.
        Precedence per option: section value > [DEFAULT] value > built-in
        default.
        """
        ret = self._revisions[dist]
        for key in self._optionmap.keys():
            type = self._optionmap[key][0]
            ret[key] = self._optionmap[key][1]
            if self._configp.has_option ('DEFAULT', key):
                ret[key] = self.get_option (type, 'DEFAULT', key)
            if self._configp.has_option (dist, key):
                ret[key] = self.get_option (type, dist, key)
        return ret

    def get_option (self, type, dist, key):
        """Fetch option `key` from section `dist`, coerced per `type`."""
        if type == 'int':
            return self._configp.getint(dist, key)
        elif type == 'str':
            return self._configp.get(dist, key)
        elif type == 'bool':
            return self._configp.getboolean(dist, key)
        # BUG FIX: the original used assert(None) here, which is stripped
        # under -O and would silently return None; fail loudly instead.
        raise ValueError("unknown option type %r" % (type,))

# Resolve per-revision options now that the config is fully loaded.
rev_optionhandler = RevisionOptionHandler(allowed_revisions, configp)

for rev in allowed_revisions.keys():
    allowed_revisions[rev] = rev_optionhandler.get_option_map(rev)

# Create the directory layout and make the queue dir our cwd.
do_mkdir(queuedir)
os.chdir(queuedir)
do_mkdir(workdir)
do_mkdir(logdir)
pqm_subdir = os.path.join(queuedir, 'arch-pqm')
do_mkdir(pqm_subdir)

if configp.has_option('DEFAULT', 'logfile'):
    logfile_name = configp.get('DEFAULT', 'logfile')

# Attach the persistent log file; relative names land in <queuedir>/arch-pqm.
if not no_log:
    if not os.path.isabs(logfile_name):
        logfile_name = os.path.join(pqm_subdir, logfile_name)
    logger.debug("Adding log file: %s" % (logfile_name,))
    filehandler = logging.FileHandler(logfile_name)
    # The file log is at least INFO, and DEBUG whenever the console level
    # was changed from the default.
    if loglevel == logging.WARN:
        filehandler.setLevel(logging.INFO)
    else:
        filehandler.setLevel(logging.DEBUG)
    logger.addHandler(filehandler)
    filehandler.setFormatter(logging.Formatter(fmt="%(asctime)s %(name)s [%(thread)d] %(levelname)s: %(message)s", datefmt="%b %d %H:%M:%S"))

if not (debug_mode or batch_mode):
    # Don't log to stderr past this point
    logger.removeHandler(stderr_handler)

class PQMException(Exception):
    """Base error for PQM request processing; carries the requesting sender."""
    def __init__(self, sender, msg):
        self.sender = sender
        self.msg = msg
    def __str__(self):
        # repr() instead of the deprecated backtick syntax (same result).
        return repr(self.msg)

class PQMTlaFailure(PQMException):
    """Raised when an arch (tla/ArX) command fails.

    output: list of output lines from the failed command.
    """
    def __init__(self, sender, output):
        self.sender = sender
        self.output = output
        # BUG FIX: the inherited __str__ reads self.msg, which the original
        # never set, so str() raised AttributeError; alias it to the output.
        self.msg = output

class PQMCmdFailure(Exception):
    """Raised when one command in a request fails.

    goodcmds: commands that succeeded before the failure.
    badcmd: the command that failed.
    output: accumulated output lines up to and including the failure.
    """
    def __init__(self, sender, goodcmds, badcmd, output):
        (self.sender, self.goodcmds,
         self.badcmd, self.output) = sender, goodcmds, badcmd, output

# Load the set of transaction ids that have already been processed; the
# file holds one id per line.
transaction_file = os.path.join(queuedir, 'transactions-completed')
used_transactions = {}
if os.access(transaction_file, os.R_OK):
    for line in open(transaction_file).readlines():
        # Strip the trailing newline; the value is just a presence flag.
        used_transactions[line[0:-1]] = 1

def runtla_internal(sender, cmd, *args):
    """Run the configured arch binary with cmd/args; return (status, msg, output).

    Uses a direct *args call instead of the deprecated apply().
    """
    return popen_noshell(arch_path, cmd, *args)

def runtla(sender, cmd, *args):
    """Run an arch command; return its output lines.

    Raises PQMTlaFailure (carrying the command's output) on non-zero exit.
    Uses a direct *args call instead of the deprecated apply().
    """
    (status, msg, output) = runtla_internal(sender, cmd, *args)
    if not ((status is None) or (status == 0)):
        raise PQMTlaFailure(sender, ["arch command %s %s failed (%s): %s" % (cmd, args, status, msg)] + output)
    return output

def write_lines_to_fd(lines):
    """Fork a writer child and return a pipe fd from which `lines` can be read.

    Each line is written followed by a newline.  The caller must close the
    returned descriptor when done.
    """
    (stdin, stdout) = os.pipe()
    pid = os.fork()
    if pid != 0:
        # Parent: hand back the read end of the pipe.
        os.close(stdout)
        return stdin
    os.close(stdin)
    for line in lines:
        os.write(stdout, line)
        os.write(stdout, '\n')
    # BUG FIX: os._exit, not sys.exit -- sys.exit raises SystemExit, which
    # in the forked child would run the parent's cleanup/atexit handlers.
    os._exit(0)

def do_patch(sender, content):
    """Apply a GNU patch (given as a list of lines) in the current directory.

    The first line must be a summary (not patch content).  Returns
    (summary, output); raises PQMException on malformed/unsafe patches or
    patch(1) failure.
    """
    def is_patchline(line):
        return line != '' and (line[0] in ('+', '-') or line[0:2] == '@@')
    if content == []:
        raise PQMException(sender, "Empty patch content")
    if not is_patchline(content[0]):
        summary = content[0]
    else:
        raise PQMException(sender, "No summary given for patch")
    filenames = []
    for line in content:
        if line[0:4] in ('+++ ', '--- '):
            # We intentionally include the date, etc - stripping it out is too hard and error-prone
            filenames.insert(0, line[4:].strip())
    for filename in filenames:
        # BUG FIX: the original tested find(...) > 0, which missed a leading
        # "../" (find returns 0 there); >= 0 rejects it as well.
        if (filename.find('/..') >= 0) or (filename.find('../') >= 0):
            # BUG FIX: the original passed the format string and filename as
            # separate arguments, so PQMException got 3 args -> TypeError.
            raise PQMException(sender, "Invalid backreferencing filename in patch: %s" % (filename,))
        elif filename[0] == '/':
            raise PQMException(sender, "Invalid absolute filename in patch: %s" % (filename,))
    fd = write_lines_to_fd(content)
    (status, msg, output) = popen_noshell_with_input(gnupatch_path, fd, '-p1', '--batch', '--no-backup-if-mismatch')
    os.close(fd)
    if not ((status is None) or (status == 0)):
        raise PQMException(sender, ["patch command \"%s\" failed (%s): %s" % (gnupatch_path, status, msg)] + output)
    return (summary, output)

def validate_revision(sender, archive, revision, output):
    try:
	arch_impl.do_whereis_archive(sender,archive)
	runtla(sender, 'parse-package-name', revision)
    except PQMTlaFailure, e:
	raise PQMCmdFailure(sender, successful, line, output + e.output)

def run_precommit(sender, successful, archive, revision, output):
    """Run the configured pre-commit hook (if any) for archive/revision.

    Appends the hook's output to `output` and returns it; raises
    PQMCmdFailure if the hook exits non-zero.
    """
    hook = allowed_revisions[archive + '/' + revision]['precommit_hook']
    if not hook:
        hook = precommit_hook
    if hook:
        logger.info("running precommit hook: %s" % (hook,))
        output += ['\n', 'Executing pre-commit hook %s at %s' % (hook, time.strftime('%c')), '\n']
        child = popen2.Popen4(hook)
        child.tochild.close()
        output += child.fromchild.readlines()
        ecode = child.wait()
        if not ((ecode is None) or (ecode == 0)):
            # BUG FIX: the original referenced an undefined `line` (NameError)
            # and reported `ecode - 255`, which is only correct for exit
            # status 1; decode the waitpid status properly instead.
            if os.WIFEXITED(ecode):
                failcode = os.WEXITSTATUS(ecode)
            else:
                failcode = ecode
            raise PQMCmdFailure(sender, successful, str(hook),
                                output + ['\npre-commit hook failed with error code %d at %s\n' % (failcode, time.strftime('%c'))])
        output += ['\n', 'pre-commit hook succeeded at %s' % (time.strftime('%c')), '\n']
    return output
    
def run_commands(sender, commitmsg, msg):
    star_re = re.compile('^star-merge (\S+/\S+)\s+(\S+/\S+)\s*$')
    archive_cache_re = re.compile('^archive-cache-revision (\S+/\S+)\s*$')
    archive_uncache_re = re.compile('^archive-uncache-revision (\S+/\S+)\s*$')
    tag_re = re.compile('^tag (\S+/\S+)\s+(\S+/\S+)\s*$')
    make_archive_re = re.compile('^make-archive (\S+)\s+(\S+)\s*$')
    my_id_re = re.compile('^my-id (\S.*)\s*$')
    register_archive_re = re.compile('^register-archive (\S+)\s+(\S+)\s*$')
    create_branch_re = re.compile('^create-branch (\S+/\S+)\s+(\S+/\S+)\s*$')
    create_version_re = re.compile('^create-version (\S+/\S+)\s*$')
    patch_re = re.compile('^patch (\S+/\S+)\s*$')
    whitespace_re = re.compile('^\s*$')
    pgp_re = re.compile('^-----BEGIN PGP.*MESSAGE')
    pgp_end_re = re.compile('^-----BEGIN PGP SIG')
    accumulating_patch = False
    patch_target = None
    patch_content = []
    successful = []
    unrecognized = []
    output = []
    skipnext = None
    logger.info("cleaning working directory")
    cleanup_wd()
    logger.info("parsing commands")
    for line in msg.split('\n'):
        if skipnext:
            skipnext = None
            continue
        if whitespace_re.match(line):
            continue
        if pgp_re.match(line):
            skipnext=1
            continue
        if pgp_end_re.match(line):
            break
	patch_match = patch_re.match(line)
        star_match = star_re.match(line)
        archive_cache_match=archive_cache_re.match(line)
        archive_uncache_match=archive_uncache_re.match(line)
        tag_match=tag_re.match(line)
        create_branch_match=create_branch_re.match(line)
        make_archive_match=make_archive_re.match(line)
        my_id_match=my_id_re.match(line)
        register_archive_match=register_archive_re.match(line)
        create_version_match=create_version_re.match(line)
	if patch_match:
	    # GNU Patch
	    logger.info("patch content found, target: %s", patch_match.group(1))
	    patch_target = patch_match.group(1)
	    accumulating_patch = True
	    continue
	elif accumulating_patch:
	    patch_content.append(line)
	    continue
        elif star_match:
	    # Star-merge
            from_archive_revision = star_match.group(1)
            to_archive_revision = star_match.group(2)
            (from_archive, from_revision) = from_archive_revision.split('/', 1)
            (to_archive, to_revision) = to_archive_revision.split('/', 1)
#             if not allowed_revisions.has_key(to_archive_revision):
#                 raise PQMException(sender, "Disallowed archive/revision: " + to_archive_revision)
	    validate_revision(sender, from_archive, from_revision, output)
	    validate_revision(sender, to_archive, to_revision, output)
            logger.info("getting working dir for %s/%s" % (to_archive, to_revision))
            dir = get_wd(sender, to_archive, to_revision)
            origdir = os.getcwd()
            output += ['\n', 'Executing star-merge %s/%s at %s' % (from_archive, from_revision,
                                                                   time.strftime('%c')), '\n']
            try:
                os.chdir(dir)
                try:
                    output += arch_impl.do_star_merge(sender, from_archive, from_revision, to_archive, to_revision)
                except PQMTlaFailure, e:
                    raise PQMCmdFailure(sender, successful, line, output + e.output)
            except:
                os.chdir(origdir)
                raise
	    output = run_precommit(sender, successful, to_archive, to_revision, output)
            os.chdir(origdir)
            logger.info("success: %s" % (line,))
            successful.append(line)
            output += ['\n', 'star-merge succeeded at %s' % (time.strftime('%c')), '\n']
            try:
                os.chdir(dir)
                runtla(sender, 'commit', '-s', commitmsg)
                logger.info("commit succeeded")
            finally:
                os.chdir(origdir)
        elif archive_cache_match:
            # Cache a revision
            archive_revision = archive_cache_match.group(1)
            (archive, revision) = archive_revision.split('/', 1)
##                 if not allowed_revisions.has_key(archive_revision):
##                     raise PQMException(sender, "Disallowed archive/revision: " + archive_revision)
            try:
                arch_impl.do_whereis_archive(sender,archive)
                runtla(sender, 'parse-package-name', revision)
            except PQMTlaFailure, e:
                raise PQMCmdFailure(sender, successful, line, output + e.output)
            output += ['\n', 'Executing archive-cache-revision %s/%s at %s' % (archive, revision,
                                                                               time.strftime('%c')), '\n']
            try:
                output += arch_impl.do_archive_cache(sender, archive,revision)
            except PQMTlaFailure, e:
                raise PQMCmdFailure(sender, successful, line, output + e.output)
            logger.info("success: %s" % (line,))
            successful.append(line)
            output += ['\n', 'archive-cache-revision succeeded at %s' % (time.strftime('%c')), '\n']
        elif archive_uncache_match:
            # Uncache a revision
            archive_revision = archive_uncache_match.group(1)
            (archive, revision) = archive_revision.split('/', 1)
##                     if not allowed_revisions.has_key(archive_revision):
##                         raise PQMException(sender, "Disallowed archive/revision: " + archive_revision)
            try:
                arch_impl.do_whereis_archive(sender,archive)
                runtla(sender, 'parse-package-name', revision)
            except PQMTlaFailure, e:
                raise PQMCmdFailure(sender, successful, line, output + e.output)
            output += ['\n', 'Executing archive-uncache-revision %s/%s at %s' % (archive, revision,
                                                                                 time.strftime('%c')), '\n']
            try:
                output += arch_impl.do_archive_uncache(sender, archive, revision)
            except PQMTlaFailure, e:
                raise PQMCmdFailure(sender, successful, line, output + e.output)
            logger.info("success: %s" % (line,))
            successful.append(line)
            output += ['\n', 'archive-uncache-revision succeeded at %s' % (time.strftime('%c')), '\n']
        elif tag_match:
            # Tag a branch
            from_archive_revision = tag_match.group(1)
            to_archive_revision = tag_match.group(2)
            (from_archive, from_revision) = from_archive_revision.split('/', 1)
            (to_archive, to_revision) = to_archive_revision.split('/', 1)
##                         if not allowed_revisions.has_key(to_archive_revision):
##                             raise PQMException(sender, "Disallowed archive/revision: " + to_archive_revision)
            try:
                arch_impl.do_whereis_archive(sender,from_archive)
                arch_impl.do_whereis_archive(sender,to_archive)
                runtla(sender, 'parse-package-name', from_revision)
                runtla(sender, 'parse-package-name', to_revision)
            except PQMTlaFailure, e:
                raise PQMCmdFailure(sender, successful, line, output + e.output)
            output += ['\n', 'Executing tag %s/%s at %s' % (from_archive, from_revision,
                                                            time.strftime('%c')), '\n']
            try:
                output += arch_impl.do_tag(sender, from_archive,
                                           from_revision,
                                           to_archive, to_revision)
            except PQMTlaFailure, e:
                raise PQMCmdFailure(sender, successful, line, output + e.output)
            logger.info("success: %s" % (line,))
            successful.append(line)
            output += ['\n', 'tag succeeded at %s' % (time.strftime('%c')), '\n']
        elif create_branch_match:
            # Create a branch
            from_archive_revision = create_branch_match.group(1)
            to_archive_revision = create_branch_match.group(2)
            (from_archive, from_revision) = from_archive_revision.split('/', 1)
            (to_archive, to_revision) = to_archive_revision.split('/', 1)
##                             if not allowed_revisions.has_key(to_archive_revision):
##                                 raise PQMException(sender, "Disallowed archive/revision: " + to_archive_revision)
            try:
                arch_impl.do_whereis_archive(sender,from_archive)
                arch_impl.do_whereis_archive(sender,to_archive)
                runtla(sender, 'parse-package-name', from_revision)
                runtla(sender, 'parse-package-name', to_revision)
            except PQMTlaFailure, e:
                raise PQMCmdFailure(sender, successful, line, output + e.output)
            logger.info("getting working dir for %s/%s" % (to_archive, to_revision))
            dir = get_wd(sender, from_archive, from_revision)
            origdir = os.getcwd()
            output += ['\n', 'Executing create-branch %s/%s %s/%s at %s' % (from_archive, from_revision, to_archive, to_revision,
                                                                            time.strftime('%c')), '\n']
            try:
                os.chdir(dir)
                try:
                    output += arch_impl.do_create_branch(sender,
                                                         to_archive,
                                                         to_revision)
                except PQMTlaFailure, e:
                    raise PQMCmdFailure(sender, successful, line, output + e.output)
            except:
                os.chdir(origdir)
                raise
            os.chdir(origdir)
            logger.info("success: %s" % (line,))
            successful.append(line)
            output += ['\n', 'create-branch succeeded at %s' % (time.strftime('%c')), '\n']
            try:
                os.chdir(dir)
                runtla(sender, 'commit', '-s', commitmsg)
                logger.info("commit succeeded")
            finally:
                os.chdir(origdir)
        elif make_archive_match:
            # Make an archive
            archive = make_archive_match.group(1)
            location = make_archive_match.group(2)
            output += ['\n', 'Executing make-archive %s %s at %s' % (archive, location,
                                                                     time.strftime('%c')), '\n']
            try:
                output += arch_impl.do_make_archive(sender, archive,
                                                    location)
            except PQMTlaFailure, e:
                raise PQMCmdFailure(sender, successful, line, output + e.output)
            logger.info("success: %s" % (line,))
            successful.append(line)
            output += ['\n', 'make-archive succeeded at %s' % (time.strftime('%c')), '\n']
        elif register_archive_match:
            # Register an archive.  Note that this
            # also works for unregistering an
            # archive by making the archive=='-d'
            # and the location be the archive.
            archive = register_archive_match.group(1)
            location = register_archive_match.group(2)
            output += ['\n', 'Executing register-archive %s %s at %s' % (archive, location,
                                                                         time.strftime('%c')), '\n']
            try:
                output += arch_impl.do_register_archive(sender, archive,
                                                        location)
            except PQMTlaFailure, e:
                raise PQMCmdFailure(sender, successful, line, output + e.output)
            logger.info("success: %s" % (line,))
            successful.append(line)
            output += ['\n', 'register-archive succeeded at %s' % (time.strftime('%c')), '\n']
        elif create_version_match:
            # Create a new line of development
            archive_revision = create_version_match.group(1)
            (archive, revision) = archive_revision.split('/', 1)
	    validate_revision(sender, archive, revision, output)
            logger.info("getting working dir for %s/%s" % (archive, revision))
            
            dirpath=os.path.join(workdir, archive)
            
            if not os.access(dirpath, os.W_OK):
                os.mkdir(dirpath)
                
            os.chdir(dirpath)
            dir=os.path.join(dirpath, revision)        
            if os.access(dir, os.W_OK):
                raise PQMException(sender, "Working dir already exists: " + dir)
            os.mkdir(dir)
            
            
            origdir = os.getcwd()
            output += ['\n', 'Executing create-version %s/%s at %s' % (archive, revision, time.strftime('%c')), '\n']
            try:
                os.chdir(dir)
                try:
                    output += arch_impl.do_create_version(sender,
                                                          archive,
                                                          revision)
                except PQMTlaFailure, e:
                    raise PQMCmdFailure(sender, successful, line, output + e.output)
            except:
                os.chdir(origdir)
                raise
            os.chdir(origdir)
            logger.info("success: %s" % (line,))
            successful.append(line)
            output += ['\n', 'create-version succeeded at %s' % (time.strftime('%c')), '\n']
            try:
                os.chdir(dir)
                runtla(sender, 'commit', '-s', commitmsg)
                logger.info("commit succeeded")
            finally:
                os.chdir(origdir)
        elif my_id_match:
            myid = my_id_match.group(1)
            try:
                runtla(sender, 'my-id', myid)
            except PQMTlaFailure, e:
                raise PQMCmdFailure(sender, successful, line, output + e.output)
            logger.info("success: %s" % (line,))
            successful.append(line)
            output += ['\n', 'my-id succeeded at %s' % (time.strftime('%c')), '\n']
        else:
            unrecognized.append(line)
    if patch_content != []:
	to_archive_revision = patch_target
	(to_archive, to_revision) = to_archive_revision.split('/', 1)
	validate_revision(sender, to_archive, to_revision, output)
	logger.info("getting working dir for %s/%s" % (to_archive, to_revision))
	origdir = os.getcwd()
	dir = get_wd(sender, to_archive, to_revision)
	try:
	    os.chdir(dir)
	    try:
		output += ['\n']
		(summary, moreoutput) = do_patch(sender, patch_content)
		output += moreoutput
		output += ['\n']
	    except PQMTlaFailure, e:
		raise PQMCmdFailure(sender, successful, 'patch ' + patch_target, output + e.output)
	except:
	    os.chdir(origdir)
	    raise
	logger.info("executing patch")
	output = run_precommit(sender, successful, to_archive, to_revision, output)
	successful.append('patch ' + patch_target)
	output += ['\n', 'patch succeeded at %s' % (time.strftime('%c')), '\n']
	try:
	    os.chdir(dir)
	    runtla(sender, 'commit', '-s', summary)
	    logger.info("commit succeeded")
	finally:
	    os.chdir(origdir)
    cleanup_wd()
    return (successful, unrecognized, output)

def get_wd(sender, archive, revision):
    # Check out a fresh working copy of archive/revision beneath the
    # global workdir and return its full path.  Raises PQMException if
    # the target directory already exists (a previous run left it behind).
    archive_dir = os.path.join(workdir, archive)
    if not os.access(archive_dir, os.W_OK):
        os.mkdir(archive_dir)
    os.chdir(archive_dir)
    target = os.path.join(archive_dir, revision)
    if os.access(target, os.W_OK):
        raise PQMException(sender, "Working dir already exists: " + target)
    # 'tla get' creates the revision directory inside the cwd.
    runtla(sender, 'get', '%s/%s' % (archive, revision), revision)
    return target

def rm_rf(top):
    """Recursively delete 'top' and everything beneath it, like rm -rf.

    Symbolic links are unlinked, never followed.  A missing path is
    silently ignored.  BUGFIX: the previous version only emptied the
    tree (leaving 'top' itself behind) and did nothing at all when
    'top' was a plain file, so cleanup_wd accumulated empty directories
    under the work directory.
    """
    if not os.path.isdir(top) or os.path.islink(top):
        # A plain file, a symlink (even one pointing at a directory),
        # or a path that no longer exists.
        if os.path.islink(top) or os.path.isfile(top):
            os.remove(top)
        return
    for root, dirs, files in os.walk(top, topdown=False):
        for name in files:
            os.remove(os.path.join(root, name))
        for name in dirs:
            path = os.path.join(root, name)
            if os.path.islink(path):
                # Remove the link itself; never descend into its target.
                os.remove(path)
            else:
                os.rmdir(path)
    # Bottom-up walk has emptied everything; drop the root directory too.
    os.rmdir(top)

def cleanup_wd():
   for top in os.listdir(workdir):
       rm_rf(os.path.join(workdir, top))

def verify_sig(sender, msg, sig):
    verifier = GPGSigVerifier([keyring], gpgv=gpgv_path)
    try:
        tmp_msgpath=os.path.join(pqm_subdir,'tmp-msg')
        open(tmp_msgpath, 'w').write(msg)
        if sig:
            tmp_sigpath=os.path.join(pqm_subdir,'tmp-sig')
            open(tmp_sigpath, 'w').write(sig)
        else:
            tmp_sigpath=None
        output = verifier.verify(tmp_msgpath, tmp_sigpath)
        os.unlink(tmp_msgpath)
        if sig:
            os.unlink(tmp_sigpath)
    except GPGSigVerificationFailure, e:
        raise PQMException(sender, "Failed to verify signature: " + e._value)
    gpgre = re.compile('^\[GNUPG:\] (SIG_ID.+)$')
    sigid = None
    for line in output:
        match = gpgre.match(line)
        if match:
            sigid = match.group(1)
            break
    if not sigid:        
        raise PQMException(sender, "Couldn't determine signature timestamp")
    if used_transactions.has_key(sigid):
        logger.error("Replay attack detected, aborting")
        raise PQMException(sender, "Replay attack detected, aborting")
    return sigid

def read_email():
    """Parse a request email from stdin.

    Returns a (sender, subject, body, signature) tuple; signature is
    None for single-part messages.  A signed request must be a
    two-part multipart: text/plain body plus a pgp-signature part.
    """
    message = email.message_from_file(sys.stdin)
    sender = message['From']
    subject = message['Subject']
    if not sender:
        raise PQMException(None, "No From specified")
    if (not subject) or subject == '':
        raise PQMException(sender, "No Subject specified")
    if not message.is_multipart():
        # Plain message: commands (possibly clearsigned) in the body.
        return (sender, subject, message.get_payload(), None)
    parts = message.get_payload()
    if len(parts) != 2:
        raise PQMException(sender, "Multipart message must have exactly two parts")
    if parts[0].get_content_type() != 'text/plain':
        raise PQMException(sender, "First part of multipart message must be text/plain")
    if parts[1].get_content_type() != 'application/pgp-signature':
        raise PQMException(sender, "Second part of multipart message must be application/pgp-signature")
    return (sender, subject, parts[0].get_payload(), parts[1].get_payload())

# Read mode: accept one request email on stdin, optionally verify its
# GPG signature, and spool it into the queue as patch.<timestamp> for a
# later run-mode invocation to process.  Exits the process either way.
if read_mode:
    sender = None
    try:
        (sender, subject, msg, sig) = read_email()
        if verify_sigs:
            # Append the signature id to the transaction log so the same
            # signed message can never be replayed (verify_sig raises if
            # the id was seen before).
            sigid = verify_sig(sender, msg, sig)
            open(transaction_file, 'a').write(sigid + '\n')
        fname = 'patch.%d' % (time.time())
        logger.info('new patch ' + fname)
        # Write under a temporary name and rename into place, so a
        # concurrent run-mode scan never picks up a half-written file.
        f = open('tmp.' + fname, 'w')
        f.write('From: ' + sender + '\n')
        f.write('Subject: ' + subject + '\n')
        f.write(string.join(re.split('\r?\n', msg), '\n')) # canonicalize line endings
        f.close()
        os.rename('tmp.' + fname, fname)
    except:
        # Deliberate catch-all boundary: best-effort failure report back
        # to the sender (when known), then exit nonzero.
        if sender and mail_reply:
            server = smtplib.SMTP(mail_server)
            tb=string.join(traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback), '')
            server.sendmail(from_address, [sender], 'From: %s\r\nTo: %s\r\nSubject: error processing requests\r\n\r\n' % (fromaddr, sender) + 'An error was encountered:\n' + tb)
            server.quit()
        logger.exception("Caught exception")
        sys.exit(1)
    sys.exit(0)

# Read mode exits above, so run mode is the only remaining possibility.
assert(run_mode)

# Lock file guarding against concurrent run-mode invocations.
lockfilename = os.path.join(pqm_subdir, 'arch-pqm.lock')

def acquire_lock_file():
    """Atomically create the run-mode lock file, exiting if it exists.

    In cron mode a pre-existing lock is normal (a previous run is still
    in progress), so exit quietly with status 0; otherwise log an error
    and exit 1.  Honors the global no_act dry-run flag.
    """
    logger.info('creating lockfile')
    if no_act:
        return
    try:
        # O_CREAT | O_EXCL makes creation atomic: exactly one process
        # can win the race to create the lock file.
        fd = os.open(lockfilename, os.O_CREAT | os.O_EXCL)
    except OSError:
        if cron_mode:
            logger.info("lockfile %s already exists, exiting", lockfilename)
            sys.exit(0)
        else:
            logger.error("Couldn't create lockfile: %s", lockfilename)
            sys.exit(1)
    # BUGFIX: the descriptor was previously leaked for the process
    # lifetime; only the file's existence matters, so close it now.
    os.close(fd)

acquire_lock_file()

if run_mode:
    patches=[]
    patches_re=re.compile('^patch\.\d+$')
    for f in os.listdir(queuedir):
        if patches_re.match(f):
            fname=os.path.join(queuedir, f)
            patches.append((fname, os.stat(fname)[stat.ST_MTIME]))
    def sortpatches(a, b):
        return cmp(a[1],b[1])
    patches.sort(sortpatches)
    (goodpatches, badpatches) = ([], [])
    for patch in patches:
        logger.info('trying patch ' + patch[0])
        logname = os.path.join(logdir, os.path.basename(patch[0]) + '.log')
        msg = email.message_from_file(open(patch[0]))
        sender = msg['From']
        success = False
        failed_output = []
        try:
            (successes, unrecognized, output) = run_commands(msg['From'], msg['Subject'], msg.get_payload())
            logger.info('successes: %s' % (successes,))
            logger.info('unrecognized: %s' % (unrecognized,))
            success = True
            goodpatches.append((patch[0], logname))
            f = open(logname, 'w')
            for l in output:
                f.write(l)
            f.close()
        except PQMCmdFailure, e:
            badpatches.append((patch[0], logname))
            successes = e.goodcmds
            failedcmd = e.badcmd
            failed_output = e.output
            f = open(logname, 'w')
            for l in e.output:
                f.write(l)
            f.close()
        os.unlink(patch[0])
        if not mail_reply:
            logger.info('not sending mail reply')
            continue
        retmesg = []
        if success:
            for success in successes:
                retmesg.append('> ' + success)
                retmesg.append('Command was succcessful.')
                for line in unrecognized:
                    retmesg.append('> ' + line)
                    retmesg.append('Unrecognized command.')
                    retmesg = string.join(retmesg, '\n')
            if len(successes) > 0:
                statusmsg='success'
            else:
                statusmsg='no valid commands given'
            server = smtplib.SMTP(mail_server)
            server.sendmail(from_address, [sender], ('From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n' % (fromaddr, sender, statusmsg)) + string.join(retmesg, '\n'))
            server.quit()
        else:
            for success in successes:
                retmesg.append('> ' + success)
                retmesg.append('Command passed checks, but was not committed.')
                for line in unrecognized:
                    retmesg.append('> ' + line)
                    retmesg.append('Unrecognized command.')
                    retmesg = string.join(retmesg, '\n')
            retmesg.append('> ' + failedcmd)
            retmesg.append('Command failed!')
            retmesg.append('Last 20 lines of log output:')
            for l in failed_output[-20:]:
                retmesg.append(l)
            server = smtplib.SMTP(mail_server)
            server.sendmail(from_address, [sender], ('From: %s\r\nTo: %s\r\nSubject: failure\r\n\r\n' % (fromaddr, sender)) + string.join(retmesg, '\n'))
            server.quit()
            
    if print_report:
        for (patchname, logname) in goodpatches:
            print "Patch: " + patch[0]
            print "Status: success"
            print "Log: " + logname
            print
        for (patchname, logname) in badpatches:
            print "Patch: " + patch[0]
            print "Status: failure"
            print "Log: " + logname
            print
logger.debug('Removing lock file: ' + lockfilename)
if not no_act:
    os.unlink(lockfilename)
logger.info("main thread exiting...")
sys.exit(0)
