#!/usr/bin/python3
# coding: utf-8

import click
import re
from tempfile import TemporaryDirectory
from os.path import isdir, join, isfile, abspath, dirname, \
    realpath, splitext, split as split_path
from subprocess import check_call, check_output
from shutil import which
from shlex import quote as shell_quote

__version__ = "0.1"

# Verbosity levels: NONE shows only plain (level-0) messages, LOW adds detail.
NONE, LOW = 0, 1
VERBOSITY = NONE  # overwritten from the CLI's -v/--verbose count in cli()

# USEFUL ROUTINES

def cached_eval(f, cache):
    '''Evaluate f() at most once, memoizing the result in *cache* (a list).

    An empty cache means "not computed yet"; afterwards the single stored
    element is returned without calling f again.
    '''
    if not cache:
        cache.append(f())
    return cache[0]

def cached_constant(f):
    '''Wrap the nullary callable *f* so that it runs at most once.

    The returned wrapper calls f() on first use and hands back the
    memoized result on every subsequent call.
    '''
    memo = []
    def wrapper():
        if not memo:
            memo.append(f())
        return memo[0]
    return wrapper

def cached_property(f):
    '''A read-only property that computes f(self) once per instance.

    The result is stored on the instance itself (under a private
    attribute derived from the function name) and reused on later
    accesses.

    Bug fix: the previous implementation kept ONE cache list per
    decorated function (created at class-definition time), so the value
    computed for the first instance was silently returned for every
    other instance of the class.  Caching on the instance makes each
    object get its own value while preserving the decorator interface.
    '''
    attr = '_cached_' + f.__name__
    def _f(self):
        try:
            # fast path: value already computed for this instance
            return getattr(self, attr)
        except AttributeError:
            value = f(self)
            setattr(self, attr, value)
            return value
    return property(_f)

# Shared TemporaryDirectory, created lazily on first use.
tmpdir = cached_constant(TemporaryDirectory)

def tmppath(name):
    '''Return the path for *name* inside the shared temp directory.'''
    return join(tmpdir().name, name)

def log(msg, v = 0):
    '''Print a green log line; *v* is the message's verbosity level.

    With verbosity switched off only level-0 messages appear (untagged);
    otherwise messages up to the configured level appear, tagged with
    their level.
    '''
    quiet = VERBOSITY == NONE
    if quiet and v == 0:
        click.secho('LOG {}'.format(msg), fg = 'green')
    elif not quiet and v <= VERBOSITY:
        click.secho('LOG[{}] {}'.format(v, msg), fg = 'green')

def log_check_call(cmd, **kwds):
    '''Log *cmd* at LOW verbosity, then run it via subprocess.check_call.'''
    log('Run (check): {}'.format(cmd), LOW)
    return check_call(cmd, **kwds)

def log_check_output(cmd, **kwds):
    '''Log *cmd* at LOW verbosity, then return its subprocess.check_output.'''
    log('Run (output): {}'.format(cmd), LOW)
    return check_output(cmd, **kwds)

@cached_constant
def find_bundle_files():
    '''Locate the directory holding the bundle files.

    Looks next to this script first, then in the system-wide location;
    the result is computed once (cached_constant).
    '''
    script_dir = dirname(abspath(realpath(__file__)))
    for location in [ script_dir, '/usr/share/debocker' ]:
        candidate = join(location, 'bundle-files')
        if isdir(candidate):
            log("Bundle files found in '{}'.".format(candidate), LOW)
            return candidate
    assert False, 'Could not find bundle files'

# DOCKER-RELATED ROUTINES

@cached_constant
def assert_docker():
    # Raise a click usage error unless the 'docker' binary is on PATH.
    # Wrapped in cached_constant so the PATH lookup happens only once.
    if which('docker') is None:
        raise click.UsageError('docker is not available')

# TODO: probably use python-debian
class Package:

    def __init__(self, path):
        self.path = path
        self.debian = join(self.path, 'debian')
        self.control = join(self.debian, 'control')
        self.changelog = join(self.debian, 'changelog')
        self.source_format = join(self.debian, 'source', 'format')

    def is_valid(self):
        '''verifies that the current directory is a debian package'''
        return isdir(self.debian) and isfile(self.control) and \
            isfile(self.source_format) and self.format in ['native', 'quilt']

    def assert_is_valid(self):
        if not self.is_valid():
            raise click.UsageError('not in Debian package directory')

    @cached_property
    def format(self):
        with open(self.source_format) as f:
            line = f.readline().strip()
        m = re.match(r'^3\.0 \((native|quilt)\)', line)
        assert m, 'Unsupported format ({})'.format(line)
        fmt = m.group(1)
        log("Detected format '{}'.".format(fmt), LOW)
        return fmt

    @cached_property
    def native(self):
        return self.format == 'native'

    @cached_property
    def name(self):
        with open(self.control) as f:
            for line in f:
                m = re.match(r'^Source: (\S+)$', line)
                if m:
                    return m.group(1)
        assert False, 'could not find the name of the package'

    @cached_property
    def long_version(self):
        '''long version'''
        with open(self.changelog) as f:
            line = f.readline()
        m = re.match(r'^(\S+) \((\S+)\)', line)
        assert m, 'could not parse package version'
        return m.group(2)

    @cached_property
    def version(self):
        '''upstream version'''
        if self.native:
            return self.long_version
        else:
            m = re.match(r'^(.+)-(\d+)$', self.long_version)
            assert m, 'could not parse version ({})'.format(self.long_version)
            return m.group(1)

    @cached_property
    def orig_tarball_candidates(self):
        '''possible original upstream tarballs'''
        formats = [ 'xz', 'gz', 'bz2' ]
        names = [ '{}_{}.orig.tar.{}'.format(self.name, self.version, fmt)
                  for fmt in formats ]
        return [ join(self.path, '..', name) for name in names  ]

    @cached_property
    def orig_tarball(self):
        '''finds the original tarball'''
        for tarball in self.orig_tarball_candidates:
            if isfile(tarball):
                log("Original tarball found at '{}'.".format(tarball), LOW)
                return tarball
        assert False, 'could not find original tarball'

    def assert_orig_tarball(self):
        return self.orig_tarball  # simple alias

    def tar_bundle_files(self, output, stdout):
        '''tars basic bundle files, must be first'''
        bundle_files = find_bundle_files()
        tar = [ 'tar', 'cvf', output, '-C', bundle_files, '.' ]
        log_check_call(tar, stdout = stdout)

    def tar_package_source(self, output, stdout):
        tar = [ 'tar', 'rvf', output,
                '--transform', 's,^.,./pkg/sources,',
                '-C', self.path, '.' ]
        log_check_call(tar, stdout = stdout)

    def tar_original_tarball(self, output, stdout):
        orig = self.assert_orig_tarball()
        orig_dir, orig_file = split_path(orig)
        tar = [ 'tar', 'rvf', output,
                '--transform', 's,^,./pkg/,',
                '-C', orig_dir, orig_file ]
        log_check_call(tar, stdout = stdout)

    def build_docker_tarball(self, output):
        '''builds the docker tarball that builds the package'''
        tar_log = tmppath('tar.log')  # to store tar output
        with open(tar_log, 'wb') as tar_log_file:
            self.tar_bundle_files(output, stdout = tar_log_file)
            self.tar_package_source(output, stdout = tar_log_file)
            if not self.native:
                self.tar_original_tarball(output, stdout = tar_log_file)

    COMPRESSORS = {
        '.xz': [ 'xz', '-f' ],
        '.gz': [ 'gzip', '-f' ],
        '.bz2': [ 'bzip2', '-f' ],
    }

    def build_docker_tarball_comp(self, output):
        '''builds compressed tarball'''
        base, ext = splitext(output)
        if ext in Package.COMPRESSORS:
            prog = Package.COMPRESSORS[ext]
            log("Using '{}' to compress.".format(prog[0]), LOW)
            self.build_docker_tarball(base)
            log_check_call(prog + [ base ])
            assert isfile(output), 'Wrong archive produced'
        else:
            self.build_docker_tarball(output)


# CLI INTERFACE

@click.group()
@click.option('-v', '--verbose', count=True,
              help = 'be verbose, repeat for more effect')
def cli(verbose):
    # Root command group: record the -v repeat count in the global VERBOSITY.
    # (No docstring on purpose — click would display it as the group's help.)
    global VERBOSITY
    VERBOSITY = verbose

@cli.command(help = 'Build tar bundle')
@click.option('-o', '--output', default = None,
              help = 'output file')
def bundle(output):
    # Create a (possibly compressed) bundle tarball for the package in
    # the current directory; the default output lands next to it.
    package = Package('.')
    package.assert_is_valid()
    if output is None:
        default_name = '{}-{}-bundle.tar.xz'.format(package.name,
                                                    package.long_version)
        output = join('..', default_name)
    log('Preparing bundle for {} ({})...'.format(package.name, package.version))
    if not package.native:
        # fail early when the upstream tarball is missing
        package.assert_orig_tarball()
    package.build_docker_tarball_comp(output)
    log("Bundle created in '{}'.".format(output))

@cli.command('build-bundle', help = 'Build bundle')
@click.argument('bundle')
@click.option('-o', '--output', default = '.', help = 'output directory')
def build_bundle(bundle, output):
    # Build the package inside docker from a bundle tarball and extract
    # the resulting files into the output directory.
    assert_docker()
    build_log = tmppath('build.log')
    bundle_esc = shell_quote(bundle)
    build_log_esc = shell_quote(build_log)
    # TODO: quite ugly, cannot be done cleaner?
    # Pipe the bundle into 'docker build' while tee-ing the output to a
    # log file, so the image id can be recovered from it below.
    log_check_call('docker build - < {} | tee {}'.
                   format(bundle_esc, build_log_esc), shell = True)
    with open(build_log) as f:
        s = f.read().strip()
        # docker is expected to report 'Successfully built <id>' exactly once
        ms = re.findall(r'Successfully built (\S+)', s)
        assert len(ms) == 1, 'Cannot parse logs'
        image = ms[0]
    log('Build successful (in {})'.format(image))
    # extract the build
    # TODO: should it untar on-the-fly?
    build_tar = tmppath('build-tar.tar')
    with open(build_tar, 'wb') as f:
        # /root/build-tar inside the image streams the build artifacts as
        # a tar to stdout — presumably installed by the bundle files; verify
        log_check_call([ 'docker', 'run', image, '/root/build-tar' ],
                       stdout = f)
    log("Build tar stored in '{}'".format(build_tar))
    tar_list = log_check_output([ 'tar', 'tf', build_tar ])
    # NOTE(review): split() breaks on filenames containing whitespace;
    # the list is only used for logging here
    tar_files = tar_list.decode('utf-8').split()
    log("Build files: {}".format(' '.join(tar_files)), LOW)
    log_check_call([ 'tar', 'xf', build_tar, '-C', output ])
    log("Build files stored in '{}'.".format(output))

@cli.command(help = 'Build package')
@click.option('-o', '--output', default= '..', help = 'output directory')
@click.pass_context
def build(ctx, output):
    # Convenience command: bundle the current package into a temporary
    # tarball, then build that bundle — both via the sibling commands.
    assert_docker()
    tarball_path = tmppath('bundle.tar')
    ctx.invoke(bundle, output = tarball_path)
    ctx.invoke(build_bundle, bundle = tarball_path, output = output)

# Script entry point: dispatch to the click command group.
if __name__ == '__main__':
    cli()
