#!/usr/bin/env python3

# Follow the instructions in https://wiki.ubuntu.com/SimpleSbuild

import datetime
import tempfile
import time
import json
import os
import glob
import git
import shutil
import pysftp
from git import Repo
import subprocess
from email.utils import formatdate
import re
import sys
import platform
import argparse

# Command-line options. --jobs limits sbuild parallelism; 0 (the default)
# leaves the decision to sbuild.
# Fix: the old description was the argparse docs example text
# ("Process some integers."), which produced misleading --help output.
parser = argparse.ArgumentParser(description='Build and upload OpenModelica Debian/Ubuntu packages.')
parser.add_argument('--jobs', type=int, default=0, help='Number of parallel jobs')
args = parser.parse_args()
if args.jobs > 0:
  default_sbuild_flags=["--jobs=%d" % args.jobs]
else:
  default_sbuild_flags=[]

# Maps each distribution codename to its distro family; used as the
# --distro argument for mk-sbuild in updateVMs.
ubuntu_or_debian={
  "stretch":"debian",
  "jessie":"debian",
  "wheezy":"debian",
  "trusty":"ubuntu",
  "xenial":"ubuntu",
  "yakkety":"ubuntu",
  "zesty":"ubuntu",
  "artful":"ubuntu",
  "bionic":"ubuntu"
}
# Dists without cross-compilation support.
# NOTE(review): not referenced anywhere in this file chunk — confirm it is
# still needed before removing.
nocrosscompile=["jessie", "trusty", "wheezy", "xenial"]
# Dists skipped entirely for armhf (see updateVMs and the main build loop).
noarm=["yakkety","zesty","artful"]
# Architectures to build on this host, keyed by the machine type.
# NOTE(review): any other platform.machine() value raises KeyError here,
# aborting the script at import time — presumably intentional.
archs = {
  'armv7l': ['armhf'],
  'x86_64': ['amd64', 'i386']
}[platform.machine()]

def readable_sha(sha):
  """Normalize a commit id to hex text.

  A 7- or 40-character hex string is returned unchanged; a 20-byte raw
  binary sha (e.g. GitPython's binsha) is expanded to its 40-character
  hex form. Anything else is rejected.
  """
  length = len(sha)
  if length in (7, 40):
    return sha
  if length == 20:
    # Raw binsha: each byte becomes two lowercase hex digits.
    return "".join("%0.2x" % int(byte) for byte in sha)
  raise(Exception("Can only handle 20 or 40-len sha's, got %s" % sha))

def makeRepo(path, url, bare=False):
  """Clone or update the git repository at `path` from `url`.

  If `path` exists it is reused (the origin remote is recreated when the
  URL changed); otherwise a fresh repo is initialized and fetched. In both
  cases origin is fetched and, for non-bare repos, the working tree is
  hard-reset to origin/master.

  Returns (repo, master_head_commit).
  Raises Exception if origin/master was not found in the fetch results.
  """
  if os.path.exists(path):
    repo = Repo(path)
    origin = repo.remotes.origin
    if url != origin.url:
      # The upstream moved: recreate origin so it points at the new URL.
      repo.delete_remote("origin")
      origin = repo.create_remote("origin", url=url)
  else:
    repo = Repo.init(path, bare=bare)
    origin = repo.create_remote("origin", url=url)
    origin.fetch()
    repo.create_head('master', origin.refs.master).set_tracking_branch(origin.refs.master)
  master_head = None
  for fetch_info in origin.fetch():
    if str(fetch_info.ref) == "origin/master":
      print("Updated %s %s to %s" % (url, fetch_info.ref, fetch_info.commit))
      master_head = fetch_info.commit
  # Fix: the original asserted master_head AFTER already using it in
  # head.reset(), so a missing origin/master failed with a confusing error
  # (or none at all under python -O). Validate before use, and raise
  # explicitly instead of relying on assert.
  if master_head is None:
    raise Exception("origin/master not found when fetching %s into %s" % (url, path))
  if not bare:
    repo.head.reset(commit=master_head, index=True, working_tree=True)
  print("Repo %s has hash %s" % (repo, master_head))
  return (repo, master_head)

def describe(g, commit):
  """Turn `git describe` output for `commit` into a Debian-style version.

  The leading 'v' tag prefix is stripped and the first '-' becomes '~'
  (so e.g. 'v1.9.4-123-gabc1234' -> '1.9.4~123-gabc1234', which sorts
  correctly for Debian packaging).

  Raises Exception if the describe output does not start with 'v'.
  """
  try:
    res = g.execute(["git", "describe", "--abbrev=7", commit]).replace('-', '~', 1)
    # Fix: was a bare assert (stripped under python -O); raise explicitly.
    if not res.startswith('v'):
      raise Exception("Expected 'v'-prefixed describe output, got %s" % res)
    return res[1:]
  # Fix: was a bare `except:`, which also intercepted KeyboardInterrupt
  # and SystemExit before re-raising.
  except Exception:
    print("Failed to describe", g.working_dir, commit)
    raise

def recArchiveSubmodule(baseRepo, ftar, repo, commit, prefix):
  """Append `repo` at `commit` to the open tar stream `ftar` under `prefix`,
  then recurse into each submodule so the result is one self-contained tree.

  The resulting stream is a concatenation of tar archives (one per
  (sub)module); extraction therefore needs tar --ignore-zeros.
  """
  print(repo, commit, readable_sha(commit))
  commit = readable_sha(commit)  # normalize a raw binsha to 40-char hex
  # Fix: commit is already hex here; the original redundantly re-ran
  # readable_sha(commit) inside repo.commit().
  repo.head.reset(repo.commit(commit), working_tree=True)
  repo.archive(ftar, treeish=commit, prefix=prefix+"/")
  for sm in repo.submodules:
    print(sm.module_exists())
    # Relative submodule URLs ("../x") resolve against the local base mirror.
    sm_url = sm.url.replace("../", baseRepo + "/")
    (sm_repo, sm_head) = makeRepo(os.path.basename(sm.url), sm_url)
    recArchiveSubmodule(baseRepo, ftar, sm_repo, sm.binsha, prefix + "/" + sm.path)

def makeSource(name, baseRepo, sourceDir, buildScriptsDir, g, repo, commit):
  """Build the Debian source package files for project `name` at `commit`.

  Produces <name>_<version>{.orig.tar.xz,-1.dsc,-1_source.changes,
  -1.debian.tar.xz} in `sourceDir`, skipping the work if all four already
  exist. `commit` is either a 40-char sha (which is git-described into a
  version) or an already describe-style version ending in -g<shorthash>.

  Raises on any failing external command (tar/debuild) and if `commit`
  is neither form.
  """
  if len(commit)==40:
    desc = describe(g, commit)
  else:
    # commit is already a describe-style version; the short hash at the
    # end is what we archive.
    desc = commit
    # Fix: was a bare assert (stripped under python -O).
    if not re.search("-g[0-9a-f]{7}$", commit):
      raise Exception("Expected a describe-style version ending in -g<hash>, got %s" % commit)
    commit = commit[-7:]
  prefix = "%s_%s" % (name.lower(), desc)
  suffixes = [".orig.tar.xz", "-1.dsc", "-1_source.changes", "-1.debian.tar.xz"]
  files = [sourceDir + "/" + prefix + suffix for suffix in suffixes]
  print("Look for files %s" % ", ".join(files))
  # Fix: replaced the opaque sum(False==exists...) counting idiom.
  if not all(os.path.exists(f) for f in files):
    tempdir = tempfile.mkdtemp(prefix="OpenModelica-make-packages.py")
    tar = prefix+".orig.tar"

    with open(os.path.join(tempdir, tar), 'wb') as ftar:
      recArchiveSubmodule(baseRepo, ftar, repo, commit, prefix)
    # --ignore-zeros: the stream is several concatenated tar archives
    # (one per submodule), so don't stop at the first end-of-archive.
    # Fix: check_call instead of assert(0==call(...)) (asserts vanish
    # under python -O and gave no useful error).
    subprocess.check_call(["tar", "--ignore-zeros", "-C", tempdir, "-xf", os.path.join(tempdir,tar)])
    subprocess.check_call(["tar", "-C", tempdir, "-cJf", os.path.join(tempdir,tar+".xz"), prefix])
    debdir = os.path.join(tempdir,prefix,"debian")
    shutil.copytree(os.path.join(buildScriptsDir,name,"debian"), debdir, symlinks=True)
    # Fix: use a context manager; the file handle previously leaked on error.
    with open(os.path.join(debdir, "changelog"), "r+") as f:
      contents = f.read().replace("@REV@", desc).replace("@TIME@", formatdate()).replace("@DISTS@", "precise trusty utopic vivid wheezy")
      f.seek(0)
      f.truncate()
      f.write(contents)
    # Force xz on all platforms (default is changing from gz to xz)
    subprocess.check_call(["debuild","-us","-uc","-S","-Zxz"], cwd=os.path.join(tempdir,prefix))
    for suffix in suffixes:
      f = os.path.join(sourceDir,prefix+suffix)
      if os.path.exists(f):
        os.unlink(f)
      shutil.move(os.path.join(tempdir,prefix+suffix), f)
      print("Made source file %s" % f)
    # Fix: the temporary build directory was never removed (disk leak on
    # long-running build hosts).
    shutil.rmtree(tempdir)

def updateSources(projects):
  """Create or refresh source packages for every project in the config.

  Projects flagged "SeparateSource" are skipped (their sources are made
  elsewhere). Relies on the module-level baseRepo and buildScriptsDir.
  """
  sourceDir = projects["SourceDir"]
  if not os.path.exists(sourceDir):
    os.mkdir(sourceDir)

  for name, project in projects["Projects"].items():
    if "SeparateSource" in project:
      continue
    if not os.path.exists(name):
      os.mkdir(name)
    repo_dir = os.path.join(name, "repo")
    (repo, head) = makeRepo(repo_dir, os.path.join(baseRepo, project["repo"]))
    g = git.cmd.Git(repo_dir)
    # One source package per stability channel.
    for commit in (project["stable"], project["release"], project["nightly"]):
      makeSource(name, baseRepo, sourceDir, buildScriptsDir, g, repo, commit)

def updateVMs(projects):
  """Create missing sbuild chroots and bring every chroot up to date.

  For each (dist, arch) pair: run mk-sbuild (via sudo) if the schroot
  does not exist yet, enable its source chroot entry, then run apt
  update/upgrade/install/clean inside it.

  Raises on any failing mk-sbuild/sed call, and if an apt command fails
  twice in a row.
  """
  lvm = projects["LVM-VG"]
  dists = projects["Dists"]
  for dist in dists:
    for chroot in ["%s-%s" % (dist, arch) for arch in archs]:
      # Fix: dropped the unused `armhf = ""` local and the misleading
      # "Jessie" comment (noarm actually lists yakkety/zesty/artful).
      if chroot.find("armhf") >= 0 and dist in noarm:
        continue  # these dists are skipped for armhf
      chroots = subprocess.check_output(['schroot', '-l']).decode("utf-8").split("\n")
      if ("source:%s" % chroot) not in chroots:
        print("build vg=%s %s" % (lvm, chroot))
        # Fix: check_call instead of assert(0==call(...)) — asserts are
        # stripped under python -O and gave no diagnostics.
        if chroot.find("i386") >= 0:
          subprocess.check_call(["sudo", "mk-sbuild", "--vg=%s"%lvm, "--distro=%s" % ubuntu_or_debian[dist], "--arch=i386", "--personality=linux32", dist])
        else:
          subprocess.check_call(["sudo", "mk-sbuild", "--vg=%s"%lvm, "--distro=%s" % ubuntu_or_debian[dist], dist])
        # Enable the source-root entry mk-sbuild leaves commented out.
        subprocess.check_call(["sudo", "sed", "-i", "s/^#source-root-/source-root-/", "/etc/schroot/chroot.d/sbuild-%s" % chroot])
      for cmd in [
        ["apt-get", "-qq", "update"],
        ["apt-get", "dist-upgrade", "-qy"],
        ["apt-get", "install", "-qy", "default-jdk", "aptitude", "git"],
        ["apt-get", "clean"],
        ["apt-get", "-y", "autoremove"]
      ]:
        sys.stdout.flush()
        schroot_cmd = ["schroot", "--directory=/tmp", "-c", "source:%s" % chroot, "-u", "root", "--"] + cmd
        # Run twice before giving up: apt in fresh chroots can fail transiently.
        if not (0 == subprocess.call(schroot_cmd) or 0 == subprocess.call(schroot_cmd)):
          raise Exception("Failed to run command %s in chroot %s" % (cmd, chroot))

def hasBinaryPackage(sftp, projects, dist, arch, project, version):
  """Return True when the built .deb already exists in the remote apt pool."""
  remote = "%s/contrib-%s/%s_%s-1_%s.deb" % (projects["AptPool"], dist, project.lower(), version, arch)
  return sftp.exists(remote)

def sourcePackagePath(projects, project, version, ext="-1.dsc"):
  """Remote path of one source-package file (default: the .dsc) in the pool."""
  pool = projects["AptPool"]
  return "{}/contrib/{}_{}{}".format(pool, project.lower(), version, ext)
def hasSourcePackage(sftp, projects, project, version):
  """Return True when the project's source .dsc is already on the remote."""
  return sftp.exists(sourcePackagePath(projects, project, version))

# Load the build configuration: project list, dists, repo locations and
# remote apt-pool paths.
projects = json.load(open("projects.json","r"))

baseRepo = os.path.expanduser(projects["BaseRepo"])
buildScriptsDir = "BuildScripts"
# Clone/update the packaging scripts repo (supplies the per-project
# debian/ directories consumed by makeSource).
(buildRepo, buildHead) = makeRepo(buildScriptsDir, os.path.join(baseRepo, projects["BuildScripts"]), bare=False)

if not os.path.exists("sources"):
  os.mkdir("sources")
#updateSources(projects)
# Make sure all sbuild chroots exist and are upgraded before building.
updateVMs(projects)

# Matches "<dir/>name_version_arch.deb", capturing the package name and arch.
keyDebFileRegex = re.compile(r"^(.*/)?([^_]*)_[^_]*_([a-z0-9]*)[.]deb$")
def keyDebFile(f):
  """Reduce a .deb path to a 'package:arch' key; non-.deb paths key as themselves.

  Fix: the original used a bare `except:` to handle the no-match case
  (m.group raising AttributeError on None), which also swallowed
  unrelated errors such as KeyboardInterrupt. Test for the miss
  explicitly instead.
  """
  m = keyDebFileRegex.search(f)
  if m is None:
    return f
  return m.group(2) + ":" + m.group(3)

def reconnect():
  """Open a fresh SFTP connection to the apt server with the build key."""
  key_path = os.path.expanduser('~/.ssh/id_rsa_sftp_build')
  return pysftp.Connection('build.openmodelica.org', username='build', private_key=key_path)
# Main build loop. For every project (lowest BuildPriority first, then by
# name) and every stability channel: make sure the source package is on
# the remote, then for each dist/arch build any missing binary package
# with sbuild and upload the results.
with reconnect() as sftp:
  print(sftp.listdir(projects["AptPool"]))
  for project in sorted(projects["Projects"].keys(), key=lambda k: (projects["Projects"][k]["BuildPriority"],k)):
    p=projects["Projects"][project]
    sourceDir = projects["SourceDir"]
    baseRepo = os.path.expanduser(projects["BaseRepo"])
    buildScriptsDir = "BuildScripts"
    makeRepo(project, os.path.join(baseRepo, project), bare=False)
    for stability in ["release", "stable", "nightly"]:
      version = p[stability]
      repo_dir = project
      g = git.cmd.Git(repo_dir)
      # print("Source %s %s? %s" % (project,version,hasSourcePackage(sftp, projects, project, version)))
      # NOTE(review): `hash` shadows the builtin; it keeps the raw value
      # (sha or describe-string) while `version` below becomes the
      # describe-style version.
      hash = version
      if version is None:
        print("Warning, got version none for %s" % str(p))
      # A 40-char value is a raw commit sha; describe it into a version.
      version = version if len(version)!=40 else describe(g, hash)
      if not hasSourcePackage(sftp, projects, project, version):
        # NOTE(review): `project` is a string here, so this is a SUBSTRING
        # test on the project NAME — updateSources does `"SeparateSource"
        # in project` on the project DICT. Probably meant `in p`; confirm.
        if "SeparateSource" in project:
          raise Exception("We assume that the source files have already been created for project %s" % project)
        if not os.path.exists(project):
          os.mkdir(project)
        (repo, head) = makeRepo(repo_dir, os.path.join(baseRepo, p["repo"]))
        makeSource(project, baseRepo, sourceDir, buildScriptsDir, g, repo, hash)
        # Upload the freshly-built source files to the remote pool.
        for ext in ["-1.dsc","-1.debian.tar.xz", ".orig.tar.xz"]:
          flocal = "sources/%s_%s%s" % (project.lower(),version,ext)
          print("Uploading %s" % flocal)
          sftp.put(flocal, sourcePackagePath(projects, project, version, ext=ext))
      else:
        # Source already remote: fetch any file we are missing locally,
        # since sbuild needs them next to the build directory.
        for ext in ["-1.dsc","-1.debian.tar.xz", ".orig.tar.xz"]:
          if not os.path.exists("sources/%s_%s%s" % (project.lower(),version,ext)):
            sftp.get(sourcePackagePath(projects, project, version, ext=ext), "sources/%s_%s%s" % (project.lower(),version,ext))
      for dist in projects["Dists"]:
        #distfiles=projects["ControlFiles"]+("/repos-files/%s-%s" % (dist,stability))
        #distsources=distfiles+".source"
        #lines=sftp.open(distfiles).readlines()
        #debfileshash = {}
        #for l in lines:
        #  debfileshash[keyDebFile(l.strip())] = l.strip()
        # Per-project skip list: entries are [dist,version], [arch,version]
        # or [arch,dist,version].
        ignores = (p.get("Ignore") or [])
        if [dist,version] in ignores:
          print("Skipping %s %s" % (dist,version))
          continue
        for arch in archs:
          if arch=="armhf":
            if dist in noarm:
              continue
          if [arch,version] in ignores or [arch,dist,version] in ignores:
            print("Skipping %s %s %s" % (dist,arch,version))
            continue
          if not hasBinaryPackage(sftp, projects, dist, arch, project, version):
            print("Building %s %s %s %s" % (project,dist,arch,version))
            # Fresh scratch directory; sbuild drops its output here.
            if os.path.exists("tmpbuilddir"):
              shutil.rmtree("tmpbuilddir")
            os.mkdir("tmpbuilddir")
            extra = list(default_sbuild_flags)
            if "extra-packages" in p:
              # Fetch prebuilt dependency .debs from the pool so sbuild
              # can install them via --extra-package.
              for dep in p["extra-packages"][stability] or []:
                dep = dep.replace("$ARCH",arch)
                remotename = "%s/contrib-%s/%s.deb" % (projects["AptPool"], dist, dep)
                print("Fetch binary package %s" % remotename)
                sftp.get(remotename, "tmpbuilddir/%s.deb" % dep)
                extra.append("--extra-package=%s.deb" % dep)
            if "extra-depends" in p:
              extra.append("--add-depends=%s" % p["extra-depends"])
            extra = " ".join(extra)
            #if arch=="armhf":
            #  cmd=["bash", "-c", "cd tmpbuilddir && sbuild --mail-log-to='' %s --host=armhf -A -d %s-amd64-armhf ../sources/%s_%s-1.dsc" % (extra,dist,project.lower(),version)]
            #else:
            cmd=["bash", "-c", "cd tmpbuilddir && sbuild --mail-log-to='' %s -A --host=%s --build=%s -d %s-%s ../sources/%s_%s-1.dsc" % (extra,arch,arch,dist,arch,project.lower(),version)]

            print("%s: Running command: %s" % (datetime.datetime.now().isoformat(), cmd))
            sys.stdout.flush()
            start_time = time.time()
            # Close before the (long) build so the server doesn't time out
            # an idle connection; reconnect() below opens a fresh one.
            # NOTE(review): the `with` block's __exit__ will later close the
            # ORIGINAL connection object, not the reconnect()ed one — verify
            # pysftp tolerates the double close / leaked connection.
            sftp.close()
            if 0!=subprocess.call(cmd):
              print("Build failed, you will find the logs as an artifact in this Hudson Job")
              sys.exit(1)
        #print("%s: Re-running command with mail log" % datetime.datetime.now().isoformat())
              #cmd[2] = cmd[2].replace("--mail-log-to=''", "")
              #assert(0==subprocess.call(cmd))
            print("Elapsed time %.2f minutes" % ((time.time() - start_time)/60))
            # Sometimes, we timeout when building for a long time...
            sftp = reconnect()
            # Upload to a .tmp name first and rename afterwards so the pool
            # never exposes a half-uploaded file.
            filesToRename=[]
            for f in glob.glob("tmpbuilddir/*"):
              remotename = "%s/contrib-%s/%s" % (projects["AptPool"], dist, os.path.basename(f))
              if not sftp.exists(remotename):
                sftp.put(f, "%s.tmp" % remotename)
                filesToRename.append(remotename)
              #if f.endswith(".deb"):
              #  debfileshash[keyDebFile(f)] = "pool/contrib-%s/%s" % (dist, os.path.basename(f))
            for f in filesToRename:
              sftp.rename("%s.tmp" % f, f)
        #sftp.open(distfiles, "w").write("\n".join(sorted(debfileshash.values())))

        #lines = sftp.open(distsources).readlines()
        #debsources = []
        #for l in lines:
        #  if re.search(project.lower()+"_", l):
        #    debsources.append("pool/contrib/%s_%s-1.dsc" % (project,version))
        #  else:
        #    debsources.append(l)
        #sftp.open(distsources, "w").write("".join(debsources))
