#!/usr/bin/env python

"""
script to build the latest binaries for each vehicle type, ready to upload

Peter Barker, August 2017

based on build_binaries.sh by Andrew Tridgell, March 2013

AP_FLAKE8_CLEAN
"""

from __future__ import print_function

import datetime
import optparse
import os
import re
import shutil
import time
import string
import subprocess
import sys
import gzip

# local imports
import generate_manifest
import gen_stable
import build_binaries_history

if sys.version_info[0] < 3:
    running_python3 = False
else:
    running_python3 = True


class build_binaries(object):
    def __init__(self, tags):
        self.tags = tags
        self.dirty = False
        binaries_history_filepath = os.path.join(self.buildlogs_dirpath(),
                                                 "build_binaries_history.sqlite")
        self.history = build_binaries_history.BuildBinariesHistory(binaries_history_filepath)

    def progress(self, string):
        '''pretty-print progress'''
        print("BB: %s" % string)

    def run_git(self, args):
        '''run git with args git_args; returns git's output'''
        cmd_list = ["git"]
        cmd_list.extend(args)
        return self.run_program("BB-GIT", cmd_list)

    def board_branch_bit(self, board):
        '''return a fragment which might modify the branch name.
        this was previously used to have a master-AVR branch etc
        if the board type was apm1 or apm2'''
        return None

    def board_options(self, board):
        '''return board-specific options'''
        if board == "bebop":
            return ["--static"]
        return []

    def run_waf(self, args):
        if os.path.exists("waf"):
            waf = "./waf"
        else:
            waf = os.path.join(".", "modules", "waf", "waf-light")
        cmd_list = [waf]
        cmd_list.extend(args)
        self.run_program("BB-WAF", cmd_list)

    def run_program(self, prefix, cmd_list, show_output=True):
        if show_output:
            self.progress("Running (%s)" % " ".join(cmd_list))
        p = subprocess.Popen(cmd_list, bufsize=1, stdin=None,
                             stdout=subprocess.PIPE, close_fds=True,
                             stderr=subprocess.STDOUT)
        output = ""
        while True:
            x = p.stdout.readline()
            if len(x) == 0:
                returncode = os.waitpid(p.pid, 0)
                if returncode:
                    break
                # select not available on Windows... probably...
                time.sleep(0.1)
                continue
            if running_python3:
                x = bytearray(x)
                x = filter(lambda x: chr(x) in string.printable, x)
                x = "".join([chr(c) for c in x])
            output += x
            x = x.rstrip()
            if show_output:
                print("%s: %s" % (prefix, x))
        (_, status) = returncode
        if status != 0 and show_output:
            self.progress("Process failed (%s)" %
                          str(returncode))
            raise subprocess.CalledProcessError(
                returncode, cmd_list)
        return output

    def run_make(self, args):
        cmd_list = ["make"]
        cmd_list.extend(args)
        self.run_program("BB-MAKE", cmd_list)

    def run_git_update_submodules(self):
        '''if submodules are present initialise and update them'''
        if os.path.exists(os.path.join(self.basedir, ".gitmodules")):
            self.run_git(["submodule",
                          "update",
                          "--init",
                          "--recursive",
                          "-f"])

    def checkout(self, vehicle, ctag, cboard=None, cframe=None, submodule_update=True):
        '''attempt to check out a git tree. Various permutations are
        attempted based on ctag - for example, if the board is avr and ctag
        is bob we will attempt to checkout bob-AVR'''
        if self.dirty:
            self.progress("Skipping checkout for dirty build")
            return True

        self.progress("Trying checkout %s %s %s %s" %
                      (vehicle, ctag, cboard, cframe))
        self.run_git(['stash'])
        if ctag == "latest":
            vtag = "master"
        else:
            tagvehicle = vehicle
            if tagvehicle == "Rover":
                # FIXME: Rover tags in git still named APMrover2 :-(
                tagvehicle = "APMrover2"
            vtag = "%s-%s" % (tagvehicle, ctag)

        branches = []
        if cframe is not None:
            # try frame specific tag
            branches.append("%s-%s" % (vtag, cframe))
        if cboard is not None:
            bbb = self.board_branch_bit(cboard)
            if bbb is not None:
                # try board type specific branch extension
                branches.append("".join([vtag, bbb]))
        branches.append(vtag)

        for branch in branches:
            try:
                self.progress("Trying branch %s" % branch)
                self.run_git(["checkout", "-f", branch])
                if submodule_update:
                    self.run_git_update_submodules()
                self.run_git(["log", "-1"])
                return True
            except subprocess.CalledProcessError:
                self.progress("Checkout branch %s failed" % branch)

        self.progress("Failed to find tag for %s %s %s %s" %
                      (vehicle, ctag, cboard, cframe))
        return False

    def skip_board_waf(self, board):
        '''check if we should skip this build because we don't support the
        board in this release
        '''

        try:
            out = self.run_program('waf', ['./waf', 'configure', '--board=BOARDTEST'], False)
            lines = out.split('\n')
            needles = ["BOARDTEST' (choose from", "BOARDTEST': choices are"]
            for line in lines:
                for needle in needles:
                    idx = line.find(needle)
                    if idx != -1:
                        break
                if idx != -1:
                    line = line[idx+len(needle):-1]
                    line = line.replace("'", "")
                    line = line.replace(" ", "")
                    boards = line.split(",")
                    return board not in boards
        except IOError as e:
            if e.errno != 2:
                raise

        self.progress("Skipping unsupported board %s" % (board,))
        return True

    def skip_frame(self, board, frame):
        '''returns true if this board/frame combination should not be built'''
        if frame == "heli":
            if board in ["bebop", "aerofc-v1", "skyviper-v2450", "CubeSolo", "CubeGreen-solo", 'skyviper-journey']:
                self.progress("Skipping heli build for %s" % board)
                return True
        return False

    def first_line_of_filepath(self, filepath):
        '''returns the first (text) line from filepath'''
        with open(filepath) as fh:
            line = fh.readline()
        return line

    def skip_build(self, buildtag, builddir):
        '''check if we should skip this build because we have already built
        this version
        '''
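
        # setting FORCE_BUILD in the environment forces a rebuild even if
        # this version has already been built: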
        if os.getenv("FORCE_BUILD", False):
            return False

        if not os.path.exists(os.path.join(self.basedir, '.gitmodules')):
            self.progress("Skipping build without submodules")
            return True

        bname = os.path.basename(builddir)
        ldir = os.path.join(os.path.dirname(os.path.dirname(
            os.path.dirname(builddir))), buildtag, bname)  # FIXME: WTF

        oldversion_filepath = os.path.join(ldir, "git-version.txt")
        if not os.path.exists(oldversion_filepath):
            self.progress("%s doesn't exist - building" % oldversion_filepath)
            return False

        oldversion = self.first_line_of_filepath(oldversion_filepath)
        newversion = self.run_git(["log", "-1"])
        newversion = newversion.splitlines()[0]
        oldversion = oldversion.rstrip()
        newversion = newversion.rstrip()
        self.progress("oldversion=%s newversion=%s" %
                      (oldversion, newversion,))
        if oldversion == newversion:
            self.progress("Skipping build - version match (%s)" %
                          (newversion,))
            return True

        self.progress("%s needs rebuild" % (ldir,))
        return False

    def write_string_to_filepath(self, string, filepath):
        '''writes the entirety of string to filepath'''
        with open(filepath, "w") as x:
            x.write(string)

    def version_h_path(self, src):
        '''return path to version.h'''
        if src == 'AP_Periph':
            return os.path.join('Tools', src, "version.h")
        return os.path.join(src, "version.h")

    def addfwversion_gitversion(self, destdir, src):
        # create git-version.txt:
        gitlog = self.run_git(["log", "-1"])
        gitversion_filepath = os.path.join(destdir, "git-version.txt")
        gitversion_content = gitlog
        versionfile = self.version_h_path(src)
        if os.path.exists(versionfile):
            content = self.read_string_from_filepath(versionfile)
            match = re.search('define.THISFIRMWARE "([^"]+)"', content)
            if match is None:
                self.progress("Failed to retrieve THISFIRMWARE from version.h")
                self.progress("Content: (%s)" % content)
            self.progress("Writing version info to %s" %
                          (gitversion_filepath,))
            gitversion_content += "\nAPMVERSION: %s\n" % (match.group(1))
        else:
            self.progress("%s does not exist" % versionfile)

        self.write_string_to_filepath(gitversion_content, gitversion_filepath)

    def addfwversion_firmwareversiontxt(self, destdir, src):
        # create firmware-version.txt
        versionfile = self.version_h_path(src)
        if not os.path.exists(versionfile):
            self.progress("%s does not exist" % (versionfile,))
            return
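
        # the regex below is intended to match a version.h line of this
        # general shape (illustrative example only):
        #   #define FIRMWARE_VERSION 4,1,0,FIRMWARE_VERSION_TYPE_DEV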
        ss = r".*define +FIRMWARE_VERSION[ ]+(?P<major>\d+)[ ]*,[ ]*" \
             r"(?P<minor>\d+)[ ]*,[ ]*(?P<point>\d+)[ ]*,[ ]*" \
             r"(?P<type>[A-Z_]+)[ ]*"
        content = self.read_string_from_filepath(versionfile)
        match = re.search(ss, content)
        if match is None:
            self.progress("Failed to retrieve FIRMWARE_VERSION from version.h")
            self.progress("Content: (%s)" % content)
            return
        ver = "%d.%d.%d-%s\n" % (int(match.group("major")),
                                 int(match.group("minor")),
                                 int(match.group("point")),
                                 match.group("type"))
        firmware_version_filepath = "firmware-version.txt"
        self.progress("Writing version (%s) to %s" %
                      (ver, firmware_version_filepath,))
        self.write_string_to_filepath(
            ver, os.path.join(destdir, firmware_version_filepath))

    def addfwversion(self, destdir, src):
        '''write version information into destdir'''
        self.addfwversion_gitversion(destdir, src)
        self.addfwversion_firmwareversiontxt(destdir, src)

    def read_string_from_filepath(self, filepath):
        '''returns content of filepath as a string'''
        with open(filepath, 'rb') as fh:
            content = fh.read()
        if running_python3:
            # bytes were read; callers expect text (e.g. for re.search)
            content = content.decode('utf-8')
        return content

    def string_in_filepath(self, string, filepath):
        '''returns true if string exists in the contents of filepath'''
        return string in self.read_string_from_filepath(filepath)

    def mkpath(self, path):
        '''make directory path and all elements leading to it'''
        '''distutils.dir_util.mkpath was playing up'''
        try:
            os.makedirs(path)
        except OSError as e:
            if e.errno != 17:  # EEXIST
                raise e

    def copyit(self, afile, adir, tag, src):
        '''copies afile into various places, adding metadata'''
        bname = os.path.basename(adir)
        tdir = os.path.join(os.path.dirname(os.path.dirname(
            os.path.dirname(adir))), tag, bname)
        if tag == "latest":
            # we keep a permanent archive of all "latest" builds,
            # their path including a build timestamp:
            self.mkpath(adir)
            self.progress("Copying %s to %s" % (afile, adir,))
            shutil.copy(afile, adir)
            self.addfwversion(adir, src)
        # the most recent build of every tag is kept around:
        self.progress("Copying %s to %s" % (afile, tdir))
        self.mkpath(tdir)
        self.addfwversion(tdir, src)
        shutil.copy(afile, tdir)

    def touch_filepath(self, filepath):
        '''creates a file at filepath, or updates the timestamp on filepath'''
        if os.path.exists(filepath):
            os.utime(filepath, None)
        else:
            with open(filepath, "a"):
                pass

    def build_vehicle(self, tag, vehicle, boards, vehicle_binaries_subdir,
                      binaryname, px4_binaryname, frames=[None]):
        '''build vehicle binaries'''
        self.progress("Building %s %s binaries (cwd=%s)" %
                      (vehicle, tag, os.getcwd()))

        board_count = len(boards)
        count = 0
        for board in sorted(boards, key=str.lower):
            now = datetime.datetime.now()
            count += 1
            self.progress("[%u/%u] Building board: %s at %s" %
                          (count, board_count, board, str(now)))
            for frame in frames:
                if frame is not None:
                    self.progress("Considering frame %s for board %s" %
                                  (frame, board))
                if frame is None:
                    framesuffix = ""
                else:
                    framesuffix = "-%s" % frame
                if not self.checkout(vehicle, tag, board, frame, submodule_update=False):
                    msg = ("Failed checkout of %s %s %s %s" %
                           (vehicle, board, tag, frame,))
                    self.progress(msg)
                    self.error_strings.append(msg)
                    continue

                self.progress("Building %s %s %s binaries %s" %
                              (vehicle, tag, board, frame))
                ddir = os.path.join(self.binaries,
                                    vehicle_binaries_subdir,
                                    self.hdate_ym,
                                    self.hdate_ymdhm,
                                    "".join([board, framesuffix]))
                if self.skip_build(tag, ddir):
                    continue
                if self.skip_frame(board, frame):
                    continue

                # we do the submodule update only after the skip_build and
                # skip_frame checks to avoid doing it on builds we will not
                # be running
                self.run_git_update_submodules()

                if self.skip_board_waf(board):
                    continue

                if os.path.exists(self.buildroot):
                    shutil.rmtree(self.buildroot)

                self.remove_tmpdir()

                githash = self.run_git(["rev-parse", "HEAD"]).rstrip()

                t0 = time.time()

                self.progress("Configuring for %s in %s" %
                              (board, self.buildroot))
                try:
                    waf_opts = ["configure",
                                "--board", board,
                                "--out", self.buildroot,
                                "clean"]
                    waf_opts.extend(self.board_options(board))
                    self.run_waf(waf_opts)
                except subprocess.CalledProcessError:
                    self.progress("waf configure failed")
                    continue
                try:
                    target = os.path.join("bin",
                                          "".join([binaryname, framesuffix]))
                    self.run_waf(["build", "--targets", target])
                except subprocess.CalledProcessError:
                    msg = ("Failed build of %s %s%s %s" %
                           (vehicle, board, framesuffix, tag))
                    self.progress(msg)
                    self.error_strings.append(msg)
                    # record some history about this build
                    t1 = time.time()
                    time_taken_to_build = t1-t0
                    self.history.record_build(githash, tag, vehicle, board, frame, None, t0, time_taken_to_build)
                    continue

                t1 = time.time()
                time_taken_to_build = t1-t0
                self.progress("Building %s %s %s %s took %u seconds" %
                              (vehicle, tag, board, frame, time_taken_to_build))

                bare_path = os.path.join(self.buildroot,
                                         board,
                                         "bin",
                                         "".join([binaryname, framesuffix]))
                files_to_copy = []
                extensions = [".px4", ".apj", ".abin", "_with_bl.hex", ".hex"]
                if vehicle == 'AP_Periph':
                    # need bin file for uavcan-gui-tool and MissionPlanner
                    extensions.append('.bin')
                for extension in extensions:
                    filepath = "".join([bare_path, extension])
                    if os.path.exists(filepath):
                        files_to_copy.append(filepath)
                if not os.path.exists(bare_path):
                    raise Exception("No elf file?!")
                # only copy the elf if we don't have other files to copy
                if len(files_to_copy) == 0:
                    files_to_copy.append(bare_path)

                for path in files_to_copy:
                    try:
                        self.copyit(path, ddir, tag, vehicle)
                    except Exception as e:
                        self.progress("Failed to copy %s to %s: %s" % (path, ddir, str(e)))
                # why is touching this important? -pb20170816
                self.touch_filepath(os.path.join(self.binaries,
                                                 vehicle_binaries_subdir, tag))

                # record some history about this build
                self.history.record_build(githash, tag, vehicle, board, frame, bare_path, t0, time_taken_to_build)

        if not self.checkout(vehicle, tag, "PX4", None):
            self.checkout(vehicle, "latest")
            return

        board_list = self.run_program('BB-WAF', ['./waf', 'list_boards'])
        board_list = board_list.split(' ')
        self.checkout(vehicle, "latest")
        if 'px4-v2' not in board_list:
            print("Skipping px4 builds")
            return

        # PX4-building
        board = "px4"
        for frame in frames:
            self.progress("Building frame %s for board %s" % (frame, board))
            if frame is None:
                framesuffix = ""
            else:
                framesuffix = "-%s" % frame

            if not self.checkout(vehicle, tag, "PX4", frame):
                msg = ("Failed checkout of %s %s %s %s" %
                       (vehicle, "PX4", tag, frame))
                self.progress(msg)
                self.error_strings.append(msg)
                self.checkout(vehicle, "latest")
                continue

            try:
                deadwood = "../Build.%s" % vehicle
                if os.path.exists(deadwood):
                    self.progress("#### Removing (%s)" % deadwood)
                    shutil.rmtree(os.path.join(deadwood))
            except Exception as e:
                self.progress("FIXME: narrow exception (%s)" % repr(e))

            self.progress("Building %s %s PX4%s binaries" %
                          (vehicle, tag, framesuffix))
            ddir = os.path.join(self.binaries,
                                vehicle_binaries_subdir,
                                self.hdate_ym,
                                self.hdate_ymdhm,
                                "".join(["PX4", framesuffix]))
            if self.skip_build(tag, ddir):
                continue

            for v in ["v1", "v2", "v3", "v4", "v4pro"]:
                px4_v = "%s-%s" % (board, v)

                if self.skip_board_waf(px4_v):
                    continue

                if os.path.exists(self.buildroot):
                    shutil.rmtree(self.buildroot)

                self.progress("Configuring for %s in %s" %
                              (px4_v, self.buildroot))
                try:
                    self.run_waf(["configure", "--board", px4_v,
                                  "--out", self.buildroot, "clean"])
                except subprocess.CalledProcessError:
                    self.progress("waf configure failed")
                    continue
                try:
                    self.run_waf([
                        "build",
                        "--targets",
                        os.path.join("bin",
                                     "".join([binaryname, framesuffix]))])
                except subprocess.CalledProcessError:
                    msg = ("Failed build of %s %s%s %s for %s" %
                           (vehicle, board, framesuffix, tag, v))
                    self.progress(msg)
                    self.error_strings.append(msg)
                    continue

                oldfile = os.path.join(self.buildroot, px4_v, "bin",
                                       "%s%s.px4" % (binaryname, framesuffix))
                newfile = "%s-%s.px4" % (px4_binaryname, v)
                self.progress("Copying (%s) to (%s)" % (oldfile, newfile,))
                try:
                    shutil.copyfile(oldfile, newfile)
                except Exception as e:
                    self.progress("FIXME: narrow exception (%s)" % repr(e))
                    msg = ("Failed build copy of %s PX4%s %s for %s" %
                           (vehicle, framesuffix, tag, v))
                    self.progress(msg)
                    self.error_strings.append(msg)
                    continue
                # FIXME: why the two stage copy?!
                self.copyit(newfile, ddir, tag, vehicle)
        self.checkout(vehicle, "latest")

    def common_boards(self):
        '''returns list of boards common to all vehicles'''
        return ["fmuv2",
                "fmuv3",
                "fmuv5",
                "mindpx-v2",
                "erlebrain2",
                "navigator",
                "navio",
                "navio2",
                "edge",
                "pxf",
                "pxfmini",
                "KakuteF4",
                "KakuteF7",
                "KakuteF7Mini",
                "KakuteF4Mini",
                "MambaF405v2",
                "MatekF405",
                "MatekF405-bdshot",
                "MatekF405-STD",
                "MatekF405-Wing",
                "MatekF765-Wing",
                "MatekF405-CAN",
                "MatekH743",
                "MatekH743-bdshot",
                "OMNIBUSF7V2",
                "sparky2",
                "omnibusf4",
                "omnibusf4pro",
                "omnibusf4pro-bdshot",
                "omnibusf4v6",
                "OmnibusNanoV6",
                "OmnibusNanoV6-bdshot",
                "mini-pix",
                "airbotf4",
                "revo-mini",
                "revo-mini-bdshot",
                "revo-mini-i2c",
                "revo-mini-i2c-bdshot",
                "CubeBlack",
                "CubeBlack+",
                "CubePurple",
                "Pixhawk1",
                "Pixhawk1-1M",
                "Pixhawk4",
                "Pix32v5",
                "PH4-mini",
                "CUAVv5",
                "CUAVv5Nano",
                "CUAV-Nora",
                "CUAV-X7",
                "CUAV-X7-bdshot",
                "mRoX21",
                "Pixracer",
                "Pixracer-bdshot",
                "F4BY",
                "mRoX21-777",
                "mRoControlZeroF7",
                "mRoNexus",
                "mRoPixracerPro",
                "mRoPixracerPro-bdshot",
                "mRoControlZeroOEMH7",
                "mRoControlZeroClassic",
                "mRoControlZeroH7",
                "mRoControlZeroH7-bdshot",
                "F35Lightning",
                "speedybeef4",
                "SuccexF4",
                "DrotekP3Pro",
                "VRBrain-v51",
                "VRBrain-v52",
                "VRUBrain-v51",
                "VRCore-v10",
                "VRBrain-v54",
                "TBS-Colibri-F7",
                "Durandal",
                "Durandal-bdshot",
                "CubeOrange",
                "CubeOrange-bdshot",
                "CubeYellow",
                "R9Pilot",
                "QioTekZealotF427",
                "BeastH7",
                "BeastF7",
                "FlywooF745",
                "luminousbee5",
                # SITL targets
                "SITL_x86_64_linux_gnu",
                "SITL_arm_linux_gnueabihf",
                ]

    def AP_Periph_boards(self):
        '''returns list of boards for AP_Periph'''
        return ["f103-GPS",
                "f103-ADSB",
                "f103-RangeFinder",
                "f303-GPS",
                "f303-Universal",
                "f303-M10025",
                "f303-M10070",
                "f303-MatekGPS",
                "f405-MatekGPS",
                "f103-Airspeed",
                "CUAV_GPS",
                "ZubaxGNSS",
                "CubeOrange-periph",
                "CubeBlack-periph",
                "MatekH743-periph",
                "HitecMosaic",
                "FreeflyRTK",
                "HolybroGPS",
                ]

    def build_arducopter(self, tag):
        '''build Copter binaries'''
        boards = []
        boards.extend(["skyviper-v2450", "aerofc-v1", "bebop", "CubeSolo", "CubeGreen-solo", "skyviper-journey"])
        boards.extend(self.common_boards()[:])
        self.build_vehicle(tag,
                           "ArduCopter",
                           boards,
                           "Copter",
                           "arducopter",
                           "ArduCopter",
                           frames=[None, "heli"])

    def build_arduplane(self, tag):
        '''build Plane binaries'''
        boards = self.common_boards()[:]
        boards.append("disco")
        self.build_vehicle(tag,
                           "ArduPlane",
                           boards,
                           "Plane",
                           "arduplane",
                           "ArduPlane")

    def build_antennatracker(self, tag):
        '''build Tracker binaries'''
        boards = self.common_boards()[:]
        self.build_vehicle(tag,
                           "AntennaTracker",
                           boards,
                           "AntennaTracker",
                           "antennatracker",
                           "AntennaTracker",)

    def build_rover(self, tag):
        '''build Rover binaries'''
        boards = self.common_boards()
        self.build_vehicle(tag,
                           "Rover",
                           boards,
                           "Rover",
                           "ardurover",
                           "Rover")

    def build_ardusub(self, tag):
        '''build Sub binaries'''
        self.build_vehicle(tag,
                           "ArduSub",
                           self.common_boards(),
                           "Sub",
                           "ardusub",
                           "ArduSub")

    def build_AP_Periph(self, tag):
        '''build AP_Periph binaries'''
        boards = self.AP_Periph_boards()
        self.build_vehicle(tag,
                           "AP_Periph",
                           boards,
                           "AP_Periph",
                           "AP_Periph",
                           "AP_Periph")

    def generate_manifest(self):
        '''generate manifest files for GCS to download'''
        self.progress("Generating manifest")
        base_url = 'https://firmware.ardupilot.org'
        generator = generate_manifest.ManifestGenerator(self.binaries,
                                                         base_url)
        content = generator.json()
        new_json_filepath = os.path.join(self.binaries, "manifest.json.new")
        self.write_string_to_filepath(content, new_json_filepath)
        # provide a pre-compressed manifest. For reference, a 7M manifest
        # "gzip -9"s to 300k in 1 second, "xz -e"s to 80k in 26 seconds
        new_json_filepath_gz = os.path.join(self.binaries,
                                            "manifest.json.gz.new")
        with gzip.open(new_json_filepath_gz, 'wb') as gf:
            if running_python3:
                content = bytes(content, 'ascii')
            gf.write(content)
        json_filepath = os.path.join(self.binaries, "manifest.json")
        json_filepath_gz = os.path.join(self.binaries, "manifest.json.gz")
        shutil.move(new_json_filepath, json_filepath)
        shutil.move(new_json_filepath_gz, json_filepath_gz)
        self.progress("Manifest generation successful")

        self.progress("Generating stable releases")
        gen_stable.make_all_stable(self.binaries)
        self.progress("Generate stable releases done")

    def validate(self):
        '''run pre-run validation checks'''
        if "dirty" in self.tags:
            if len(self.tags) > 1:
                raise ValueError("dirty must be only tag if present (%s)" %
                                 (str(self.tags)))
            self.dirty = True

    def pollute_env_from_file(self, filepath):
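        # import NAME=value settings from filepath (here, config.mk) into
        # this process's environment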
        with open(filepath) as f:
            for line in f:
                try:
                    (name, value) = str.split(line, "=")
                except ValueError as e:
                    self.progress("%s: split failed: %s" % (filepath, str(e)))
                    continue
                value = value.rstrip()
                self.progress("%s: %s=%s" % (filepath, name, value))
                os.environ[name] = value

    def remove_tmpdir(self):
        if os.path.exists(self.tmpdir):
            self.progress("Removing (%s)" % (self.tmpdir,))
            shutil.rmtree(self.tmpdir)

    def buildlogs_dirpath(self):
        return os.getenv("BUILDLOGS",
                         os.path.join(os.getcwd(), "..", "buildlogs"))

    def run(self):
        self.validate()

        prefix_bin_dirpath = os.path.join(os.environ.get('HOME'),
                                          "prefix", "bin")
        origin_env_path = os.environ.get("PATH")
        os.environ["PATH"] = ':'.join([prefix_bin_dirpath, origin_env_path,
                                       "/bin", "/usr/bin"])
        if 'BUILD_BINARIES_PATH' in os.environ:
            self.tmpdir = os.environ['BUILD_BINARIES_PATH']
        else:
            self.tmpdir = os.path.join(os.getcwd(), 'build.tmp.binaries')
        os.environ["TMPDIR"] = self.tmpdir

        print(self.tmpdir)
        self.remove_tmpdir()

        self.progress("Building in %s" % self.tmpdir)

        now = datetime.datetime.now()
        self.progress(now)

        if not self.dirty:
            self.run_git(["checkout", "-f", "master"])
        githash = self.run_git(["rev-parse", "HEAD"])
        githash = githash.rstrip()
        self.progress("git hash: %s" % str(githash))

        self.hdate_ym = now.strftime("%Y-%m")
        self.hdate_ymdhm = now.strftime("%Y-%m-%d-%H:%M")

        self.mkpath(os.path.join("binaries", self.hdate_ym,
                                 self.hdate_ymdhm))
        self.binaries = os.path.join(self.buildlogs_dirpath(), "binaries")
        self.basedir = os.getcwd()
        self.error_strings = []

        if os.path.exists("config.mk"):
            # FIXME: narrow exception
            self.pollute_env_from_file("config.mk")

        if not self.dirty:
            self.run_git_update_submodules()
        self.buildroot = os.path.join(os.environ.get("TMPDIR"),
                                      "binaries.build")

        for tag in self.tags:
            t0 = time.time()
            self.build_arducopter(tag)
            self.build_arduplane(tag)
            self.build_rover(tag)
            self.build_antennatracker(tag)
            self.build_ardusub(tag)
            self.build_AP_Periph(tag)
            self.history.record_run(githash, tag, t0, time.time()-t0)

        if os.path.exists(self.tmpdir):
            shutil.rmtree(self.tmpdir)

        self.generate_manifest()

        for error_string in self.error_strings:
            self.progress("%s" % error_string)
        sys.exit(len(self.error_strings))


if __name__ == '__main__':
    parser = optparse.OptionParser("build_binaries.py")

    parser.add_option("", "--tags", action="append", type="string",
                      default=[], help="tags to build")
    cmd_opts, cmd_args = parser.parse_args()

    tags = cmd_opts.tags
    if len(tags) == 0:
        # FIXME: wedge this defaulting into parser somehow
        tags = ["stable", "beta", "latest"]

    bb = build_binaries(tags)
    bb.run()