travis-ci upload cppcheck, scan-build, and coverage to s3

Daniel Agar 2017-04-23 23:48:36 -04:00
parent 1ebe215c60
commit e4f55e4526
4 changed files with 28 additions and 15 deletions

.gitignore
@@ -93,3 +93,6 @@ airframes.md
 airframes.xml
 cppcheck-result.xml
+cppcheck
+scan-build

.travis.yml
@@ -61,11 +61,11 @@ before_install:
 env:
   global:
     # COVERITY KEY
     - secure: "NyaJoCGjU0Xc90Y6bxVYWLgjrJX5HlZsm/SPxruZ6I7xkGo19fJIFzGegOHQFR32D4AiKjllfjTUNy+ncckWplind0QwxtF4/kxXrz9XBfiby6X8jLYXIekrB6Ay0mBLGbniDdh+lpWtcyop6Dmkt5bdJCJuKY2nv9ENnhhs07M="
     # AWS KEY: $PX4_AWS_KEY
     - secure: "XknnZHWBbpHbN4f3fuAVwUztdLIu8ej4keC3aQSDofo3uw8AFEzojfsQsN9u77ShWSIV4iYJWh9C9ALkCx7TocJ+xYjiboo10YhM9lH/8u+EXjYWG6GHS8ua0wkir+cViSxoLNaMtmcb/rPTicJecAGANxLsIHyBAgTL3fkbLSA="
     # AWS SECRET: $PX4_AWS_SECRET
     - secure: "h6oajlW68dWIr+wZhO58Dv6e68dZHrBLVA6lPXZmheFQBW6Xam1HuLGA0LOW6cL9TnrAsOZ8g4goB58eMQnMEijFZKi3mhRwZhd/Xjq/ZGJOWBUrLoQHZUw2dQk5ja5vmUlKEoQnFZjDuMjx8KfX5ZMNy8A3yssWZtJYHD8c+bk="
 script:
@@ -86,17 +86,21 @@ script:
     fi
 after_success:
-  # s3 deploy airframe and parameter metadata
-  - if [[ "${BUILD_TARGET}" = "px4_metadata" && "${TRAVIS_PULL_REQUEST}" = "false" && "${TRAVIS_BRANCH}" != "coverity" ]]; then
-      ./Tools/docker_run.sh 'make s3put_px4_metadata AWS_S3_BUCKET="px4-travis/Firmware/${TRAVIS_BRANCH}"';
+  # s3 upload airframe and parameter metadata
+  - if [[ "${BUILD_TARGET}" = "px4_metadata" && "${TRAVIS_PULL_REQUEST}" = "false" ]]; then
+      ./Tools/docker_run.sh 'make s3put_metadata AWS_S3_BUCKET="px4-travis/Firmware/${TRAVIS_BRANCH}"';
     fi
   # s3 upload clang scan-build output for master branch
-  - if [[ "${BUILD_TARGET}" = "scan-build" && "${TRAVIS_PULL_REQUEST}" = "false" && "${TRAVIS_BRANCH}" = "master" ]]; then
-      ./Tools/docker_run.sh 'make s3put_scan-build AWS_S3_BUCKET="px4-travis/scan-build"';
+  - if [[ "${BUILD_TARGET}" = "scan-build" && "${TRAVIS_PULL_REQUEST}" = "false" ]]; then
+      ./Tools/docker_run.sh 'make s3put_scan-build AWS_S3_BUCKET="px4-travis/Firmware/${TRAVIS_BRANCH}/scan-build"';
     fi
   # s3 upload cppcheck output for master branch
-  - if [[ "${BUILD_TARGET}" = "cppcheck" && "${TRAVIS_PULL_REQUEST}" = "false" && "${TRAVIS_BRANCH}" = "master" ]]; then
-      ./Tools/docker_run.sh 'make s3put_cppcheck AWS_S3_BUCKET="px4-travis/cppcheck"';
+  - if [[ "${BUILD_TARGET}" = "cppcheck" && "${TRAVIS_PULL_REQUEST}" = "false" ]]; then
+      ./Tools/docker_run.sh 'make s3put_cppcheck AWS_S3_BUCKET="px4-travis/Firmware/${TRAVIS_BRANCH}/cppcheck"';
     fi
+  # s3 upload code coverage
+  - if [[ "${BUILD_TARGET}" = "tests_coverage" && "${TRAVIS_PULL_REQUEST}" = "false" ]]; then
+      ./Tools/docker_run.sh 'make s3put_coverage AWS_S3_BUCKET="px4-travis/Firmware/${TRAVIS_BRANCH}/coverage"';
+    fi
   # coveralls code coverage report
   - if [[ "${BUILD_TARGET}" = "tests_coverage" && "${TRAVIS_PULL_REQUEST}" = "false" ]]; then

Makefile
@@ -287,10 +287,13 @@ s3put_px4_metadata: px4_metadata
 	@$(SRC_DIR)/Tools/s3put.sh parameters.md

 s3put_scan-build: scan-build
-	$(SRC_DIR)/Tools/s3put.sh `find build_scan-build -mindepth 1 -maxdepth 1 -type d`/
+	@cd $(SRC_DIR) && ./Tools/s3put.sh `find build_scan-build -mindepth 1 -maxdepth 1 -type d`/

 s3put_cppcheck: cppcheck
-	$(SRC_DIR)/Tools/s3put.sh build_cppcheck-htmlreport/
+	@cd $(SRC_DIR) && ./Tools/s3put.sh cppcheck/

+s3put_coverage: tests_coverage
+	@cd $(SRC_DIR) && ./Tools/s3put.sh coverage-html/
+
 # Astyle
 # --------------------------------------------------------------------
@@ -358,7 +361,7 @@ clang-tidy-quiet: posix_sitl_default-clang
 cppcheck: posix_sitl_default
 	@cppcheck --enable=all --std=c++11 --std=c99 --std=posix --project=build_posix_sitl_default/compile_commands.json --xml-version=2 2> cppcheck-result.xml
-	@cppcheck-htmlreport --source-encoding=ascii --file=cppcheck-result.xml --report-dir=build_cppcheck-htmlreport --source-dir=$(SRC_DIR)/src/
+	@cppcheck-htmlreport --source-encoding=ascii --file=cppcheck-result.xml --report-dir=cppcheck --source-dir=$(SRC_DIR)/src/

 # Cleanup
 # --------------------------------------------------------------------
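The new upload targets can also be exercised outside of CI. A hedged example, assuming s3cmd is installed; the credentials and bucket below are placeholders, not values from the commit (make exports command-line variables such as AWS_S3_BUCKET to the recipe's environment, where Tools/s3put.sh reads them):

# Placeholder credentials and bucket; substitute real values.
export AWS_ACCESS_KEY_ID=AKIAXXXXXXXXXXXXXXXX
export AWS_SECRET_ACCESS_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
# Build the coverage report and push coverage-html/ to the given bucket.
make s3put_coverage AWS_S3_BUCKET=my-test-bucket/coverage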

Tools/s3put.sh
@@ -11,9 +11,12 @@ filename=${1}
 [ -z "$AWS_SECRET_ACCESS_KEY" ] && { echo "ERROR: Need to set AWS_SECRET_ACCESS_KEY"; exit 1; }
 [ -z "$AWS_S3_BUCKET" ] && { echo "ERROR: Need to set AWS_S3_BUCKET"; exit 1; }

-if [ -e ${filename} ]; then
+if [ -f ${filename} ]; then
 	base_file_name=`basename $filename`
-	s3cmd --access_key=${AWS_ACCESS_KEY_ID} --secret_key=${AWS_SECRET_ACCESS_KEY} put -r ${filename} s3://${AWS_S3_BUCKET}/${base_file_name}
+	s3cmd --access_key=${AWS_ACCESS_KEY_ID} --secret_key=${AWS_SECRET_ACCESS_KEY} put ${filename} s3://${AWS_S3_BUCKET}/${base_file_name}
+elif [ -d ${filename} ]; then
+	dir_name=$filename
+	s3cmd --access_key=${AWS_ACCESS_KEY_ID} --secret_key=${AWS_SECRET_ACCESS_KEY} put -r ${dir_name} s3://${AWS_S3_BUCKET}/
 else
 	echo "ERROR: ${file} doesn't exist"
 	exit 1
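With the added elif branch, the script now distinguishes regular files from directories. A usage sketch follows; the credentials are placeholders and the bucket value is only an example modeled on the prefixes used in the .travis.yml changes above:

export AWS_ACCESS_KEY_ID=AKIAXXXXXXXXXXXXXXXX                 # placeholder
export AWS_SECRET_ACCESS_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # placeholder
export AWS_S3_BUCKET=px4-travis/Firmware/master/coverage      # example prefix

# A regular file is uploaded under its basename:
#   s3cmd put parameters.md s3://$AWS_S3_BUCKET/parameters.md
./Tools/s3put.sh parameters.md

# A directory is uploaded recursively into the bucket prefix:
#   s3cmd put -r coverage-html/ s3://$AWS_S3_BUCKET/
./Tools/s3put.sh coverage-html/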