---
.jenkinsci/build-coverage.groovy | 15 +
.jenkinsci/debug-build.groovy | 160 +++---
.jenkinsci/docker-cleanup.groovy | 26 +-
.jenkinsci/docker-pull-or-build.groovy | 44 +-
.jenkinsci/enums.groovy | 12 +
.jenkinsci/github-api.groovy | 137 +++++
.jenkinsci/linux-post-step.groovy | 20 -
.jenkinsci/mac-debug-build.groovy | 44 ++
.jenkinsci/mac-release-build.groovy | 8 +-
.jenkinsci/notifications.groovy | 61 +++
.jenkinsci/post-step.groovy | 40 ++
.jenkinsci/pre-build.groovy | 14 +
.jenkinsci/release-build.groovy | 13 +-
.jenkinsci/selected-branches-coverage.groovy | 13 -
.jenkinsci/set-parallelism.groovy | 19 +
.jenkinsci/test-launcher.groovy | 41 ++
Jenkinsfile | 517 +++++++++++--------
17 files changed, 838 insertions(+), 346 deletions(-)
create mode 100644 .jenkinsci/build-coverage.groovy
create mode 100644 .jenkinsci/enums.groovy
create mode 100644 .jenkinsci/github-api.groovy
delete mode 100644 .jenkinsci/linux-post-step.groovy
create mode 100644 .jenkinsci/mac-debug-build.groovy
create mode 100644 .jenkinsci/notifications.groovy
create mode 100644 .jenkinsci/post-step.groovy
create mode 100644 .jenkinsci/pre-build.groovy
delete mode 100644 .jenkinsci/selected-branches-coverage.groovy
create mode 100644 .jenkinsci/set-parallelism.groovy
create mode 100644 .jenkinsci/test-launcher.groovy
diff --git a/.jenkinsci/build-coverage.groovy b/.jenkinsci/build-coverage.groovy
new file mode 100644
index 0000000000..2e9a050694
--- /dev/null
+++ b/.jenkinsci/build-coverage.groovy
@@ -0,0 +1,15 @@
+#!/usr/bin/env groovy
+
+def checkCoverageConditions() {
+ // trigger coverage if branch is master, or it is an open PR commit, or a merge request
+ def branch_coverage = ['master']
+
+ if ( params.coverage ) {
+ return true
+ }
+ else {
+ return env.GIT_LOCAL_BRANCH in branch_coverage || INITIAL_COMMIT_PR == "true" || MERGE_CONDITIONS_SATISFIED == "true"
+ }
+}
+
+return this
diff --git a/.jenkinsci/debug-build.groovy b/.jenkinsci/debug-build.groovy
index 47087c07eb..52ad96d383 100644
--- a/.jenkinsci/debug-build.groovy
+++ b/.jenkinsci/debug-build.groovy
@@ -4,26 +4,20 @@ def doDebugBuild(coverageEnabled=false) {
def dPullOrBuild = load ".jenkinsci/docker-pull-or-build.groovy"
def manifest = load ".jenkinsci/docker-manifest.groovy"
def pCommit = load ".jenkinsci/previous-commit.groovy"
- def parallelism = params.PARALLELISM
+ def setter = load ".jenkinsci/set-parallelism.groovy"
+ def parallelism = setter.setParallelism(params.PARALLELISM)
def platform = sh(script: 'uname -m', returnStdout: true).trim()
- def previousCommit = pCommit.previousCommitOrCurrent()
+ def prevCommit = pCommit.previousCommitOrCurrent()
// params are always null unless job is started
// this is the case for the FIRST build only.
// So just set this to same value as default.
// This is a known bug. See https://issues.jenkins-ci.org/browse/JENKINS-41929
- if (!parallelism) {
- parallelism = 4
- }
- if (env.NODE_NAME.contains('arm7')) {
- parallelism = 1
- }
- sh "docker network create ${env.IROHA_NETWORK}"
+
def iC = dPullOrBuild.dockerPullOrUpdate("${platform}-develop-build",
"${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/develop/Dockerfile",
- "${env.GIT_RAW_BASE_URL}/${previousCommit}/docker/develop/Dockerfile",
+ "${env.GIT_RAW_BASE_URL}/${prevCommit}/docker/develop/Dockerfile",
"${env.GIT_RAW_BASE_URL}/develop/docker/develop/Dockerfile",
['PARALLELISM': parallelism])
-
if (GIT_LOCAL_BRANCH == 'develop' && manifest.manifestSupportEnabled()) {
manifest.manifestCreate("${DOCKER_REGISTRY_BASENAME}:develop-build",
["${DOCKER_REGISTRY_BASENAME}:x86_64-develop-build",
@@ -42,6 +36,61 @@ def doDebugBuild(coverageEnabled=false) {
manifest.manifestPush("${DOCKER_REGISTRY_BASENAME}:develop-build", login, password)
}
}
+
+ iC.inside(""
+ + " -e IROHA_POSTGRES_HOST=${env.IROHA_POSTGRES_HOST}"
+ + " -e IROHA_POSTGRES_PORT=${env.IROHA_POSTGRES_PORT}"
+ + " -e IROHA_POSTGRES_USER=${env.IROHA_POSTGRES_USER}"
+ + " -e IROHA_POSTGRES_PASSWORD=${env.IROHA_POSTGRES_PASSWORD}"
+ + " -v ${CCACHE_DIR}:${CCACHE_DIR}") {
+ def scmVars = checkout scm
+ def cmakeOptions = ""
+
+ if ( coverageEnabled ) {
+ cmakeOptions = " -DCOVERAGE=ON "
+ }
+ env.IROHA_VERSION = "0x${scmVars.GIT_COMMIT}"
+ env.IROHA_HOME = "/opt/iroha"
+ env.IROHA_BUILD = "${env.IROHA_HOME}/build"
+
+ sh """
+ ccache --version
+ ccache --show-stats
+ ccache --zero-stats
+ ccache --max-size=5G
+ """
+ sh """
+ cmake \
+ -DTESTING=ON \
+ -H. \
+ -Bbuild \
+ -DCMAKE_BUILD_TYPE=${params.build_type} \
+ -DIROHA_VERSION=${env.IROHA_VERSION} \
+ ${cmakeOptions}
+ """
+ sh "cmake --build build -- -j${parallelism}"
+ sh "ccache --show-stats"
+ }
+}
+
+// do not implement any checks as coverage runs only on x86_64
+def doPreCoverageStep() {
+ sh "docker load -i ${JENKINS_DOCKER_IMAGE_DIR}/${DOCKER_IMAGE_FILE}"
+ def iC = docker.image("${DOCKER_AGENT_IMAGE}")
+ iC.inside() {
+ sh "cmake --build build --target coverage.init.info"
+ }
+}
+
+// run specified test categories
+def doTestStep(testList) {
+ if (env.NODE_NAME.contains('x86_64')) {
+ sh "docker load -i ${JENKINS_DOCKER_IMAGE_DIR}/${DOCKER_IMAGE_FILE}"
+ }
+
+ def iC = docker.image("${DOCKER_AGENT_IMAGE}")
+ sh "docker network create ${env.IROHA_NETWORK}"
+
docker.image('postgres:9.5').withRun(""
+ " -e POSTGRES_USER=${env.IROHA_POSTGRES_USER}"
+ " -e POSTGRES_PASSWORD=${env.IROHA_POSTGRES_PASSWORD}"
@@ -52,63 +101,44 @@ def doDebugBuild(coverageEnabled=false) {
+ " -e IROHA_POSTGRES_PORT=${env.IROHA_POSTGRES_PORT}"
+ " -e IROHA_POSTGRES_USER=${env.IROHA_POSTGRES_USER}"
+ " -e IROHA_POSTGRES_PASSWORD=${env.IROHA_POSTGRES_PASSWORD}"
- + " --network=${env.IROHA_NETWORK}"
- + " -v /var/jenkins/ccache:${CCACHE_DIR}"
- + " -v /tmp/${GIT_COMMIT}-${BUILD_NUMBER}:/tmp/${GIT_COMMIT}") {
-
- def scmVars = checkout scm
- def cmakeOptions = ""
- if ( coverageEnabled ) {
- cmakeOptions = " -DCOVERAGE=ON "
- }
- env.IROHA_VERSION = "0x${scmVars.GIT_COMMIT}"
- env.IROHA_HOME = "/opt/iroha"
- env.IROHA_BUILD = "${env.IROHA_HOME}/build"
-
- sh """
- ccache --version
- ccache --show-stats
- ccache --zero-stats
- ccache --max-size=5G
- """
- sh """
- cmake \
- -DTESTING=ON \
- -H. \
- -Bbuild \
- -DCMAKE_BUILD_TYPE=Debug \
- -DIROHA_VERSION=${env.IROHA_VERSION} \
- ${cmakeOptions}
- """
- sh "cmake --build build -- -j${parallelism}"
- sh "ccache --show-stats"
- if ( coverageEnabled ) {
- sh "cmake --build build --target coverage.init.info"
- }
- def testExitCode = sh(script: """cd build && ctest --output-on-failure""", returnStatus: true)
- if (testExitCode != 0) {
- currentBuild.result = "UNSTABLE"
- }
- if ( coverageEnabled ) {
- sh "cmake --build build --target cppcheck"
- // Sonar
- if (env.CHANGE_ID != null) {
- sh """
- sonar-scanner \
- -Dsonar.github.disableInlineComments \
- -Dsonar.github.repository='${DOCKER_REGISTRY_BASENAME}' \
- -Dsonar.analysis.mode=preview \
- -Dsonar.login=${SONAR_TOKEN} \
- -Dsonar.projectVersion=${BUILD_TAG} \
- -Dsonar.github.oauth=${SORABOT_TOKEN} \
- -Dsonar.github.pullRequest=${CHANGE_ID}
- """
+ + " --network=${env.IROHA_NETWORK}") {
+ def testExitCode = sh(script: """cd build && ctest --output-on-failure -R '${testList}' """, returnStatus: true)
+ if (testExitCode != 0) {
+ currentBuild.result = "UNSTABLE"
}
- sh "cmake --build build --target coverage.info"
- sh "python /tmp/lcov_cobertura.py build/reports/coverage.info -o build/reports/coverage.xml"
- cobertura autoUpdateHealth: false, autoUpdateStability: false, coberturaReportFile: '**/build/reports/coverage.xml', conditionalCoverageTargets: '75, 50, 0', failUnhealthy: false, failUnstable: false, lineCoverageTargets: '75, 50, 0', maxNumberOfBuilds: 50, methodCoverageTargets: '75, 50, 0', onlyStable: false, zoomCoverageChart: false
}
}
+}
+
+// do cobertura analysis
+// same as for doPreCoverageStep()
+def doPostCoverageCoberturaStep() {
+ sh "docker load -i ${JENKINS_DOCKER_IMAGE_DIR}/${DOCKER_IMAGE_FILE}"
+ def iC = docker.image("${DOCKER_AGENT_IMAGE}")
+ iC.inside() {
+ sh "cmake --build build --target coverage.info"
+ sh "python /tmp/lcov_cobertura.py build/reports/coverage.info -o build/reports/coverage.xml"
+ cobertura autoUpdateHealth: false, autoUpdateStability: false, coberturaReportFile: '**/build/reports/coverage.xml', conditionalCoverageTargets: '75, 50, 0', failUnhealthy: false, failUnstable: false, lineCoverageTargets: '75, 50, 0', maxNumberOfBuilds: 50, methodCoverageTargets: '75, 50, 0', onlyStable: false, zoomCoverageChart: false
+ }
+}
+
+// do cppcheck and sonar analysis
+// same as for doPreCoverageStep()
+def doPostCoverageSonarStep() {
+ sh "docker load -i ${JENKINS_DOCKER_IMAGE_DIR}/${DOCKER_IMAGE_FILE}"
+ def iC = docker.image("${DOCKER_AGENT_IMAGE}")
+ iC.inside() {
+ sh "cmake --build build --target cppcheck"
+ sh """
+ sonar-scanner \
+ -Dsonar.github.disableInlineComments \
+ -Dsonar.github.repository='hyperledger/iroha' \
+ -Dsonar.analysis.mode=preview \
+ -Dsonar.login=${SONAR_TOKEN} \
+ -Dsonar.projectVersion=${BUILD_TAG} \
+ -Dsonar.github.oauth=${SORABOT_TOKEN} \
+ -Dsonar.github.pullRequest=${CHANGE_ID}
+ """
}
}
diff --git a/.jenkinsci/docker-cleanup.groovy b/.jenkinsci/docker-cleanup.groovy
index c789383b28..b4eda7a04c 100644
--- a/.jenkinsci/docker-cleanup.groovy
+++ b/.jenkinsci/docker-cleanup.groovy
@@ -1,17 +1,23 @@
#!/usr/bin/env groovy
+// remove docker network and stale images
def doDockerCleanup() {
+ sh """
+ # Check whether the image is the last man standing
+ # i.e., no other tags exist for this image
+ docker rmi \$(docker images --no-trunc --format '{{.Repository}}:{{.Tag}}\\t{{.ID}}' | grep \$(docker images --no-trunc --format '{{.ID}}' ${iC.id}) | head -n -1 | cut -f 1) || true
+ """
+}
+
+// cleanup docker network created for the test stage
+def doDockerNetworkCleanup() {
+ sh "docker network rm ${env.IROHA_NETWORK}"
+}
- sh """
- # Check whether the image is the last-standing man
- # i.e., no other tags exist for this image
- docker rmi \$(docker images --no-trunc --format '{{.Repository}}:{{.Tag}}\\t{{.ID}}' | grep \$(docker images --no-trunc --format '{{.ID}}' ${iC.id}) | head -n -1 | cut -f 1) || true
- sleep 5
- docker network rm $IROHA_NETWORK || true
- #remove folder with iroha.deb package and Dockerfiles
- rm -rf /tmp/${env.GIT_COMMIT}-${BUILD_NUMBER}
- rm -rf /tmp/${env.GIT_COMMIT}
- """
+// cleanup docker images which weren't used for more than 20 days and image for this PR in case of successful PR
+def doStaleDockerImagesCleanup() {
+ sh "find ${JENKINS_DOCKER_IMAGE_DIR} -type f -mtime +20 -exec rm -f {} \\;"
+ sh "rm -f ${JENKINS_DOCKER_IMAGE_DIR}/${DOCKER_IMAGE_FILE}"
}
return this
diff --git a/.jenkinsci/docker-pull-or-build.groovy b/.jenkinsci/docker-pull-or-build.groovy
index d699ac40bb..9d74931db8 100644
--- a/.jenkinsci/docker-pull-or-build.groovy
+++ b/.jenkinsci/docker-pull-or-build.groovy
@@ -19,31 +19,52 @@ def buildOptionsString(options) {
def dockerPullOrUpdate(imageName, currentDockerfileURL, previousDockerfileURL, referenceDockerfileURL, buildOptions=null) {
buildOptions = buildOptionsString(buildOptions)
+ def uploadExitCode = 0
+ // GIT_PREVIOUS_COMMIT is null for first PR build
def commit = sh(script: "echo ${GIT_LOCAL_BRANCH} | md5sum | cut -c 1-8", returnStdout: true).trim()
+ DOCKER_IMAGE_FILE = commit
+
if (remoteFilesDiffer(currentDockerfileURL, previousDockerfileURL)) {
// Dockerfile has been changed compared to the previous commit
// Worst case scenario. We cannot count on the local cache
// because Dockerfile may contain apt-get entries that would try to update
// from invalid (stale) addresses
iC = docker.build("${DOCKER_REGISTRY_BASENAME}:${commit}-${BUILD_NUMBER}", "${buildOptions} --no-cache -f /tmp/${env.GIT_COMMIT}/f1 /tmp/${env.GIT_COMMIT}")
+ if (env.NODE_NAME.contains('x86_64')) {
+ sh "docker save -o ${JENKINS_DOCKER_IMAGE_DIR}/${DOCKER_IMAGE_FILE} ${DOCKER_REGISTRY_BASENAME}:${commit}-${BUILD_NUMBER}"
+ }
}
else {
+ // upload reference image (hyperledger/iroha:develop-build) (required in all use-cases in this execution branch)
+ if (env.NODE_NAME.contains('x86_64')) {
+ uploadExitCode = sh(script: "docker load -i ${JENKINS_DOCKER_IMAGE_DIR}/${imageName}", returnStatus: true)
+ if (uploadExitCode != 0) {
+ sh "echo 'Reference image ${DOCKER_REGISTRY_BASENAME}:${imageName} doesn't exist on the EFS"
+ }
+ }
+ def pullExitCode = sh(script: "docker pull ${DOCKER_REGISTRY_BASENAME}:${imageName}", returnStatus: true)
+ if (pullExitCode != 0) {
+ sh "echo 'Reference image ${DOCKER_REGISTRY_BASENAME}:${imageName} doesn't exist on the dockerhub"
+ }
+ else {
+ // save the reference docker image into a file when it doesn't exist on the EFS
+ if (uploadExitCode != 0) {
+ sh "docker save -o ${JENKINS_DOCKER_IMAGE_DIR}/${imageName} ${DOCKER_REGISTRY_BASENAME}:${imageName}"
+ }
+ }
// first commit in this branch or Dockerfile modified
if (remoteFilesDiffer(currentDockerfileURL, referenceDockerfileURL)) {
// if we're lucky to build on the same agent, image will be built using cache
- iC = docker.build("${DOCKER_REGISTRY_BASENAME}:${commit}-${BUILD_NUMBER}", "$buildOptions -f /tmp/${env.GIT_COMMIT}/f1 /tmp/${env.GIT_COMMIT}")
+ // for x86 download reference image and start build
+ iC = docker.build("${DOCKER_REGISTRY_BASENAME}:${commit}-${BUILD_NUMBER}", "${buildOptions} -f /tmp/${env.GIT_COMMIT}/f1 /tmp/${env.GIT_COMMIT}")
+ if (env.NODE_NAME.contains('x86_64')) {
+ sh "docker save -o /tmp/docker/${imageName} ${DOCKER_REGISTRY_BASENAME}:${commit}-${BUILD_NUMBER}"
+ }
}
else {
- // try pulling image from Dockerhub, probably image is already there
- def testExitCode = sh(script: "docker pull ${DOCKER_REGISTRY_BASENAME}:${imageName}", returnStatus: true)
- if (testExitCode != 0) {
- // image does not (yet) exist on Dockerhub. Build it
- iC = docker.build("${DOCKER_REGISTRY_BASENAME}:${commit}-${BUILD_NUMBER}", "$buildOptions --no-cache -f /tmp/${env.GIT_COMMIT}/f1 /tmp/${env.GIT_COMMIT}")
- }
- else {
- // no difference found compared to both previous and reference Dockerfile
- iC = docker.image("${DOCKER_REGISTRY_BASENAME}:${imageName}")
- }
+ // now we get reference image from the file (pull from dockerhub)
+ iC = docker.image("${DOCKER_REGISTRY_BASENAME}:${imageName}")
+ DOCKER_IMAGE_FILE = imageName
}
}
if (GIT_LOCAL_BRANCH ==~ /develop|master/) {
@@ -51,6 +72,7 @@ def dockerPullOrUpdate(imageName, currentDockerfileURL, previousDockerfileURL, r
iC.push(imageName)
}
}
+ DOCKER_AGENT_IMAGE = iC.imageName()
return iC
}
diff --git a/.jenkinsci/enums.groovy b/.jenkinsci/enums.groovy
new file mode 100644
index 0000000000..32e383c6e6
--- /dev/null
+++ b/.jenkinsci/enums.groovy
@@ -0,0 +1,12 @@
+// types of tests provided by the developers (can be found at the CMakeLists.txt files)
+enum TestTypes {
+ module(0), integration(1), system(2), cmake(3), regression(4), benchmark(5), framework(6)
+ TestTypes(int order) {
+ this.order = order
+ }
+ private final int order
+ int getOrder() {
+ order
+ }
+}
+return this
diff --git a/.jenkinsci/github-api.groovy b/.jenkinsci/github-api.groovy
new file mode 100644
index 0000000000..9441daf68f
--- /dev/null
+++ b/.jenkinsci/github-api.groovy
@@ -0,0 +1,137 @@
+#!/usr/bin/env groovy
+
+// list of the pull requests reviews status on github
+enum GithubPRStatus {
+ APPROVED, CHANGES_REQUESTED, REVIEW_REQUESTED
+}
+// list of the possible merge strategies
+enum MergeTarget {
+ merge, squash
+}
+// list of supportable target branches for automated merge (by CI)
+enum ChangeTarget {
+ master, develop, trunk
+}
+
+// map with user:review_status
+pullRequestReviewers = [:]
+
+// merges pull request using GitHub API in case it meets the merging criteria
+def mergePullRequest() {
+ if ( ! ( checkMergeAcceptance() ) ) { return false }
+ withCredentials([string(credentialsId: 'jenkins-integration-test', variable: 'sorabot')]) {
+ def slurper = new groovy.json.JsonSlurperClassic()
+ def commitTitle = ""
+ def commitMessage = ""
+ def mergeMethod = getMergeMethod()
+ def jsonResponseMerge = sh(script: """
+ curl -H "Authorization: token ${sorabot}" \
+ -H "Accept: application/vnd.github.v3+json" \
+ -X PUT --data '{"commit_title":"${commitTitle}","commit_message":"${commitMessage}","sha":"${env.GIT_COMMIT}","merge_method":"${mergeMethod}"}' \
+ -w "%{http_code}" https://api.github.com/repos/hyperledger/iroha/pulls/${CHANGE_ID}/merge""", returnStdout: true)
+ def githubResponse = sh(script:"""set +x; printf '%s\n' "${jsonResponseMerge}" | tail -n 1; set -x""", returnStdout: true).trim()
+ jsonResponseMerge = slurper.parseText(jsonResponseMerge)
+ if (jsonResponseMerge.merged != "true" || !(githubResponse ==~ "200")) {
+ echo jsonResponseMerge.message
+ return false
+ }
+ return true
+ }
+}
+
+// check merge acceptance by:
+// - at least 2 "approved and NO "changes_requested" in reviews
+// - e-mail of the commit does not match Jenkins user who launched this build
+def checkMergeAcceptance() {
+ def approvalsRequired = 0
+ // fill the map of user:review_status
+ getPullRequestReviewers()
+ pullRequestReviewers.each{ user, review_status ->
+ if (review_status == GithubPRStatus.APPROVED.toString()) {
+ approvalsRequired += 1
+ }
+ else if (review_status == GithubPRStatus.CHANGES_REQUESTED.toString()) {
+ return false
+ }
+ }
+ if (approvalsRequired < 2) {
+ sh "echo 'Merge failed. Get more PR approvals before merging'"
+ return false
+ }
+ return true
+}
+
+// returns merge method based on target branch (squash&merge vs merge)
+def getMergeMethod() {
+ if (env.CHANGE_TARGET == ChangeTarget.master.toString()) {
+ return MergeTarget.merge.toString()
+ }
+ else {
+ return MergeTarget.squash.toString()
+ }
+}
+
+// fill the pullRequestReviews map with user:review status
+def getPullRequestReviewers() {
+ def slurper = new groovy.json.JsonSlurperClassic()
+ // if there is more than 1 page of "reviews" in the PR (happens due to a huge amount of comments)
+ def reviewPaging = sh(script: """curl -I https://api.github.com/repos/hyperledger/iroha/pulls/1392/reviews | grep -E "^Link:" | wc -l""", returnStdout: true)
+ def reviewPagesCount = "1"
+ if (reviewPaging.toInteger()) {
+ reviewPagesCount = sh(script: """ curl -I https://api.github.com/repos/hyperledger/iroha/pulls/1392/reviews | grep -E "^Link:" | awk 'BEGIN { FS = "page" } { print \$NF }' | awk -F"=" '{print \$2}' | awk -F">" '{print \$1}' """, returnStdout: true)
+ }
+ // start the loop to request pages sequentially
+ for(pageID in (1..reviewPagesCount.toInteger())) {
+ def jsonResponseReview = sh(script: """
+ curl https://api.github.com/repos/hyperledger/iroha/pulls/${CHANGE_ID}/reviews?page=${pageID}
+ """, returnStdout: true).trim()
+ // process returned reviews. add/update user:review_status to the map
+ jsonResponseReview = slurper.parseText(jsonResponseReview)
+ if (jsonResponseReview.size() > 0) {
+ jsonResponseReview.each {
+ if (it.state.toString() in [GithubPRStatus.APPROVED.toString(), GithubPRStatus.CHANGES_REQUESTED.toString()]) {
+ pullRequestReviewers[it.user.login.toString()] = it.state.toString()
+ }
+ }
+ }
+ }
+ // get requested reviewers (those who did not review this PR yet)
+ def jsonResponseReviewers = sh(script: """
+ curl https://api.github.com/repos/hyperledger/iroha/pulls/${CHANGE_ID}/requested_reviewers
+ """, returnStdout: true).trim()
+ jsonResponseReviewers = slurper.parseText(jsonResponseReviewers)
+ if (jsonResponseReviewers.size() > 0) {
+ jsonResponseReviewers.users.each {
+ pullRequestReviewers[it.login] = GithubPRStatus.REVIEW_REQUESTED.toString()
+ }
+ }
+}
+
+// returns PR reviewers in the form of "@reviewer1 @reviewer2 ... @reviewerN" to mention PR reviewers about build result
+def getUsersMentionList() {
+ getPullRequestReviewers()
+ def ghUsersList = ''
+ pullRequestReviewers.each{ user, review_status -> ghUsersList = ["@${user}", ghUsersList].join(' ') }
+ return ghUsersList
+}
+
+// post a comment on PR via GitHub API
+def writePullRequestComment() {
+ def ghUsersList = getUsersMentionList()
+ withCredentials([string(credentialsId: 'jenkins-integration-test', variable: 'sorabot')]) {
+ def slurper = new groovy.json.JsonSlurperClassic()
+ def jsonResponseComment = sh(script: """
+ curl -H "Authorization: token ${sorabot}" \
+ -H "Accept: application/vnd.github.v3+json" \
+ -X POST --data '{"body":"${ghUsersList} commit ${env.GIT_COMMIT} build status: ${currentBuild.currentResult}. build URL: ${BUILD_URL}"}' \
+ -w "%{http_code}" https://api.github.com/repos/hyperledger/iroha/issues/${CHANGE_ID}/comments
+ """, returnStdout: true).trim()
+ def githubResponse = sh(script:"""set +x; printf '%s\n' "${jsonResponseComment}" | tail -n 1 ; set -x""", returnStdout: true).trim()
+ if (githubResponse ==~ "201") {
+ return true
+ }
+ }
+ return false
+}
+
+return this
diff --git a/.jenkinsci/linux-post-step.groovy b/.jenkinsci/linux-post-step.groovy
deleted file mode 100644
index c28f6f6022..0000000000
--- a/.jenkinsci/linux-post-step.groovy
+++ /dev/null
@@ -1,20 +0,0 @@
-def linuxPostStep() {
- timeout(time: 600, unit: "SECONDS") {
- try {
- if (currentBuild.currentResult == "SUCCESS" && GIT_LOCAL_BRANCH ==~ /(master|develop)/) {
- def artifacts = load ".jenkinsci/artifacts.groovy"
- def commit = env.GIT_COMMIT
- def platform = sh(script: 'uname -m', returnStdout: true).trim()
- filePaths = [ '/tmp/${GIT_COMMIT}-${BUILD_NUMBER}/*' ]
- artifacts.uploadArtifacts(filePaths, sprintf('/iroha/linux/%4$s/%1$s-%2$s-%3$s', [GIT_LOCAL_BRANCH, sh(script: 'date "+%Y%m%d"', returnStdout: true).trim(), commit.substring(0,6), platform]))
- }
- }
- finally {
- def cleanup = load ".jenkinsci/docker-cleanup.groovy"
- cleanup.doDockerCleanup()
- cleanWs()
- }
- }
-}
-
-return this
diff --git a/.jenkinsci/mac-debug-build.groovy b/.jenkinsci/mac-debug-build.groovy
new file mode 100644
index 0000000000..7eb118758b
--- /dev/null
+++ b/.jenkinsci/mac-debug-build.groovy
@@ -0,0 +1,44 @@
+#!/usr/bin/env groovy
+
+def doDebugBuild() {
+ def setter = load ".jenkinsci/set-parallelism.groovy"
+ def parallelism = setter.setParallelism(params.PARALLELISM)
+ def scmVars = checkout scm
+ env.IROHA_VERSION = "0x${scmVars.GIT_COMMIT}"
+ env.IROHA_HOME = "/opt/iroha"
+ env.IROHA_BUILD = "${env.IROHA_HOME}/build"
+
+ sh """
+ ccache --version
+ ccache --show-stats
+ ccache --zero-stats
+ ccache --max-size=5G
+ """
+ sh """
+ cmake \
+ -DTESTING=ON \
+ -H. \
+ -Bbuild \
+ -DCMAKE_BUILD_TYPE=${params.build_type} \
+ -DIROHA_VERSION=${env.IROHA_VERSION}
+ """
+ sh "cmake --build build -- -j${parallelism}"
+ sh "ccache --show-stats"
+}
+
+def doTestStep(testList) {
+ sh """
+ export IROHA_POSTGRES_PASSWORD=${IROHA_POSTGRES_PASSWORD}; \
+ export IROHA_POSTGRES_USER=${IROHA_POSTGRES_USER}; \
+ mkdir -p /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}; \
+ initdb -D /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}/ -U ${IROHA_POSTGRES_USER} --pwfile=<(echo ${IROHA_POSTGRES_PASSWORD}); \
+ pg_ctl -D /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}/ -o '-p 5433' -l /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}/events.log start; \
+ psql -h localhost -d postgres -p 5433 -U ${IROHA_POSTGRES_USER} --file=<(echo create database ${IROHA_POSTGRES_USER};)
+ """
+ def testExitCode = sh(script: """cd build && IROHA_POSTGRES_HOST=localhost IROHA_POSTGRES_PORT=5433 ctest --output-on-failure -R '${testList}' """, returnStatus: true)
+ if (testExitCode != 0) {
+ currentBuild.result = "UNSTABLE"
+ }
+}
+
+return this
diff --git a/.jenkinsci/mac-release-build.groovy b/.jenkinsci/mac-release-build.groovy
index 4ff20c6a95..26552f4ed4 100644
--- a/.jenkinsci/mac-release-build.groovy
+++ b/.jenkinsci/mac-release-build.groovy
@@ -1,6 +1,8 @@
#!/usr/bin/env groovy
def doReleaseBuild(coverageEnabled=false) {
+ def setter = load ".jenkinsci/set-parallelism.groovy"
+ def parallelism = setter.setParallelism(params.PARALLELISM)
def scmVars = checkout scm
env.IROHA_VERSION = "0x${scmVars.GIT_COMMIT}"
env.IROHA_HOME = "/opt/iroha"
@@ -17,10 +19,10 @@ def doReleaseBuild(coverageEnabled=false) {
-Bbuild \
-DCOVERAGE=OFF \
-DPACKAGE_TGZ=ON \
- -DCMAKE_BUILD_TYPE=Release \
+ -DCMAKE_BUILD_TYPE=${params.build_type} \
-DIROHA_VERSION=${env.IROHA_VERSION}
-
- cmake --build build --target package -- -j${params.PARALLELISM}
+
+ cmake --build build --target package -- -j${parallelism}
mv ./build/iroha-${env.IROHA_VERSION}-*.tar.gz ./build/iroha.tar.gz
ccache --show-stats
"""
diff --git a/.jenkinsci/notifications.groovy b/.jenkinsci/notifications.groovy
new file mode 100644
index 0000000000..2c527ef6d0
--- /dev/null
+++ b/.jenkinsci/notifications.groovy
@@ -0,0 +1,61 @@
+#!/usr/bin/env groovy
+
+def notifyBuildResults() {
+ def mergeMessage = ''
+ def receivers = ''
+ // notify committer in case of branch commit
+ if ( env.CHANGE_ID == null ) {
+ sendEmail(buildContent(mergeMessage), "${GIT_COMMITER_EMAIL}")
+ return
+ }
+ // merge commit build results
+ if ( params.merge_pr ) {
+ if ( currentBuild.currentResult == "SUCCESS" ) {
+ mergeMessage = "Merge status to ${env.CHANGE_TARGET}: true"
+ }
+ else {
+ mergeMessage = "Merge status to ${env.CHANGE_TARGET}: false"
+ }
+
+ if ( env.CHANGE_TARGET == 'master' ) {
+ receivers = "iroha-maintainers@soramitsu.co.jp"
+ }
+ else if ( env.CHANGE_TARGET == 'develop' ) {
+ receivers = "andrei@soramitsu.co.jp, fyodor@soramitsu.co.jp, ${GIT_COMMITER_EMAIL}"
+ }
+ else {
+ receivers = "${GIT_COMMITER_EMAIL}"
+ }
+
+ sendEmail(buildContent(mergeMessage), receivers)
+ }
+ else {
+ // write comment to the PR page on github if it is a pull request commit
+ def notify = load ".jenkinsci/github-api.groovy"
+ notify.writePullRequestComment()
+ }
+ return
+}
+
+def buildContent(mergeMessage="") {
+ return """
+This email informs you about the build results on Jenkins CI
+Build status: ${currentBuild.currentResult}. ${mergeMessage}
+
+Check console output to view the results.
+
+
+You can find the build log attached to this email
+
+"""
+}
+
+def sendEmail(content, to) {
+ emailext( subject: '$DEFAULT_SUBJECT',
+ body: "${content}",
+ attachLog: true,
+ compressLog: true,
+ to: "${to}"
+ )
+}
+return this
diff --git a/.jenkinsci/post-step.groovy b/.jenkinsci/post-step.groovy
new file mode 100644
index 0000000000..e5eab76376
--- /dev/null
+++ b/.jenkinsci/post-step.groovy
@@ -0,0 +1,40 @@
+#!/usr/bin/env groovy
+
+// upload artifacts in release builds
+def postStep() {
+ def artifacts = load ".jenkinsci/artifacts.groovy"
+ def commit = env.GIT_COMMIT
+ def platform = sh(script: 'uname -m', returnStdout: true).trim()
+ filePaths = [ '/tmp/${GIT_COMMIT}-${BUILD_NUMBER}/*' ]
+ artifacts.uploadArtifacts(filePaths, sprintf('/iroha/linux/%4$s/%1$s-%2$s-%3$s', [GIT_LOCAL_BRANCH, sh(script: 'date "+%Y%m%d"', returnStdout: true).trim(), commit.substring(0,6), platform]))
+}
+
+// upload artifacts in release builds (for mac)
+def macPostStep() {
+ def artifacts = load ".jenkinsci/artifacts.groovy"
+ def commit = env.GIT_COMMIT
+ filePaths = [ '\$(pwd)/build/*.tar.gz' ]
+ artifacts.uploadArtifacts(filePaths, sprintf('/iroha/macos/%1$s-%2$s-%3$s', [GIT_LOCAL_BRANCH, sh(script: 'date "+%Y%m%d"', returnStdout: true).trim(), commit.substring(0,6)]))
+}
+
+// clean folders after the build
+def cleanUp() {
+ if ( ! env.NODE_NAME.contains('x86_64') ) {
+ sh """
+ #remove folder with iroha.deb package and Dockerfiles
+ rm -rf /tmp/${env.GIT_COMMIT}-${BUILD_NUMBER}
+ rm -rf /tmp/${env.GIT_COMMIT}
+ """
+ }
+ cleanWs()
+}
+
+// stop postgres and remove workspace folder (for mac)
+def macCleanUp() {
+ sh """
+ rm -rf /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}/
+ """
+ cleanWs()
+}
+
+return this
diff --git a/.jenkinsci/pre-build.groovy b/.jenkinsci/pre-build.groovy
new file mode 100644
index 0000000000..a6875bc28f
--- /dev/null
+++ b/.jenkinsci/pre-build.groovy
@@ -0,0 +1,14 @@
+#!/usr/bin/env groovy
+
+// replaces a bunch of expressions in `when` block at the `stage` in Jenkinsfile
+def prepare() {
+ def mergeBranches = env.CHANGE_TARGET ==~ /(master|develop|trunk)/ ? "true" : "false"
+ def docsPullRequestBranches = env.CHANGE_TARGET ==~ /(master|develop)/ ? "true" : "false"
+ def pullRequestCommit = env.CHANGE_ID && env.GIT_PREVIOUS_COMMIT ? "true" : "false"
+ INITIAL_COMMIT_PR = env.CHANGE_ID && env.GIT_PREVIOUS_COMMIT == null ? "true" : "false"
+ MERGE_CONDITIONS_SATISFIED = (mergeBranches == "true" && pullRequestCommit == "true" && params.merge_pr) ? "true" : "false"
+ REST_PR_CONDITIONS_SATISFIED = (docsPullRequestBranches == "true" && pullRequestCommit == "true" && params.merge_pr) ? "true" : "false"
+ GIT_COMMITER_EMAIL = sh(script: """git --no-pager show -s --format='%ae' ${env.GIT_COMMIT}""", returnStdout: true).trim()
+}
+
+return this
diff --git a/.jenkinsci/release-build.groovy b/.jenkinsci/release-build.groovy
index d24d6cfb31..422b567c83 100644
--- a/.jenkinsci/release-build.groovy
+++ b/.jenkinsci/release-build.groovy
@@ -1,25 +1,20 @@
#!/usr/bin/env groovy
def doReleaseBuild() {
- def parallelism = params.PARALLELISM
def manifest = load ".jenkinsci/docker-manifest.groovy"
// params are always null unless job is started
// this is the case for the FIRST build only.
// So just set this to same value as default.
// This is a known bug. See https://issues.jenkins-ci.org/browse/JENKINS-41929
- if (!parallelism) {
- parallelism = 4
- }
- if (env.NODE_NAME.contains('arm7')) {
- parallelism = 1
- }
+ def setter = load ".jenkinsci/set-parallelism.groovy"
+ def parallelism = setter.setParallelism(params.PARALLELISM)
def platform = sh(script: 'uname -m', returnStdout: true).trim()
sh "mkdir /tmp/${env.GIT_COMMIT}-${BUILD_NUMBER} || true"
iC = docker.image("${DOCKER_REGISTRY_BASENAME}:${platform}-develop-build")
iC.pull()
iC.inside(""
+ " -v /tmp/${GIT_COMMIT}-${BUILD_NUMBER}:/tmp/${GIT_COMMIT}"
- + " -v /var/jenkins/ccache:${CCACHE_RELEASE_DIR}") {
+ + " -v ${CCACHE_RELEASE_DIR}:${CCACHE_DIR}") {
def scmVars = checkout scm
env.IROHA_VERSION = "0x${scmVars.GIT_COMMIT}"
@@ -36,7 +31,7 @@ def doReleaseBuild() {
cmake \
-H. \
-Bbuild \
- -DCMAKE_BUILD_TYPE=Release \
+ -DCMAKE_BUILD_TYPE=${params.build_type} \
-DIROHA_VERSION=${env.IROHA_VERSION} \
-DPACKAGE_DEB=ON \
-DPACKAGE_TGZ=ON \
diff --git a/.jenkinsci/selected-branches-coverage.groovy b/.jenkinsci/selected-branches-coverage.groovy
deleted file mode 100644
index d6c3f95967..0000000000
--- a/.jenkinsci/selected-branches-coverage.groovy
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env groovy
-
-def selectedBranchesCoverage(branches, PRCoverage=true) {
- // trigger coverage if branch is either develop or master, or it is a PR
- if (PRCoverage) {
- return env.GIT_LOCAL_BRANCH in branches || env.CHANGE_ID != null
- }
- else {
- return env.GIT_LOCAL_BRANCH in branches
- }
-}
-
-return this
\ No newline at end of file
diff --git a/.jenkinsci/set-parallelism.groovy b/.jenkinsci/set-parallelism.groovy
new file mode 100644
index 0000000000..581488f7ca
--- /dev/null
+++ b/.jenkinsci/set-parallelism.groovy
@@ -0,0 +1,19 @@
+#!/usr/bin/env groovy
+
+def setParallelism(defaultParameter) {
+ if (!defaultParameter) {
+ return 4
+ }
+ if (env.NODE_NAME.contains('arm7')) {
+ return 1
+ }
+ if (env.NODE_NAME.contains('mac')) {
+ return 4
+ }
+ if (env.NODE_NAME.contains('x86_64')) {
+ return 8
+ }
+ return defaultParameter
+}
+
+return this
diff --git a/.jenkinsci/test-launcher.groovy b/.jenkinsci/test-launcher.groovy
new file mode 100644
index 0000000000..924b0cd6f4
--- /dev/null
+++ b/.jenkinsci/test-launcher.groovy
@@ -0,0 +1,41 @@
+#!/usr/bin/env groovy
+
+// format the enum elements output like "(val1|val2|...|valN)*"
+def printRange(start, end) {
+ def output = ""
+ def set = start..end
+ TestTypes.values().each { t ->
+ if (t.getOrder() in set) {
+ output = [output, (t.getOrder() != start ? "|" : ""), t.name()].join('')
+ }
+ }
+ return ["(", output, ")*"].join('')
+}
+
+// return tests list regex that will be launched by ctest
+def chooseTestType() {
+ if (params.merge_pr) {
+ if (env.NODE_NAME.contains('x86_64')) {
+ // choose module, integration, system, cmake, regression tests
+ return printRange(TestTypes.module.getOrder(), TestTypes.regression.getOrder())
+ }
+ else {
+ // do not run any tests
+ return ""
+ }
+ }
+ if (params.nightly) {
+ if (env.NODE_NAME.contains('x86_64')) {
+ // choose all tests
+ return printRange(TestTypes.MIN_VALUE.getOrder(), TestTypes.MAX_VALUE.getOrder())
+ }
+ else {
+ // choose module, integration, system, cmake, regression tests
+ return printRange(TestTypes.module.getOrder(), TestTypes.regression.getOrder())
+ }
+ }
+ // just choose module tests
+ return [TestTypes.module.toString(), "*"].join('')
+}
+
+return this
diff --git a/Jenkinsfile b/Jenkinsfile
index 2fd2412580..cb78c4b76e 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,12 +1,14 @@
properties([parameters([
- booleanParam(defaultValue: true, description: 'Build `iroha`', name: 'iroha'),
- booleanParam(defaultValue: false, description: 'Build `bindings`', name: 'bindings'),
booleanParam(defaultValue: true, description: '', name: 'x86_64_linux'),
booleanParam(defaultValue: false, description: '', name: 'armv7_linux'),
booleanParam(defaultValue: false, description: '', name: 'armv8_linux'),
- booleanParam(defaultValue: true, description: '', name: 'x86_64_macos'),
+ booleanParam(defaultValue: false, description: '', name: 'x86_64_macos'),
booleanParam(defaultValue: false, description: '', name: 'x86_64_win'),
+ booleanParam(defaultValue: false, description: 'Build coverage', name: 'coverage'),
+ booleanParam(defaultValue: false, description: 'Merge this PR to target after success build', name: 'merge_pr'),
+ booleanParam(defaultValue: false, description: 'Scheduled nightly build', name: 'nightly'),
choice(choices: 'Debug\nRelease', description: 'Iroha build type', name: 'build_type'),
+ booleanParam(defaultValue: false, description: 'Build `bindings`', name: 'bindings'),
booleanParam(defaultValue: false, description: 'Build Java bindings', name: 'JavaBindings'),
choice(choices: 'Release\nDebug', description: 'Java bindings build type', name: 'JBBuildType'),
booleanParam(defaultValue: false, description: 'Build Python bindings', name: 'PythonBindings'),
@@ -19,7 +21,6 @@ properties([parameters([
booleanParam(defaultValue: false, description: 'Build docs', name: 'Doxygen'),
string(defaultValue: '4', description: 'How much parallelism should we exploit. "4" is optimal for machines with modest amount of memory and at least 4 cores', name: 'PARALLELISM')])])
-
pipeline {
environment {
CCACHE_DIR = '/opt/.ccache'
@@ -28,12 +29,21 @@ pipeline {
SONAR_TOKEN = credentials('SONAR_TOKEN')
GIT_RAW_BASE_URL = "https://raw.githubusercontent.com/hyperledger/iroha"
DOCKER_REGISTRY_BASENAME = "hyperledger/iroha"
+ JENKINS_DOCKER_IMAGE_DIR = '/tmp/docker'
+ GIT_COMMITER_EMAIL = ''
IROHA_NETWORK = "iroha-0${CHANGE_ID}-${GIT_COMMIT}-${BUILD_NUMBER}"
IROHA_POSTGRES_HOST = "pg-0${CHANGE_ID}-${GIT_COMMIT}-${BUILD_NUMBER}"
IROHA_POSTGRES_USER = "pguser${GIT_COMMIT}"
IROHA_POSTGRES_PASSWORD = "${GIT_COMMIT}"
IROHA_POSTGRES_PORT = 5432
+
+ DOCKER_AGENT_IMAGE = ''
+ DOCKER_IMAGE_FILE = ''
+ WORKSPACE_PATH = ''
+ MERGE_CONDITIONS_SATISFIED = ''
+ REST_PR_CONDITIONS_SATISFIED = ''
+ INITIAL_COMMIT_PR = ''
}
options {
@@ -43,10 +53,13 @@ pipeline {
agent any
stages {
- stage ('Stop same job builds') {
+ stage ('Pre-build') {
agent { label 'master' }
steps {
script {
+ load ".jenkinsci/enums.groovy"
+ def preBuildRoutine = load ".jenkinsci/pre-build.groovy"
+ preBuildRoutine.prepare()
if (GIT_LOCAL_BRANCH != "develop") {
def builds = load ".jenkinsci/cancel-builds-same-job.groovy"
builds.cancelSameJobBuilds()
@@ -54,41 +67,37 @@ pipeline {
}
}
}
- stage('Build Debug') {
- when {
- allOf {
- expression { params.build_type == 'Debug' }
- expression { return params.iroha }
- }
- }
+ stage('Build') {
parallel {
stage ('x86_64_linux') {
when {
beforeAgent true
- expression { return params.x86_64_linux }
+ anyOf {
+ expression { return params.x86_64_linux }
+ expression { return MERGE_CONDITIONS_SATISFIED == "true" }
+ }
}
- agent { label 'x86_64' }
+ agent { label 'x86_64_aws_build' }
steps {
script {
- debugBuild = load ".jenkinsci/debug-build.groovy"
- coverage = load ".jenkinsci/selected-branches-coverage.groovy"
- if (coverage.selectedBranchesCoverage(['develop', 'master'])) {
- debugBuild.doDebugBuild(true)
+ def debugBuild = load ".jenkinsci/debug-build.groovy"
+ def coverage = load ".jenkinsci/build-coverage.groovy"
+ if (params.build_type == 'Debug') {
+ debugBuild.doDebugBuild(coverage.checkCoverageConditions())
}
- else {
- debugBuild.doDebugBuild()
- }
- if (GIT_LOCAL_BRANCH ==~ /(master|develop)/) {
- releaseBuild = load ".jenkinsci/release-build.groovy"
+ if (params.build_type == 'Release') {
+ def releaseBuild = load ".jenkinsci/release-build.groovy"
releaseBuild.doReleaseBuild()
}
}
}
post {
- always {
+ success {
script {
- post = load ".jenkinsci/linux-post-step.groovy"
- post.linuxPostStep()
+ if (params.build_type == 'Release') {
+ def post = load ".jenkinsci/post-step.groovy"
+ post.postStep()
+ }
}
}
}
@@ -96,30 +105,31 @@ pipeline {
stage('armv7_linux') {
when {
beforeAgent true
- expression { return params.armv7_linux }
+ anyOf {
+ expression { return params.armv7_linux }
+ // expression { return MERGE_CONDITIONS_SATISFIED == "true" }
+ }
}
agent { label 'armv7' }
steps {
script {
- debugBuild = load ".jenkinsci/debug-build.groovy"
- coverage = load ".jenkinsci/selected-branches-coverage.groovy"
- if (!params.x86_64_linux && !params.armv8_linux && !params.x86_64_macos && (coverage.selectedBranchesCoverage(['develop', 'master']))) {
- debugBuild.doDebugBuild(true)
- }
- else {
+ if (params.build_type == 'Debug') {
+ def debugBuild = load ".jenkinsci/debug-build.groovy"
debugBuild.doDebugBuild()
}
- if (GIT_LOCAL_BRANCH ==~ /(master|develop)/) {
- releaseBuild = load ".jenkinsci/release-build.groovy"
+ else {
+ def releaseBuild = load ".jenkinsci/release-build.groovy"
releaseBuild.doReleaseBuild()
}
}
}
post {
- always {
+ success {
script {
- post = load ".jenkinsci/linux-post-step.groovy"
- post.linuxPostStep()
+ if (params.build_type == 'Release') {
+ def post = load ".jenkinsci/post-step.groovy"
+ post.postStep()
+ }
}
}
}
@@ -127,129 +137,63 @@ pipeline {
stage('armv8_linux') {
when {
beforeAgent true
- expression { return params.armv8_linux }
+ anyOf {
+ expression { return params.armv8_linux }
+ // expression { return MERGE_CONDITIONS_SATISFIED == "true" }
+ }
}
agent { label 'armv8' }
steps {
script {
- debugBuild = load ".jenkinsci/debug-build.groovy"
- coverage = load ".jenkinsci/selected-branches-coverage.groovy"
- if (!params.x86_64_linux && !params.x86_64_macos && (coverage.selectedBranchesCoverage(['develop', 'master']))) {
- debugBuild.doDebugBuild(true)
- }
- else {
+ if ( params.build_type == 'Debug') {
+ def debugBuild = load ".jenkinsci/debug-build.groovy"
debugBuild.doDebugBuild()
}
- if (GIT_LOCAL_BRANCH ==~ /(master|develop)/) {
- releaseBuild = load ".jenkinsci/release-build.groovy"
+ else {
+ def releaseBuild = load ".jenkinsci/release-build.groovy"
releaseBuild.doReleaseBuild()
}
}
}
post {
- always {
+ success {
script {
- post = load ".jenkinsci/linux-post-step.groovy"
- post.linuxPostStep()
+ if (params.build_type == 'Release') {
+ def post = load ".jenkinsci/post-step.groovy"
+ post.postStep()
+ }
}
}
}
}
- stage('x86_64_macos'){
+ stage('x86_64_macos') {
when {
beforeAgent true
- expression { return params.x86_64_macos }
+ anyOf {
+ expression { return INITIAL_COMMIT_PR == "true" }
+ expression { return MERGE_CONDITIONS_SATISFIED == "true" }
+ expression { return params.x86_64_macos }
+ }
}
agent { label 'mac' }
steps {
script {
- def coverageEnabled = false
- def cmakeOptions = ""
- coverage = load ".jenkinsci/selected-branches-coverage.groovy"
- if (!params.x86_64_linux && (coverage.selectedBranchesCoverage(['develop', 'master']))) {
- coverageEnabled = true
- cmakeOptions = " -DCOVERAGE=ON "
- }
- def scmVars = checkout scm
- env.IROHA_VERSION = "0x${scmVars.GIT_COMMIT}"
- env.IROHA_HOME = "/opt/iroha"
- env.IROHA_BUILD = "${env.IROHA_HOME}/build"
-
- sh """
- ccache --version
- ccache --show-stats
- ccache --zero-stats
- ccache --max-size=5G
- """
- sh """
- cmake \
- -DTESTING=ON \
- -H. \
- -Bbuild \
- -DCMAKE_BUILD_TYPE=${params.build_type} \
- -DIROHA_VERSION=${env.IROHA_VERSION} \
- ${cmakeOptions}
- """
- sh "cmake --build build -- -j${params.PARALLELISM}"
- sh "ccache --show-stats"
- if ( coverageEnabled ) {
- sh "cmake --build build --target coverage.init.info"
+ if (params.build_type == 'Debug') {
+ def macDebugBuild = load ".jenkinsci/mac-debug-build.groovy"
+ macDebugBuild.doDebugBuild()
}
- sh """
- export IROHA_POSTGRES_PASSWORD=${IROHA_POSTGRES_PASSWORD}; \
- export IROHA_POSTGRES_USER=${IROHA_POSTGRES_USER}; \
- mkdir -p /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}; \
- initdb -D /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}/ -U ${IROHA_POSTGRES_USER} --pwfile=<(echo ${IROHA_POSTGRES_PASSWORD}); \
- pg_ctl -D /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}/ -o '-p 5433' -l /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}/events.log start; \
- psql -h localhost -d postgres -p 5433 -U ${IROHA_POSTGRES_USER} --file=<(echo create database ${IROHA_POSTGRES_USER};)
- """
- def testExitCode = sh(script: """cd build && IROHA_POSTGRES_HOST=localhost IROHA_POSTGRES_PORT=5433 ctest --output-on-failure """, returnStatus: true)
- if (testExitCode != 0) {
- currentBuild.result = "UNSTABLE"
- }
- if ( coverageEnabled ) {
- sh "cmake --build build --target cppcheck"
- // Sonar
- if (env.CHANGE_ID != null) {
- sh """
- sonar-scanner \
- -Dsonar.github.disableInlineComments \
- -Dsonar.github.repository='hyperledger/iroha' \
- -Dsonar.analysis.mode=preview \
- -Dsonar.login=${SONAR_TOKEN} \
- -Dsonar.projectVersion=${BUILD_TAG} \
- -Dsonar.github.oauth=${SORABOT_TOKEN}
- """
- }
- sh "cmake --build build --target coverage.info"
- sh "python /usr/local/bin/lcov_cobertura.py build/reports/coverage.info -o build/reports/coverage.xml"
- cobertura autoUpdateHealth: false, autoUpdateStability: false, coberturaReportFile: '**/build/reports/coverage.xml', conditionalCoverageTargets: '75, 50, 0', failUnhealthy: false, failUnstable: false, lineCoverageTargets: '75, 50, 0', maxNumberOfBuilds: 50, methodCoverageTargets: '75, 50, 0', onlyStable: false, zoomCoverageChart: false
- }
- if (GIT_LOCAL_BRANCH ==~ /(master|develop)/) {
- releaseBuild = load ".jenkinsci/mac-release-build.groovy"
- releaseBuild.doReleaseBuild()
+ else {
+ def macReleaseBuild = load ".jenkinsci/mac-release-build.groovy"
+ macReleaseBuild.doReleaseBuild()
}
}
}
post {
- always {
+ success {
script {
- timeout(time: 600, unit: "SECONDS") {
- try {
- if (currentBuild.currentResult == "SUCCESS" && GIT_LOCAL_BRANCH ==~ /(master|develop)/) {
- def artifacts = load ".jenkinsci/artifacts.groovy"
- def commit = env.GIT_COMMIT
- filePaths = [ '\$(pwd)/build/*.tar.gz' ]
- artifacts.uploadArtifacts(filePaths, sprintf('/iroha/macos/%1$s-%2$s-%3$s', [GIT_LOCAL_BRANCH, sh(script: 'date "+%Y%m%d"', returnStdout: true).trim(), commit.substring(0,6)]))
- }
- }
- finally {
- cleanWs()
- sh """
- pg_ctl -D /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}/ stop && \
- rm -rf /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}/
- """
- }
+ if (params.build_type == 'Release') {
+ def post = load ".jenkinsci/post-step.groovy"
+ post.macPostStep()
}
}
}
@@ -257,29 +201,54 @@ pipeline {
}
}
}
- stage('Build Release') {
+ stage('Pre-Coverage') {
+ when {
+ beforeAgent true
+ anyOf {
+ expression { params.coverage } // by request
+ expression { return INITIAL_COMMIT_PR == "true" }
+ expression { return MERGE_CONDITIONS_SATISFIED == "true" }
+ allOf {
+ expression { return params.build_type == 'Debug' }
+ expression { return env.GIT_LOCAL_BRANCH ==~ /master/ }
+ }
+ }
+ }
+ agent { label 'x86_64_aws_cov'}
+ steps {
+ script {
+ def coverage = load '.jenkinsci/debug-build.groovy'
+ coverage.doPreCoverageStep()
+ }
+ }
+ }
+ stage('Tests') {
when {
- expression { params.build_type == 'Release' }
- expression { return params.iroha }
+ beforeAgent true
+ expression { return params.build_type == "Debug" }
}
parallel {
stage('x86_64_linux') {
when {
beforeAgent true
- expression { return params.x86_64_linux }
+ anyOf {
+ expression { return params.x86_64_linux }
+ expression { return MERGE_CONDITIONS_SATISFIED == "true" }
+ }
}
- agent { label 'x86_64' }
+ agent { label 'x86_64_aws_test' }
steps {
script {
- def releaseBuild = load ".jenkinsci/release-build.groovy"
- releaseBuild.doReleaseBuild()
+ def debugBuild = load ".jenkinsci/debug-build.groovy"
+ def testSelect = load ".jenkinsci/test-launcher.groovy"
+ debugBuild.doTestStep(testSelect.chooseTestType())
}
}
post {
- always {
+ cleanup {
script {
- post = load ".jenkinsci/linux-post-step.groovy"
- post.linuxPostStep()
+ def clean = load ".jenkinsci/docker-cleanup.groovy"
+ clean.doDockerNetworkCleanup()
}
}
}
@@ -287,20 +256,24 @@ pipeline {
stage('armv7_linux') {
when {
beforeAgent true
- expression { return params.armv7_linux }
+ allOf {
+ expression { return params.armv7_linux }
+ // expression { return MERGE_CONDITIONS_SATISFIED == "false" }
+ }
}
agent { label 'armv7' }
steps {
script {
- def releaseBuild = load ".jenkinsci/release-build.groovy"
- releaseBuild.doReleaseBuild()
+ def debugBuild = load ".jenkinsci/debug-build.groovy"
+ def testSelect = load ".jenkinsci/test-launcher.groovy"
+ debugBuild.doTestStep(testSelect.chooseTestType())
}
}
post {
- always {
+ cleanup {
script {
- post = load ".jenkinsci/linux-post-step.groovy"
- post.linuxPostStep()
+ def clean = load ".jenkinsci/docker-cleanup.groovy"
+ clean.doDockerNetworkCleanup()
}
}
}
@@ -308,20 +281,24 @@ pipeline {
stage('armv8_linux') {
when {
beforeAgent true
- expression { return params.armv8_linux }
+ allOf {
+ expression { return params.armv8_linux }
+ // expression { return MERGE_CONDITIONS_SATISFIED == "false" }
+ }
}
agent { label 'armv8' }
steps {
script {
- def releaseBuild = load ".jenkinsci/release-build.groovy"
- releaseBuild.doReleaseBuild()
+ def debugBuild = load ".jenkinsci/debug-build.groovy"
+ def testSelect = load ".jenkinsci/test-launcher.groovy"
+ debugBuild.doTestStep(testSelect.chooseTestType())
}
}
post {
- always {
+ cleanup {
script {
- post = load ".jenkinsci/linux-post-step.groovy"
- post.linuxPostStep()
+ def clean = load ".jenkinsci/docker-cleanup.groovy"
+ clean.doDockerNetworkCleanup()
}
}
}
@@ -329,97 +306,154 @@ pipeline {
stage('x86_64_macos') {
when {
beforeAgent true
- expression { return params.x86_64_macos }
+ allOf {
+ expression { return INITIAL_COMMIT_PR == "false" }
+ expression { return MERGE_CONDITIONS_SATISFIED == "false" }
+ expression { return params.x86_64_macos }
+ }
}
agent { label 'mac' }
steps {
script {
- def releaseBuild = load ".jenkinsci/mac-release-build.groovy"
- releaseBuild.doReleaseBuild()
+ def macDebugBuild = load ".jenkinsci/mac-debug-build.groovy"
+ def testSelect = load ".jenkinsci/test-launcher.groovy"
+ macDebugBuild.doTestStep(testSelect.chooseTestType())
}
}
post {
- always {
+ cleanup {
script {
- timeout(time: 600, unit: "SECONDS") {
- try {
- if (currentBuild.currentResult == "SUCCESS" && GIT_LOCAL_BRANCH ==~ /(master|develop)/) {
- def artifacts = load ".jenkinsci/artifacts.groovy"
- def commit = env.GIT_COMMIT
- filePaths = [ '\$(pwd)/build/*.tar.gz' ]
- artifacts.uploadArtifacts(filePaths, sprintf('/iroha/macos/%1$s-%2$s-%3$s', [GIT_LOCAL_BRANCH, sh(script: 'date "+%Y%m%d"', returnStdout: true).trim(), commit.substring(0,6)]))
- }
- }
- finally {
- cleanWs()
- }
- }
+ sh "pg_ctl -D /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}/ stop"
}
}
}
}
}
}
- stage('Build docs') {
+ stage('Post-Coverage') {
when {
beforeAgent true
- allOf {
- expression { return params.Doxygen }
- expression { GIT_LOCAL_BRANCH ==~ /(master|develop)/ }
+ anyOf {
+ expression { params.coverage } // by request
+ expression { return INITIAL_COMMIT_PR == "true" }
+ expression { return MERGE_CONDITIONS_SATISFIED == "true" }
+ allOf {
+ expression { return params.build_type == 'Debug' }
+ expression { return env.GIT_LOCAL_BRANCH ==~ /master/ }
+ }
}
}
- // build docs on any vacant node. Prefer `x86_64` over
- // others as nodes are more powerful
- agent { label 'x86_64 || arm' }
- steps {
- script {
- def doxygen = load ".jenkinsci/doxygen.groovy"
- docker.image("${env.DOCKER_IMAGE}").inside {
- def scmVars = checkout scm
- doxygen.doDoxygen()
+ parallel {
+ stage('lcov_cobertura') {
+ agent { label 'x86_64_aws_cov' }
+ steps {
+ script {
+ def coverage = load '.jenkinsci/debug-build.groovy'
+ coverage.doPostCoverageCoberturaStep()
+ }
+ }
+ }
+ stage('sonarqube') {
+ agent { label 'x86_64_aws_cov' }
+ steps {
+ script {
+ def coverage = load '.jenkinsci/debug-build.groovy'
+ coverage.doPostCoverageSonarStep()
+ }
}
}
}
}
- stage('Build bindings') {
- when {
- beforeAgent true
- expression { return params.bindings }
- }
+ stage ('Build rest') {
parallel {
- stage('Linux bindings') {
+ stage('linux_release') {
+ when {
+ beforeAgent true
+ anyOf {
+ allOf {
+ expression { return params.x86_64_linux }
+ expression { return params.build_type == 'Debug' }
+ expression { return env.GIT_LOCAL_BRANCH ==~ /(develop|master|trunk)/ }
+ }
+ expression { return MERGE_CONDITIONS_SATISFIED == "true" }
+ }
+ }
+ agent { label 'x86_64_aws_build' }
+ steps {
+ script {
+ def releaseBuild = load '.jenkinsci/release-build.groovy'
+ releaseBuild.doReleaseBuild()
+ }
+ }
+ post {
+ success {
+ script {
+ if (params.build_type == 'Release') {
+ def post = load ".jenkinsci/post-step.groovy"
+ post.postStep()
+ }
+ }
+ }
+ }
+ }
+ stage('docs') {
+ when {
+ beforeAgent true
+ anyOf {
+ expression { return params.Doxygen }
+ expression { return GIT_LOCAL_BRANCH ==~ /(master|develop)/ }
+ expression { return REST_PR_CONDITIONS_SATISFIED == "true" }
+ }
+ }
+ agent { label 'x86_64_aws_cov' }
+ steps {
+ script {
+ def doxygen = load ".jenkinsci/doxygen.groovy"
+ sh "docker load -i ${JENKINS_DOCKER_IMAGE_DIR}/${DOCKER_IMAGE_FILE}"
+ def iC = docker.image("${DOCKER_AGENT_IMAGE}")
+ iC.inside {
+ def scmVars = checkout scm
+ doxygen.doDoxygen()
+ }
+ }
+ }
+ }
+ stage('bindings') {
when {
beforeAgent true
- expression { return params.x86_64_linux }
+ anyOf {
+ expression { return params.bindings }
+ expression { return REST_PR_CONDITIONS_SATISFIED == "true" }
+ }
}
- agent { label 'x86_64' }
+ agent { label 'x86_64_aws_bindings' }
environment {
- JAVA_HOME = "/usr/lib/jvm/java-8-oracle"
+ JAVA_HOME = '/usr/lib/jvm/java-8-oracle'
}
steps {
script {
def bindings = load ".jenkinsci/bindings.groovy"
def dPullOrBuild = load ".jenkinsci/docker-pull-or-build.groovy"
def platform = sh(script: 'uname -m', returnStdout: true).trim()
- if (params.JavaBindings || params.PythonBindings) {
+ if (params.JavaBindings || params.PythonBindings || REST_PR_CONDITIONS_SATISFIED == "true") {
def iC = dPullOrBuild.dockerPullOrUpdate(
"$platform-develop-build",
"${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/develop/Dockerfile",
"${env.GIT_RAW_BASE_URL}/${env.GIT_PREVIOUS_COMMIT}/docker/develop/Dockerfile",
"${env.GIT_RAW_BASE_URL}/develop/docker/develop/Dockerfile",
['PARALLELISM': params.PARALLELISM])
- if (params.JavaBindings) {
- iC.inside("-v /tmp/${env.GIT_COMMIT}/bindings-artifact:/tmp/bindings-artifact") {
+ if (params.JavaBindings || REST_PR_CONDITIONS_SATISFIED == "true") {
+ iC.inside("-v /tmp/${env.GIT_COMMIT}/bindings-artifact:/tmp/bindings-artifact --user root") {
bindings.doJavaBindings('linux', params.JBBuildType)
}
}
- if (params.PythonBindings) {
- iC.inside("-v /tmp/${env.GIT_COMMIT}/bindings-artifact:/tmp/bindings-artifact") {
+ if (params.PythonBindings || REST_PR_CONDITIONS_SATISFIED == "true") {
+ iC.inside("-v /tmp/${env.GIT_COMMIT}/bindings-artifact:/tmp/bindings-artifact --user root") {
bindings.doPythonBindings('linux', params.PBBuildType)
}
}
}
- if (params.AndroidBindings) {
+ if (params.AndroidBindings || REST_PR_CONDITIONS_SATISFIED == "true") {
def iC = dPullOrBuild.dockerPullOrUpdate(
"android-${params.ABPlatform}-${params.ABBuildType}",
"${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/android/Dockerfile",
@@ -428,7 +462,7 @@ pipeline {
['PARALLELISM': params.PARALLELISM, 'PLATFORM': params.ABPlatform, 'BUILD_TYPE': params.ABBuildType])
sh "curl -L -o /tmp/${env.GIT_COMMIT}/entrypoint.sh ${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/android/entrypoint.sh"
sh "chmod +x /tmp/${env.GIT_COMMIT}/entrypoint.sh"
- iC.inside("-v /tmp/${env.GIT_COMMIT}/entrypoint.sh:/entrypoint.sh:ro -v /tmp/${env.GIT_COMMIT}/bindings-artifact:/tmp/bindings-artifact") {
+ iC.inside("-v /tmp/${env.GIT_COMMIT}/entrypoint.sh:/entrypoint.sh:ro -v /tmp/${env.GIT_COMMIT}/bindings-artifact:/tmp/bindings-artifact --user root") {
bindings.doAndroidBindings(params.ABABIVersion)
}
}
@@ -439,39 +473,42 @@ pipeline {
script {
def artifacts = load ".jenkinsci/artifacts.groovy"
def commit = env.GIT_COMMIT
- if (params.JavaBindings) {
+ if (params.JavaBindings || REST_PR_CONDITIONS_SATISFIED == "true") {
javaBindingsFilePaths = [ '/tmp/${GIT_COMMIT}/bindings-artifact/java-bindings-*.zip' ]
artifacts.uploadArtifacts(javaBindingsFilePaths, '/iroha/bindings/java')
}
- if (params.PythonBindings) {
+ if (params.PythonBindings || REST_PR_CONDITIONS_SATISFIED == "true") {
pythonBindingsFilePaths = [ '/tmp/${GIT_COMMIT}/bindings-artifact/python-bindings-*.zip' ]
artifacts.uploadArtifacts(pythonBindingsFilePaths, '/iroha/bindings/python')
}
- if (params.AndroidBindings) {
+ if (params.AndroidBindings || REST_PR_CONDITIONS_SATISFIED == "true") {
androidBindingsFilePaths = [ '/tmp/${GIT_COMMIT}/bindings-artifact/android-bindings-*.zip' ]
artifacts.uploadArtifacts(androidBindingsFilePaths, '/iroha/bindings/android')
}
}
}
cleanup {
- sh "rm -rf /tmp/${env.GIT_COMMIT}"
+ sh "sudo rm -rf /tmp/${env.GIT_COMMIT}"
cleanWs()
}
}
}
- stage ('Windows bindings') {
+ stage ('windows_bindings') {
when {
beforeAgent true
- expression { return params.x86_64_win }
+ anyOf {
+ expression { return params.x86_64_win }
+ expression { return REST_PR_CONDITIONS_SATISFIED == "true" }
+ }
}
agent { label 'win' }
steps {
script {
def bindings = load ".jenkinsci/bindings.groovy"
- if (params.JavaBindings) {
+ if (params.JavaBindings || REST_PR_CONDITIONS_SATISFIED == "true") {
bindings.doJavaBindings('windows', params.JBBuildType)
}
- if (params.PythonBindings) {
+ if (params.PythonBindings || REST_PR_CONDITIONS_SATISFIED == "true") {
bindings.doPythonBindings('windows', params.PBBuildType)
}
}
@@ -481,20 +518,70 @@ pipeline {
script {
def artifacts = load ".jenkinsci/artifacts.groovy"
def commit = env.GIT_COMMIT
- if (params.JavaBindings) {
+ if (params.JavaBindings || REST_PR_CONDITIONS_SATISFIED == "true") {
javaBindingsFilePaths = [ '/tmp/${GIT_COMMIT}/bindings-artifact/java-bindings-*.zip' ]
artifacts.uploadArtifacts(javaBindingsFilePaths, '/iroha/bindings/java')
}
- if (params.PythonBindings) {
+ if (params.PythonBindings || REST_PR_CONDITIONS_SATISFIED == "true") {
pythonBindingsFilePaths = [ '/tmp/${GIT_COMMIT}/bindings-artifact/python-bindings-*.zip' ]
artifacts.uploadArtifacts(pythonBindingsFilePaths, '/iroha/bindings/python')
}
}
}
- cleanup {
- sh "rm -rf /tmp/${env.GIT_COMMIT}"
- cleanWs()
- }
+ }
+ }
+ }
+ }
+ }
+ post {
+ success {
+ script {
+ // merge pull request if everything is ok and clean stale docker images stored on EFS
+ if ( params.merge_pr ) {
+ def merge = load ".jenkinsci/github-api.groovy"
+ if (merge.mergePullRequest()) {
+ currentBuild.result = "SUCCESS"
+ def clean = load ".jenkinsci/docker-cleanup.groovy"
+ clean.doStaleDockerImagesCleanup()
+ }
+ else {
+ currentBuild.result = "FAILURE"
+ }
+ }
+ }
+ }
+ cleanup {
+ script {
+ def post = load ".jenkinsci/post-step.groovy"
+ def clean = load ".jenkinsci/docker-cleanup.groovy"
+ def notify = load ".jenkinsci/notifications.groovy"
+ notify.notifyBuildResults()
+
+ if (params.x86_64_linux || params.merge_pr) {
+ node ('x86_64_aws_test') {
+ post.cleanUp()
+ }
+ }
+ if (params.armv8_linux) {
+ node ('armv8') {
+ post.cleanUp()
+ clean.doDockerCleanup()
+ }
+ }
+ if (params.armv7_linux) {
+ node ('armv7') {
+ post.cleanUp()
+ clean.doDockerCleanup()
+ }
+ }
+ if (params.x86_64_macos || params.merge_pr) {
+ node ('mac') {
+ post.macCleanUp()
+ }
+ }
+ if (params.x86_64_win || params.merge_pr) {
+ node ('win') {
+ post.cleanUp()
}
}
}
From f8537eb0c154b8498c8820270a2fd47e933d0736 Mon Sep 17 00:00:00 2001
From: tyvision
Date: Fri, 29 Jun 2018 21:18:48 +0300
Subject: [PATCH 13/97] hotfixes to #1392
Signed-off-by: tyvision
---
.jenkinsci/release-build.groovy | 8 ++++++--
Jenkinsfile | 11 +++++++++--
2 files changed, 15 insertions(+), 4 deletions(-)
diff --git a/.jenkinsci/release-build.groovy b/.jenkinsci/release-build.groovy
index 422b567c83..5403512933 100644
--- a/.jenkinsci/release-build.groovy
+++ b/.jenkinsci/release-build.groovy
@@ -52,7 +52,9 @@ def doReleaseBuild() {
sh "chmod +x /tmp/${env.GIT_COMMIT}/entrypoint.sh"
iCRelease = docker.build("${DOCKER_REGISTRY_BASENAME}:${GIT_COMMIT}-${BUILD_NUMBER}-release", "--no-cache -f /tmp/${env.GIT_COMMIT}/Dockerfile /tmp/${env.GIT_COMMIT}")
if (env.GIT_LOCAL_BRANCH == 'develop') {
- iCRelease.push("${platform}-develop")
+ withDockerRegistry([ credentialsId: "docker-hub-credentials", url: "" ]) {
+ iCRelease.push("${platform}-develop")
+ }
if (manifest.manifestSupportEnabled()) {
manifest.manifestCreate("${DOCKER_REGISTRY_BASENAME}:develop",
["${DOCKER_REGISTRY_BASENAME}:x86_64-develop",
@@ -73,7 +75,9 @@ def doReleaseBuild() {
}
}
else if (env.GIT_LOCAL_BRANCH == 'master') {
- iCRelease.push("${platform}-latest")
+ withDockerRegistry([ credentialsId: "docker-hub-credentials", url: "" ]) {
+ iCRelease.push("${platform}-latest")
+ }
if (manifest.manifestSupportEnabled()) {
manifest.manifestCreate("${DOCKER_REGISTRY_BASENAME}:latest",
["${DOCKER_REGISTRY_BASENAME}:x86_64-latest",
diff --git a/Jenkinsfile b/Jenkinsfile
index cb78c4b76e..cfea416560 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -215,6 +215,7 @@ pipeline {
}
}
agent { label 'x86_64_aws_cov'}
+ options { skipDefaultCheckout() }
steps {
script {
def coverage = load '.jenkinsci/debug-build.groovy'
@@ -237,6 +238,7 @@ pipeline {
}
}
agent { label 'x86_64_aws_test' }
+ options { skipDefaultCheckout() }
steps {
script {
def debugBuild = load ".jenkinsci/debug-build.groovy"
@@ -262,6 +264,7 @@ pipeline {
}
}
agent { label 'armv7' }
+ options { skipDefaultCheckout() }
steps {
script {
def debugBuild = load ".jenkinsci/debug-build.groovy"
@@ -287,6 +290,7 @@ pipeline {
}
}
agent { label 'armv8' }
+ options { skipDefaultCheckout() }
steps {
script {
def debugBuild = load ".jenkinsci/debug-build.groovy"
@@ -313,6 +317,7 @@ pipeline {
}
}
agent { label 'mac' }
+ options { skipDefaultCheckout() }
steps {
script {
def macDebugBuild = load ".jenkinsci/mac-debug-build.groovy"
@@ -346,6 +351,7 @@ pipeline {
parallel {
stage('lcov_cobertura') {
agent { label 'x86_64_aws_cov' }
+ options { skipDefaultCheckout() }
steps {
script {
def coverage = load '.jenkinsci/debug-build.groovy'
@@ -355,6 +361,7 @@ pipeline {
}
stage('sonarqube') {
agent { label 'x86_64_aws_cov' }
+ options { skipDefaultCheckout() }
steps {
script {
def coverage = load '.jenkinsci/debug-build.groovy'
@@ -379,6 +386,7 @@ pipeline {
}
}
agent { label 'x86_64_aws_build' }
+ options { skipDefaultCheckout() }
steps {
script {
def releaseBuild = load '.jenkinsci/release-build.groovy'
@@ -401,11 +409,10 @@ pipeline {
beforeAgent true
anyOf {
expression { return params.Doxygen }
- expression { return GIT_LOCAL_BRANCH ==~ /(master|develop)/ }
expression { return REST_PR_CONDITIONS_SATISFIED == "true" }
}
}
- agent { label 'x86_64_aws_cov' }
+ agent { label 'x86_64_aws_docs' }
steps {
script {
def doxygen = load ".jenkinsci/doxygen.groovy"
From f07b9e314114e310dc58499a437f87305d4cc207 Mon Sep 17 00:00:00 2001
From: Akvinikym
Date: Mon, 2 Jul 2018 18:40:27 +0300
Subject: [PATCH 14/97] Stateful error responses to log (#1493)
Stateful error responses are now written to log
Signed-off-by: Akvinikym
---
irohad/ametsuchi/impl/temporary_wsv_impl.cpp | 89 +++++++-----
irohad/ametsuchi/impl/temporary_wsv_impl.hpp | 7 +-
irohad/ametsuchi/temporary_wsv.hpp | 15 +-
irohad/simulator/impl/simulator.cpp | 12 +-
irohad/validation/CMakeLists.txt | 3 +-
.../impl/stateful_validator_impl.cpp | 134 +++++++++++++++---
.../impl/stateful_validator_impl.hpp | 12 +-
irohad/validation/stateful_validator.hpp | 6 +-
.../validation/stateful_validator_common.hpp | 46 ++++++
.../irohad/simulator/simulator_test.cpp | 8 +-
.../irohad/validation/validation_mocks.hpp | 3 +-
11 files changed, 249 insertions(+), 86 deletions(-)
create mode 100644 irohad/validation/stateful_validator_common.hpp
diff --git a/irohad/ametsuchi/impl/temporary_wsv_impl.cpp b/irohad/ametsuchi/impl/temporary_wsv_impl.cpp
index ea5637ece1..5045bf09df 100644
--- a/irohad/ametsuchi/impl/temporary_wsv_impl.cpp
+++ b/irohad/ametsuchi/impl/temporary_wsv_impl.cpp
@@ -38,49 +38,68 @@ namespace iroha {
transaction_->exec("BEGIN;");
}
- bool TemporaryWsvImpl::apply(
+ expected::Result
+ TemporaryWsvImpl::apply(
const shared_model::interface::Transaction &tx,
- std::function apply_function) {
+ std::function(
+ const shared_model::interface::Transaction &, WsvQuery &)>
+ apply_function) {
const auto &tx_creator = tx.creatorAccountId();
command_executor_->setCreatorAccountId(tx_creator);
command_validator_->setCreatorAccountId(tx_creator);
- auto execute_command = [this, &tx_creator](auto &command) {
- auto account = wsv_->getAccount(tx_creator).value();
-
- // Temporary variant: going to be a chain of results in future pull
- // requests
- auto validation_result =
- boost::apply_visitor(*command_validator_, command.get())
- .match([](expected::Value &) { return true; },
- [this](expected::Error &e) {
- log_->error(e.error.toString());
- return false;
- });
- if (not validation_result) {
- return false;
- }
- auto execution_result =
- boost::apply_visitor(*command_executor_, command.get());
- return execution_result.match(
- [](expected::Value &v) { return true; },
- [this](expected::Error &e) {
- log_->error(e.error.toString());
- return false;
- });
+ auto execute_command = [this](auto &command, size_t command_index)
+ -> expected::Result {
+ // Validate command
+ return expected::map_error(
+ boost::apply_visitor(*command_validator_, command.get()),
+ [command_index](CommandError &error) {
+ return validation::CommandNameAndError{
+ error.command_name,
+ (boost::format("stateful validation error: could "
+ "not validate "
+ "command with index %d: %s")
+ % command_index % error.toString())
+ .str()};
+ })
+ // Execute commands
+ .and_res(expected::map_error(
+ boost::apply_visitor(*command_executor_, command.get()),
+ [command_index](CommandError &error) {
+ return validation::CommandNameAndError{
+ error.command_name,
+ (boost::format("stateful validation error: could not "
+ "execute command with index %d: %s")
+ % command_index % error.toString())
+ .str()};
+ }));
};
transaction_->exec("SAVEPOINT savepoint_;");
- auto result =
- apply_function(tx, *wsv_)
- and std::all_of(
- tx.commands().begin(), tx.commands().end(), execute_command);
- if (result) {
+
+ return apply_function(tx, *wsv_) | [this, &execute_command, &tx]()
+ -> expected::Result {
+ // check transaction's commands validness
+ const auto &commands = tx.commands();
+ validation::CommandNameAndError cmd_name_error;
+ for (size_t i = 0; i < commands.size(); ++i) {
+ // in case of failed command, rollback and return
+ if (not execute_command(commands[i], i)
+ .match(
+ [](expected::Value &) { return true; },
+ [&cmd_name_error](
+ expected::Error
+ &error) {
+ cmd_name_error = error.error;
+ return false;
+ })) {
+ transaction_->exec("ROLLBACK TO SAVEPOINT savepoint_;");
+ return expected::makeError(cmd_name_error);
+ }
+ }
+ // success
transaction_->exec("RELEASE SAVEPOINT savepoint_;");
- } else {
- transaction_->exec("ROLLBACK TO SAVEPOINT savepoint_;");
- }
- return result;
+ return {};
+ };
}
TemporaryWsvImpl::~TemporaryWsvImpl() {
diff --git a/irohad/ametsuchi/impl/temporary_wsv_impl.hpp b/irohad/ametsuchi/impl/temporary_wsv_impl.hpp
index 0b962d4c43..c4c27c5a4e 100644
--- a/irohad/ametsuchi/impl/temporary_wsv_impl.hpp
+++ b/irohad/ametsuchi/impl/temporary_wsv_impl.hpp
@@ -33,10 +33,11 @@ namespace iroha {
TemporaryWsvImpl(std::unique_ptr connection,
std::unique_ptr transaction);
- bool apply(
+ expected::Result apply(
const shared_model::interface::Transaction &,
- std::function function) override;
+ std::function(
+ const shared_model::interface::Transaction &, WsvQuery &)>
+ function) override;
~TemporaryWsvImpl() override;
diff --git a/irohad/ametsuchi/temporary_wsv.hpp b/irohad/ametsuchi/temporary_wsv.hpp
index edd02851d3..f7ae5a6ef8 100644
--- a/irohad/ametsuchi/temporary_wsv.hpp
+++ b/irohad/ametsuchi/temporary_wsv.hpp
@@ -22,6 +22,7 @@
#include "ametsuchi/wsv_command.hpp"
#include "ametsuchi/wsv_query.hpp"
+#include "validation/stateful_validator_common.hpp"
namespace shared_model {
namespace interface {
@@ -47,14 +48,16 @@ namespace iroha {
* Function parameters:
* - Transaction @see transaction
* - WsvQuery - world state view query interface for temporary storage
- * Function returns true if the transaction is successfully applied, false
- * otherwise.
- * @return True if transaction was successfully applied, false otherwise
+ * Function returns void result value, if transaction is successfully
+ * applied, and an error with the failed command's name otherwise
+ * @return void result value, if transaction was successfully applied, and
+ * the name and error of the first failed command otherwise
*/
- virtual bool apply(
+ virtual expected::Result apply(
const shared_model::interface::Transaction &,
- std::function function) = 0;
+ std::function(
+ const shared_model::interface::Transaction &, WsvQuery &)>
+ function) = 0;
virtual ~TemporaryWsv() = default;
};
diff --git a/irohad/simulator/impl/simulator.cpp b/irohad/simulator/impl/simulator.cpp
index 72efa5ec86..db68d96fde 100644
--- a/irohad/simulator/impl/simulator.cpp
+++ b/irohad/simulator/impl/simulator.cpp
@@ -93,9 +93,15 @@ namespace iroha {
temporaryStorageResult.match(
[&](expected::Value>
&temporaryStorage) {
- auto validated_proposal =
+ auto validated_proposal_and_errors =
validator_->validate(proposal, *temporaryStorage.value);
- notifier_.get_subscriber().on_next(validated_proposal);
+ // Temporary solution: validation errors are only logged for now;
+ // later they are going to be propagated upwards
+ notifier_.get_subscriber().on_next(
+ validated_proposal_and_errors.first);
+ for (const auto &transaction_and_error: validated_proposal_and_errors.second) {
+ log_->error(transaction_and_error.first.second);
+ }
},
[&](expected::Error &error) {
log_->error(error.error);
@@ -116,7 +122,7 @@ namespace iroha {
return static_cast(tx);
});
- auto sign_and_send = [this](const auto& any_block){
+ auto sign_and_send = [this](const auto &any_block) {
crypto_signer_->sign(*any_block);
block_notifier_.get_subscriber().on_next(any_block);
};
diff --git a/irohad/validation/CMakeLists.txt b/irohad/validation/CMakeLists.txt
index d0be3ffb91..ce6587ad11 100644
--- a/irohad/validation/CMakeLists.txt
+++ b/irohad/validation/CMakeLists.txt
@@ -26,7 +26,8 @@ target_link_libraries(stateful_validator
)
add_library(chain_validator
- impl/chain_validator_impl.cpp)
+ impl/chain_validator_impl.cpp
+ )
target_link_libraries(chain_validator
rxcpp
shared_model_interfaces
diff --git a/irohad/validation/impl/stateful_validator_impl.cpp b/irohad/validation/impl/stateful_validator_impl.cpp
index f273d51699..bcce329c06 100644
--- a/irohad/validation/impl/stateful_validator_impl.cpp
+++ b/irohad/validation/impl/stateful_validator_impl.cpp
@@ -17,46 +17,131 @@
#include "validation/impl/stateful_validator_impl.hpp"
+#include
#include
+#include
#include
+#include
#include "builders/protobuf/proposal.hpp"
+#include "common/result.hpp"
#include "validation/utils.hpp"
namespace iroha {
namespace validation {
+ /**
+ * Forms a readable error string from transaction signatures and account
+ * signatories
+ * @param signatures of the transaction
+ * @param signatories of the transaction creator
+ * @return well-formed error string
+ */
+ static std::string formSignaturesErrorMsg(
+ const shared_model::interface::types::SignatureRangeType &signatures,
+ const std::vector
+ &signatories) {
+ std::string signatures_string, signatories_string;
+ for (const auto &signature : signatures) {
+ signatures_string.append(signature.publicKey().toString().append("\n"));
+ }
+ for (const auto &signatory : signatories) {
+ signatories_string.append(signatory.toString().append("\n"));
+ }
+ return (boost::format(
+ "stateful validator error: signatures in transaction are not "
+ "account signatories:\n"
+ "signatures' public keys: %s\n"
+ "signatories: %s")
+ % signatures_string % signatories_string)
+ .str();
+ }
+
StatefulValidatorImpl::StatefulValidatorImpl() {
log_ = logger::log("SFV");
}
- std::shared_ptr
- StatefulValidatorImpl::validate(
+ validation::VerifiedProposalAndErrors StatefulValidatorImpl::validate(
const shared_model::interface::Proposal &proposal,
ametsuchi::TemporaryWsv &temporaryWsv) {
log_->info("transactions in proposal: {}",
proposal.transactions().size());
auto checking_transaction = [](const auto &tx, auto &queries) {
- return bool(queries.getAccount(tx.creatorAccountId()) |
- [&](const auto &account) {
- // Check if tx creator has account and has quorum to
- // execute transaction
- return boost::size(tx.signatures()) >= account->quorum()
- ? queries.getSignatories(tx.creatorAccountId())
- : boost::none;
- }
- |
- [&](const auto &signatories) {
- // Check if signatures in transaction are account
- // signatory
- return signaturesSubset(tx.signatures(), signatories)
- ? boost::make_optional(signatories)
- : boost::none;
- });
+ return expected::Result(
+ [&]() -> expected::Result<
+ std::shared_ptr,
+ validation::CommandNameAndError> {
+ // Check if tx creator has account
+ auto account = queries.getAccount(tx.creatorAccountId());
+ if (account) {
+ return expected::makeValue(*account);
+ }
+ return expected::makeError(validation::CommandNameAndError{
+ "Initial transaction verification: no such account",
+ (boost::format("stateful validator error: could not fetch "
+ "account with id %s")
+ % tx.creatorAccountId())
+ .str()});
+ }() |
+ [&](const auto &account)
+ -> expected::Result<
+ std::vector<
+ shared_model::interface::types::PubkeyType>,
+ validation::CommandNameAndError> {
+ // Check if account has signatories and quorum to execute
+ // transaction
+ if (boost::size(tx.signatures()) >= account->quorum()) {
+ auto signatories =
+ queries.getSignatories(tx.creatorAccountId());
+ if (signatories) {
+ return expected::makeValue(*signatories);
+ }
+ return expected::makeError(validation::CommandNameAndError{
+ "Initial transaction verification: could not fetch "
+ "signatories",
+ (boost::format("stateful validator error: could not fetch "
+ "signatories of "
+ "account %s")
+ % tx.creatorAccountId())
+ .str()});
+ }
+ return expected::makeError(validation::CommandNameAndError{
+ "Initial transaction verification: not enough signatures",
+ (boost::format(
+ "stateful validator error: not enough "
+ "signatures in transaction; account's quorum %d, "
+ "transaction's "
+ "signatures amount %d")
+ % account->quorum() % boost::size(tx.signatures()))
+ .str()});
+ } | [&tx](const auto &signatories)
+ -> expected::Result {
+ // Check if signatures in transaction are in account
+ // signatory
+ if (signaturesSubset(tx.signatures(), signatories)) {
+ return {};
+ }
+ return expected::makeError(validation::CommandNameAndError{
+ "Initial transaction verification: signatures are not "
+ "account's signatories",
+ formSignaturesErrorMsg(tx.signatures(), signatories)});
+ });
};
- // Filter only valid transactions
- auto filter = [&temporaryWsv, checking_transaction](auto &tx) {
- return temporaryWsv.apply(tx, checking_transaction);
+ // Filter only valid transactions and accumulate errors
+ auto transactions_errors_log = validation::TransactionsErrors{};
+ auto filter = [&temporaryWsv,
+ checking_transaction,
+ &transactions_errors_log](auto &tx) {
+ return temporaryWsv.apply(tx, checking_transaction)
+ .match(
+ [](expected::Value &) { return true; },
+ [&transactions_errors_log,
+ &tx](expected::Error &error) {
+ transactions_errors_log.push_back(
+ std::make_pair(error.error, tx.hash()));
+ return false;
+ });
};
// TODO: kamilsa IR-1010 20.02.2018 rework validation logic, so that this
@@ -67,6 +152,7 @@ namespace iroha {
| boost::adaptors::transformed([](auto &tx) {
return static_cast(tx);
});
+
auto validated_proposal = shared_model::proto::ProposalBuilder()
.createdTime(proposal.createdTime())
.height(proposal.height())
@@ -76,8 +162,10 @@ namespace iroha {
log_->info("transactions in verified proposal: {}",
validated_proposal.transactions().size());
- return std::make_shared(
- validated_proposal.getTransport());
+ return std::make_pair(std::make_shared(
+ validated_proposal.getTransport()),
+ transactions_errors_log);
}
+
} // namespace validation
} // namespace iroha
diff --git a/irohad/validation/impl/stateful_validator_impl.hpp b/irohad/validation/impl/stateful_validator_impl.hpp
index dcb47ea67f..8eeb643ee7 100644
--- a/irohad/validation/impl/stateful_validator_impl.hpp
+++ b/irohad/validation/impl/stateful_validator_impl.hpp
@@ -31,20 +31,12 @@ namespace iroha {
public:
StatefulValidatorImpl();
- /**
- * Function perform stateful validation on proposal
- * and return proposal with valid transactions
- * @param proposal - proposal for validation
- * @param wsv - temporary wsv for validation,
- * this wsv not affected on ledger,
- * all changes after removing wsv will be ignored
- * @return proposal with valid transactions
- */
- std::shared_ptr validate(
+ VerifiedProposalAndErrors validate(
const shared_model::interface::Proposal &proposal,
ametsuchi::TemporaryWsv &temporaryWsv) override;
logger::Logger log_;
+
};
} // namespace validation
} // namespace iroha
diff --git a/irohad/validation/stateful_validator.hpp b/irohad/validation/stateful_validator.hpp
index 4e267b226f..a1bfe7164f 100644
--- a/irohad/validation/stateful_validator.hpp
+++ b/irohad/validation/stateful_validator.hpp
@@ -18,6 +18,7 @@ limitations under the License.
#include "ametsuchi/temporary_wsv.hpp"
#include "interfaces/iroha_internal/proposal.hpp"
+#include "validation/stateful_validator_common.hpp"
namespace iroha {
namespace validation {
@@ -36,9 +37,10 @@ namespace iroha {
* @param wsv - temporary wsv for validation,
* this wsv not affected on ledger,
* all changes after removing wsv will be ignored
- * @return proposal with valid transactions
+ * @return proposal with valid transactions and errors, which appeared in
+ * a process of validating
*/
- virtual std::shared_ptr validate(
+ virtual VerifiedProposalAndErrors validate(
const shared_model::interface::Proposal &proposal,
ametsuchi::TemporaryWsv &temporaryWsv) = 0;
};
diff --git a/irohad/validation/stateful_validator_common.hpp b/irohad/validation/stateful_validator_common.hpp
new file mode 100644
index 0000000000..af57a695c1
--- /dev/null
+++ b/irohad/validation/stateful_validator_common.hpp
@@ -0,0 +1,46 @@
+/**
+ * Copyright Soramitsu Co., Ltd. All Rights Reserved.
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+#ifndef IROHA_STATEFUL_VALIDATOR_COMMON_HPP
+#define IROHA_STATEFUL_VALIDATOR_COMMON_HPP
+
+#include "common/types.hpp"
+
+namespace shared_model {
+ namespace interface {
+ class Proposal;
+ }
+}
+
+namespace iroha {
+ namespace validation {
+
+ /// Name of the failed command
+ using CommandName = std::string;
+
+ /// Error, with which the command failed
+ using CommandError = std::string;
+
+ /// Failed command's name and error
+ using CommandNameAndError = std::pair;
+
+ /// Type of per-transaction errors, which appeared during the validation
+ /// process; contains command names, the commands' errors themselves and
+ /// transaction hashes
+ using TransactionsErrors =
+ std::vector>;
+
+ /// Type of verified proposal together with errors appeared in the process;
+ /// each entry of the errors vector holds the failed command's name and
+ /// error, paired with the hash of the transaction where it happened
+ using VerifiedProposalAndErrors =
+ std::pair,
+ TransactionsErrors>;
+
+ } // namespace validation
+} // namespace iroha
+
+#endif // IROHA_STATEFUL_VALIDATOR_COMMON_HPP
diff --git a/test/module/irohad/simulator/simulator_test.cpp b/test/module/irohad/simulator/simulator_test.cpp
index 45827be262..c1d0f92972 100644
--- a/test/module/irohad/simulator/simulator_test.cpp
+++ b/test/module/irohad/simulator/simulator_test.cpp
@@ -15,7 +15,8 @@
* limitations under the License.
*/
-#include "simulator/impl/simulator.hpp"
+#include
+
#include "backend/protobuf/transaction.hpp"
#include "builders/protobuf/proposal.hpp"
#include "builders/protobuf/transaction.hpp"
@@ -27,6 +28,7 @@
#include "module/shared_model/builders/protobuf/test_block_builder.hpp"
#include "module/shared_model/builders/protobuf/test_proposal_builder.hpp"
#include "module/shared_model/cryptography/crypto_model_signer_mock.hpp"
+#include "simulator/impl/simulator.hpp"
using namespace iroha;
using namespace iroha::validation;
@@ -138,7 +140,9 @@ TEST_F(SimulatorTest, ValidWhenPreviousBlock) {
EXPECT_CALL(*query, getTopBlockHeight()).WillOnce(Return(1));
- EXPECT_CALL(*validator, validate(_, _)).WillOnce(Return(proposal));
+ EXPECT_CALL(*validator, validate(_, _))
+ .WillOnce(Return(
+ std::make_pair(proposal, iroha::validation::TransactionsErrors{})));
EXPECT_CALL(*ordering_gate, on_proposal())
.WillOnce(Return(rxcpp::observable<>::empty<
diff --git a/test/module/irohad/validation/validation_mocks.hpp b/test/module/irohad/validation/validation_mocks.hpp
index 5bdddbfa80..646f3f3500 100644
--- a/test/module/irohad/validation/validation_mocks.hpp
+++ b/test/module/irohad/validation/validation_mocks.hpp
@@ -21,6 +21,7 @@
#include
#include "interfaces/iroha_internal/proposal.hpp"
+#include "interfaces/common_objects/types.hpp"
#include "validation/chain_validator.hpp"
#include "validation/stateful_validator.hpp"
@@ -29,7 +30,7 @@ namespace iroha {
class MockStatefulValidator : public validation::StatefulValidator {
public:
MOCK_METHOD2(validate,
- std::shared_ptr(
+ VerifiedProposalAndErrors(
const shared_model::interface::Proposal &,
ametsuchi::TemporaryWsv &));
};
From 09c0affff300cd9215618873336260a833f82a1a Mon Sep 17 00:00:00 2001
From: Kitsu
Date: Mon, 2 Jul 2018 21:59:05 +0300
Subject: [PATCH 15/97] Add color diagnostics (#1515)
Signed-off-by: Kitsu
---
CMakeLists.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index d3ae2d5202..e3ad2bbdc5 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -9,7 +9,7 @@ endif()
PROJECT(iroha C CXX)
SET(CMAKE_POSITION_INDEPENDENT_CODE TRUE)
-SET(CMAKE_CXX_FLAGS "-std=c++1y -Wall")
+SET(CMAKE_CXX_FLAGS "-std=c++1y -Wall -fdiagnostics-color=always")
SET(CMAKE_CXX_FLAGS_RELEASE "-O3")
SET(CMAKE_CXX_FLAGS_DEBUG "-g -Wextra -Wno-unused-parameter -Wno-deprecated-declarations -O0")
SET(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)
From 5d3df507f0981c8874bf39c8ee6f5d432a0c9299 Mon Sep 17 00:00:00 2001
From: Kitsu
Date: Mon, 2 Jul 2018 21:59:46 +0300
Subject: [PATCH 16/97] Reuse Peer in mst.proto (#1522)
Signed-off-by: Kitsu
---
.../transport/impl/mst_transport_grpc.cpp | 4 ++--
schema/mst.proto | 9 ++-------
2 files changed, 4 insertions(+), 9 deletions(-)
diff --git a/irohad/multi_sig_transactions/transport/impl/mst_transport_grpc.cpp b/irohad/multi_sig_transactions/transport/impl/mst_transport_grpc.cpp
index 114148d561..035ba6931d 100644
--- a/irohad/multi_sig_transactions/transport/impl/mst_transport_grpc.cpp
+++ b/irohad/multi_sig_transactions/transport/impl/mst_transport_grpc.cpp
@@ -54,7 +54,7 @@ grpc::Status MstTransportGrpc::SendState(
auto from = std::make_shared(
shared_model::proto::PeerBuilder()
.address(peer.address())
- .pubkey(shared_model::crypto::PublicKey(peer.pubkey()))
+ .pubkey(shared_model::crypto::PublicKey(peer.peer_key()))
.build());
subscriber_.lock()->onNewState(std::move(from), std::move(newState));
@@ -76,7 +76,7 @@ void MstTransportGrpc::sendState(const shared_model::interface::Peer &to,
transport::MstState protoState;
auto peer = protoState.mutable_peer();
- peer->set_pubkey(shared_model::crypto::toBinaryString(to.pubkey()));
+ peer->set_peer_key(shared_model::crypto::toBinaryString(to.pubkey()));
peer->set_address(to.address());
for (auto &tx : providing_state.getTransactions()) {
auto addtxs = protoState.add_transactions();
diff --git a/schema/mst.proto b/schema/mst.proto
index 202b061201..4d721dadc6 100644
--- a/schema/mst.proto
+++ b/schema/mst.proto
@@ -2,17 +2,12 @@ syntax = "proto3";
package iroha.network.transport;
import "block.proto";
+import "primitive.proto";
import "google/protobuf/empty.proto";
-// TODO: @l4l (04/05/18) remove in favor of primitive.proto IR-1321
-message Peer {
- bytes pubkey = 1;
- string address = 2;
-}
-
message MstState {
repeated iroha.protocol.Transaction transactions = 1;
- Peer peer = 2;
+ iroha.protocol.Peer peer = 2;
}
service MstTransportGrpc {
From 52e3f86e67bf65beeaf55f9a14630d8573c3d2c6 Mon Sep 17 00:00:00 2001
From: Igor Egorov
Date: Tue, 3 Jul 2018 11:14:07 +0300
Subject: [PATCH 17/97] Fix tests for createRole with empty set of permissions
(#1520)
The behavior of createRole command was changed, so the tests had to be updated.
Signed-off-by: Igor Egorov
---
shared_model/packages/javascript/tests/txbuilder.js | 2 +-
test/module/shared_model/bindings/BuilderTest.java | 7 ++-----
test/module/shared_model/bindings/builder-test.py | 4 ++--
3 files changed, 5 insertions(+), 8 deletions(-)
diff --git a/shared_model/packages/javascript/tests/txbuilder.js b/shared_model/packages/javascript/tests/txbuilder.js
index af46727baf..9487e81804 100644
--- a/shared_model/packages/javascript/tests/txbuilder.js
+++ b/shared_model/packages/javascript/tests/txbuilder.js
@@ -128,7 +128,7 @@ test('ModelTransactionBuilder tests', function (t) {
validPermissions.set(iroha.Role_kAddPeer)
validPermissions.set(iroha.Role_kAddAssetQty)
- t.throws(() => correctTx.createRole('new_user_role', emptyPerm).build(), /Permission set should contain at least one permission/, 'Should throw Permission set should contain at least one permission')
+ t.doesNotThrow(() => correctTx.createRole('new_user_role', emptyPerm).build(), null, 'Should not throw any exceptions')
t.throws(() => correctTx.createRole('', validPermissions).build(), /Wrongly formed role_id, passed value: ''/, 'Should throw Wrongly formed role_id')
t.throws(() => correctTx.createRole('@@@', validPermissions).build(), /Wrongly formed role_id, passed value: '@@@'/, 'Should throw Wrongly formed role_id')
t.throws(() => correctTx.createRole('new_user_role', '').build(), /argument 3 of type 'shared_model::interface::RolePermissionSet/, 'Should throw ...argument 3 of type...')
diff --git a/test/module/shared_model/bindings/BuilderTest.java b/test/module/shared_model/bindings/BuilderTest.java
index cb72c692a2..c53aa22cdf 100644
--- a/test/module/shared_model/bindings/BuilderTest.java
+++ b/test/module/shared_model/bindings/BuilderTest.java
@@ -819,11 +819,8 @@ void createRoleWithInvalidName() {
@Test
void createRoleEmptyPermissions() {
- RolePermissionSet permissions = new RolePermissionSet();
-
- ModelTransactionBuilder builder = new ModelTransactionBuilder();
- builder.createRole("new_role", permissions);
- assertThrows(IllegalArgumentException.class, builder::build);
+ UnsignedTx tx = builder.createRole("new_role", new RolePermissionSet()).build();
+ assertTrue(checkProtoTx(proto(tx)));
}
/* ====================== DetachRole Tests ====================== */
diff --git a/test/module/shared_model/bindings/builder-test.py b/test/module/shared_model/bindings/builder-test.py
index c82ca7175e..f7e6d21b4b 100644
--- a/test/module/shared_model/bindings/builder-test.py
+++ b/test/module/shared_model/bindings/builder-test.py
@@ -532,8 +532,8 @@ def test_create_role_with_invalid_name(self):
self.base().createRole(name, iroha.RolePermissionSet([iroha.Role_kReceive, iroha.Role_kGetRoles])).build()
def test_create_role_with_empty_permissions(self):
- with self.assertRaises(ValueError):
- self.base().createRole("user", iroha.RolePermissionSet([])).build()
+ tx = self.builder.createRole("user", iroha.RolePermissionSet([])).build()
+ self.assertTrue(self.check_proto_tx(self.proto(tx)))
# ====================== DetachRole Tests ======================
From 376dd532402afc31c90c61077a8c2fe0b1649661 Mon Sep 17 00:00:00 2001
From: kamilsa
Date: Tue, 3 Jul 2018 11:31:41 +0300
Subject: [PATCH 18/97] Proposal and block txs validation (#1506)
* Update proposal and block validators
Signed-off-by: kamilsa
---
.../iroha_internal/transaction_sequence.cpp | 15 +++++++-
.../iroha_internal/transaction_sequence.hpp | 4 ++-
shared_model/validators/block_validator.hpp | 21 +++++++-----
.../validators/container_validator.hpp | 19 ++++++++---
shared_model/validators/default_validator.hpp | 22 +++++++-----
.../validators/proposal_validator.hpp | 21 +++++++-----
...gned_transactions_collection_validator.cpp | 34 +++++++++++++++++--
...gned_transactions_collection_validator.hpp | 5 ++-
.../transactions_collection_validator.hpp | 9 +++++
...gned_transactions_collection_validator.cpp | 34 +++++++++++++++++--
...gned_transactions_collection_validator.hpp | 5 ++-
11 files changed, 151 insertions(+), 38 deletions(-)
diff --git a/shared_model/interfaces/iroha_internal/transaction_sequence.cpp b/shared_model/interfaces/iroha_internal/transaction_sequence.cpp
index 2f21609c48..04aaf8f682 100644
--- a/shared_model/interfaces/iroha_internal/transaction_sequence.cpp
+++ b/shared_model/interfaces/iroha_internal/transaction_sequence.cpp
@@ -4,14 +4,18 @@
*/
#include "interfaces/iroha_internal/transaction_sequence.hpp"
+#include "validators/field_validator.hpp"
+#include "validators/transaction_validator.hpp"
namespace shared_model {
namespace interface {
+ template
iroha::expected::Result
TransactionSequence::createTransactionSequence(
const types::TransactionsForwardCollectionType &transactions,
- const validation::TransactionsCollectionValidator &validator) {
+ const validation::TransactionsCollectionValidator
+ &validator) {
auto answer = validator.validate(transactions);
if (answer.hasErrors()) {
return iroha::expected::makeError(answer.reason());
@@ -19,6 +23,15 @@ namespace shared_model {
return iroha::expected::makeValue(TransactionSequence(transactions));
}
+ template iroha::expected::Result
+ TransactionSequence::createTransactionSequence(
+ const types::TransactionsForwardCollectionType &transactions,
+ const validation::TransactionsCollectionValidator<
+ validation::TransactionValidator<
+ validation::FieldValidator,
+ validation::CommandValidatorVisitor<
+ validation::FieldValidator>>> &validator);
+
types::TransactionsForwardCollectionType
TransactionSequence::transactions() {
return transactions_;
diff --git a/shared_model/interfaces/iroha_internal/transaction_sequence.hpp b/shared_model/interfaces/iroha_internal/transaction_sequence.hpp
index 9c5a8fe0ed..43e3aa7f56 100644
--- a/shared_model/interfaces/iroha_internal/transaction_sequence.hpp
+++ b/shared_model/interfaces/iroha_internal/transaction_sequence.hpp
@@ -28,10 +28,12 @@ namespace shared_model {
* @return Result containing transaction sequence if validation successful
* and string message containing error otherwise
*/
+ template
static iroha::expected::Result
createTransactionSequence(
const types::TransactionsForwardCollectionType &transactions,
- const validation::TransactionsCollectionValidator &validator);
+ const validation::TransactionsCollectionValidator<
+ TransactionValidator> &validator);
/**
* Get transactions collection
diff --git a/shared_model/validators/block_validator.hpp b/shared_model/validators/block_validator.hpp
index 31b1b95c8c..975b762b02 100644
--- a/shared_model/validators/block_validator.hpp
+++ b/shared_model/validators/block_validator.hpp
@@ -33,10 +33,14 @@ namespace shared_model {
/**
* Class that validates block
*/
- template
- class BlockValidator : public ContainerValidator {
+ template
+ class BlockValidator
+ : public ContainerValidator {
public:
/**
* Applies validation on block
@@ -44,10 +48,11 @@ namespace shared_model {
* @return Answer containing found error if any
*/
Answer validate(const interface::Block &block) const {
- return ContainerValidator::validate(block,
- "Block");
+ return ContainerValidator<
+ interface::Block,
+ FieldValidator,
+ TransactionValidator,
+ TransactionsCollectionValidator>::validate(block, "Block");
}
};
diff --git a/shared_model/validators/container_validator.hpp b/shared_model/validators/container_validator.hpp
index afec679e87..79a7b5ecde 100644
--- a/shared_model/validators/container_validator.hpp
+++ b/shared_model/validators/container_validator.hpp
@@ -34,7 +34,8 @@ namespace shared_model {
*/
template
+ typename TransactionValidator,
+ typename TransactionsCollectionValidator>
class ContainerValidator {
protected:
void validateTransaction(
@@ -50,18 +51,25 @@ namespace shared_model {
ReasonsGroupType &reason,
const interface::types::TransactionsCollectionType &transactions)
const {
- for (const auto &tx : transactions) {
- validateTransaction(reason, tx);
+ auto answer = transactions_collection_validator_.validate(transactions);
+ if (answer.hasErrors()) {
+ reason.second.push_back(answer.reason());
}
}
public:
- ContainerValidator(
+ explicit ContainerValidator(
+ const TransactionsCollectionValidator
+ &transactions_collection_validator =
+ TransactionsCollectionValidator(),
const TransactionValidator &transaction_validator =
TransactionValidator(),
const FieldValidator &field_validator = FieldValidator())
- : transaction_validator_(transaction_validator),
+ : transactions_collection_validator_(
+ transactions_collection_validator),
+ transaction_validator_(transaction_validator),
field_validator_(field_validator) {}
+
Answer validate(const Iface &cont, std::string reason_name) const {
Answer answer;
ReasonsGroupType reason;
@@ -76,6 +84,7 @@ namespace shared_model {
}
private:
+ TransactionsCollectionValidator transactions_collection_validator_;
TransactionValidator transaction_validator_;
protected:
diff --git a/shared_model/validators/default_validator.hpp b/shared_model/validators/default_validator.hpp
index 072d2043ad..a1055100a8 100644
--- a/shared_model/validators/default_validator.hpp
+++ b/shared_model/validators/default_validator.hpp
@@ -27,20 +27,29 @@
#include "validators/query_validator.hpp"
#include "validators/signable_validator.hpp"
#include "validators/transaction_validator.hpp"
+#include "validators/transactions_collection/signed_transactions_collection_validator.hpp"
+#include "validators/transactions_collection/unsigned_transactions_collection_validator.hpp"
namespace shared_model {
namespace validation {
using DefaultTransactionValidator =
TransactionValidator>;
+
using DefaultQueryValidator =
QueryValidator>;
+
using DefaultBlocksQueryValidator = BlocksQueryValidator;
- using DefaultProposalValidator =
- ProposalValidator;
- using DefaultBlockValidator =
- BlockValidator;
+ using DefaultProposalValidator = ProposalValidator<
+ FieldValidator,
+ DefaultTransactionValidator,
+ UnsignedTransactionsCollectionValidator>;
+
+ using DefaultBlockValidator = BlockValidator<
+ FieldValidator,
+ DefaultTransactionValidator,
+ SignedTransactionsCollectionValidator>;
using DefaultEmptyBlockValidator = EmptyBlockValidator;
@@ -57,11 +66,6 @@ namespace shared_model {
const interface::Query &,
FieldValidator>;
- using DefaultSignableProposalValidator =
- SignableModelValidator;
-
using DefaultSignableBlockValidator =
SignableModelValidator
- class ProposalValidator : public ContainerValidator {
+ template
+ class ProposalValidator
+ : public ContainerValidator {
public:
/**
* Applies validation on proposal
@@ -44,10 +48,11 @@ namespace shared_model {
* @return Answer containing found error if any
*/
Answer validate(const interface::Proposal &prop) const {
- return ContainerValidator::validate(prop,
- "Proposal");
+ return ContainerValidator<
+ interface::Proposal,
+ FieldValidator,
+ TransactionValidator,
+ TransactionsCollectionValidator>::validate(prop, "Proposal");
}
};
diff --git a/shared_model/validators/transactions_collection/signed_transactions_collection_validator.cpp b/shared_model/validators/transactions_collection/signed_transactions_collection_validator.cpp
index c23266de92..528858d7a3 100644
--- a/shared_model/validators/transactions_collection/signed_transactions_collection_validator.cpp
+++ b/shared_model/validators/transactions_collection/signed_transactions_collection_validator.cpp
@@ -5,14 +5,44 @@
#include "validators/transactions_collection/signed_transactions_collection_validator.hpp"
+#include
+#include "validators/field_validator.hpp"
+#include "validators/transaction_validator.hpp"
+
namespace shared_model {
namespace validation {
- Answer SignedTransactionsCollectionValidator::validate(
+ template
+ Answer
+ SignedTransactionsCollectionValidator::validate(
const interface::types::TransactionsForwardCollectionType &transactions)
const {
- return Answer();
+ ReasonsGroupType reason;
+ reason.first = "Transaction list";
+ for (const auto &tx : transactions) {
+ auto answer =
+ SignedTransactionsCollectionValidator::transaction_validator_
+ .validate(tx);
+ if (answer.hasErrors()) {
+ auto message =
+ (boost::format("Tx %s : %s") % tx.hash().hex() % answer.reason())
+ .str();
+ reason.second.push_back(message);
+ }
+ }
+
+ Answer res;
+ if (not reason.second.empty()) {
+ res.addReason(std::move(reason));
+ }
+ return res;
}
+ template Answer SignedTransactionsCollectionValidator<
+ TransactionValidator>>::
+ validate(const interface::types::TransactionsForwardCollectionType
+ &transactions) const;
+
} // namespace validation
} // namespace shared_model
diff --git a/shared_model/validators/transactions_collection/signed_transactions_collection_validator.hpp b/shared_model/validators/transactions_collection/signed_transactions_collection_validator.hpp
index 882e8b3009..d5853b3c50 100644
--- a/shared_model/validators/transactions_collection/signed_transactions_collection_validator.hpp
+++ b/shared_model/validators/transactions_collection/signed_transactions_collection_validator.hpp
@@ -16,9 +16,12 @@ namespace shared_model {
* transaction from the collection to be unsigned. Batch logic should be
* checked
*/
+ template