trying to update versions to ccs to check if ci works fine #999

Open · wants to merge 2 commits into base: 3.7
300 changes: 129 additions & 171 deletions Jenkinsfile
@@ -1,195 +1,153 @@
#!/usr/bin/env groovy

/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

def doValidation() {
// Run all the tasks associated with `check` except for `test` - the latter is executed via `doTest`
sh """
./retry_zinc ./gradlew -PscalaVersion=$SCALA_VERSION clean check -x test \
--profile --continue -PxmlSpotBugsReport=true -PkeepAliveMode="session"
"""
}

def isChangeRequest(env) {
env.CHANGE_ID != null && !env.CHANGE_ID.isEmpty()
def config = jobConfig {
cron = '@weekly'
nodeLabel = 'docker-oraclejdk8'
testResultSpecs = ['junit': '**/build/test-results/**/TEST-*.xml']
slackChannel = '#kafka-warn'
timeoutHours = 4
runMergeCheck = false
downStreamValidate = true
downStreamRepos = ["common",]
nanoVersion = true
disableConcurrentBuilds = true
}

def doTest(env, target = "test") {
sh """./gradlew -PscalaVersion=$SCALA_VERSION ${target} \
--profile --continue -PkeepAliveMode="session" -PtestLoggingEvents=started,passed,skipped,failed \
-PignoreFailures=true -PmaxParallelForks=2 -PmaxTestRetries=1 -PmaxTestRetryFailures=10"""
junit '**/build/test-results/**/TEST-*.xml'
def retryFlagsString(jobConfig) {
if (jobConfig.isPrJob) " -PmaxTestRetries=1 -PmaxTestRetryFailures=5"
else ""
}

def doStreamsArchetype() {
echo 'Verify that Kafka Streams archetype compiles'

sh '''
./gradlew streams:publishToMavenLocal clients:publishToMavenLocal connect:json:publishToMavenLocal connect:api:publishToMavenLocal \
|| { echo 'Could not publish kafka-streams.jar (and dependencies) locally to Maven'; exit 1; }
'''

VERSION = sh(script: 'grep "^version=" gradle.properties | cut -d= -f 2', returnStdout: true).trim()

dir('streams/quickstart') {
sh '''
mvn clean install -Dgpg.skip \
|| { echo 'Could not `mvn install` streams quickstart archetype'; exit 1; }
'''

dir('test-streams-archetype') {
// Note the double quotes for variable interpolation
sh """
echo "Y" | mvn archetype:generate \
-DarchetypeCatalog=local \
-DarchetypeGroupId=org.apache.kafka \
-DarchetypeArtifactId=streams-quickstart-java \
-DarchetypeVersion=${VERSION} \
-DgroupId=streams.examples \
-DartifactId=streams.examples \
-Dversion=0.1 \
-Dpackage=myapps \
|| { echo 'Could not create new project using streams quickstart archetype'; exit 1; }
"""

dir('streams.examples') {
sh '''
mvn compile \
|| { echo 'Could not compile streams quickstart archetype project'; exit 1; }
'''
}
def downstreamBuildFailureOutput = ""
def publishStep(String vaultSecret) {
withGradleFile(["gradle/${vaultSecret}", "settings_file", "${env.WORKSPACE}/init.gradle", "GRADLE_NEXUS_SETTINGS"]) {
sh "./gradlewAll --init-script ${GRADLE_NEXUS_SETTINGS} --no-daemon uploadArchives"
}
}
}

def tryStreamsArchetype() {
try {
doStreamsArchetype()
} catch(err) {
echo 'Failed to build Kafka Streams archetype, marking this build UNSTABLE'
currentBuild.result = 'UNSTABLE'
}
}
def job = {
// https://github.com/confluentinc/common-tools/blob/master/confluent/config/dev/versions.json
def kafkaMuckrakeVersionMap = [
"2.3": "5.3.x",
"2.4": "5.4.x",
"2.5": "5.5.x",
"2.6": "6.0.x",
"2.7": "6.1.x",
"2.8": "6.2.x",
"3.0": "7.0.x",
"3.1": "7.1.x",
"3.2": "7.2.x",
"3.3": "7.3.x",
"3.4": "7.4.x",
"3.5": "7.5.x",
"3.6": "7.6.x",
"trunk": "master",
"master": "master"
]

if (config.nanoVersion && !config.isReleaseJob) {
ciTool("ci-update-version ${env.WORKSPACE} kafka", config.isPrJob)
}

stage("Check compilation compatibility with Scala 2.12") {
sh """
./retry_zinc ./gradlew clean build -x test \
--no-daemon --stacktrace -PxmlSpotBugsReport=true -PscalaVersion=2.12
"""
}

pipeline {
agent none

options {
disableConcurrentBuilds(abortPrevious: isChangeRequest(env))
}

stages {
stage('Build') {
parallel {

stage('JDK 8 and Scala 2.12') {
agent { label 'ubuntu' }
tools {
jdk 'jdk_1.8_latest'
maven 'maven_3_latest'
}
options {
timeout(time: 8, unit: 'HOURS')
timestamps()
}
environment {
SCALA_VERSION=2.12
}
steps {
doValidation()
doTest(env)
tryStreamsArchetype()
}
}

stage('JDK 11 and Scala 2.13') {
agent { label 'ubuntu' }
tools {
jdk 'jdk_11_latest'
}
options {
timeout(time: 8, unit: 'HOURS')
timestamps()
}
environment {
SCALA_VERSION=2.13
}
steps {
doValidation()
doTest(env)
echo 'Skipping Kafka Streams archetype test for Java 11'
}
}
stage("Compile and validate") {
sh """
./retry_zinc ./gradlew clean publishToMavenLocal build -x test \
--no-daemon --stacktrace -PxmlSpotBugsReport=true
"""
}

stage('JDK 17 and Scala 2.13') {
agent { label 'ubuntu' }
tools {
jdk 'jdk_17_latest'
}
options {
timeout(time: 8, unit: 'HOURS')
timestamps()
}
environment {
SCALA_VERSION=2.13
}
steps {
doValidation()
doTest(env)
echo 'Skipping Kafka Streams archetype test for Java 17'
}
if (config.publish) {
stage("Publish to artifactory") {
if (!config.isReleaseJob && !config.isPrJob) {
ciTool("ci-push-tag ${env.WORKSPACE} kafka")
}

stage('JDK 21 and Scala 2.13') {
agent { label 'ubuntu' }
tools {
jdk 'jdk_21_latest'
}
options {
timeout(time: 8, unit: 'HOURS')
timestamps()
}
environment {
SCALA_VERSION=2.13
}
steps {
doValidation()
doTest(env)
echo 'Skipping Kafka Streams archetype test for Java 21'
}
if (config.isDevJob) {
publishStep('artifactory_snapshots_settings')
} else if (config.isPreviewJob) {
publishStep('artifactory_preview_release_settings')
}
}
}
}

post {
always {
script {
if (!isChangeRequest(env)) {
node('ubuntu') {
step([$class: 'Mailer',
notifyEveryUnstableBuild: true,
recipients: "[email protected]",
sendToIndividuals: false])
}

if (config.publish && config.isDevJob && !config.isReleaseJob && !config.isPrJob) {
stage("Start Downstream Builds") {
def downstreamBranch = kafkaMuckrakeVersionMap[env.BRANCH_NAME]
config.downStreamRepos.each { repo ->
build(job: "confluentinc/${repo}/${downstreamBranch}",
wait: false,
propagate: false
)
}
}
}
}
}

def runTestsStepName = "Step run-tests"
def downstreamBuildsStepName = "Step cp-downstream-builds"
def testTargets = [
runTestsStepName: {
stage('Run tests') {
echo "Running unit and integration tests"
sh "./gradlew unitTest integrationTest " +
"--no-daemon --stacktrace --continue -PtestLoggingEvents=started,passed,skipped,failed -PmaxParallelForks=4 -PignoreFailures=true" +
retryFlagsString(config)
}
stage('Upload results') {
// Kafka failed test stdout files
archiveArtifacts artifacts: '**/testOutput/*.stdout', allowEmptyArchive: true

def summary = junit '**/build/test-results/**/TEST-*.xml'
def total = summary.getTotalCount()
def failed = summary.getFailCount()
def skipped = summary.getSkipCount()
summary = "Test results:\n\t"
summary = summary + ("Passed: " + (total - failed - skipped))
summary = summary + (", Failed: " + failed)
summary = summary + (", Skipped: " + skipped)
return summary;
}
},
downstreamBuildsStepName: {
echo "Building cp-downstream-builds"
stage('Downstream validation') {
if (config.isPrJob && config.downStreamValidate) {
downStreamValidation(config.nanoVersion, true, true)
} else {
return "skip downStreamValidation"
}
}
}
]

result = parallel testTargets
// combine results of the two targets into one result string
return result.runTestsStepName + "\n" + result.downstreamBuildsStepName
}

runJob config, job
echo downstreamBuildFailureOutput
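
The retryFlagsString helper in the Jenkinsfile above relies on Groovy's implicit return: the last evaluated expression of the if/else becomes the method's return value, so PR builds get extra Gradle retry flags and branch builds get an empty string. Below is a minimal standalone sketch of how those flags compose with the test command; the plain Map standing in for the real jobConfig object and the shortened Gradle command line are assumptions for illustration only, not part of this PR.

// Standalone Groovy sketch (runnable outside Jenkins). The Map stands in for
// the jobConfig object; only the isPrJob flag matters here.
def retryFlagsString(Map jobConfig) {
    // The last evaluated expression is returned implicitly.
    if (jobConfig.isPrJob) " -PmaxTestRetries=1 -PmaxTestRetryFailures=5"
    else ""
}

def baseCommand = "./gradlew unitTest integrationTest --continue -PignoreFailures=true"

def prCommand = baseCommand + retryFlagsString([isPrJob: true])
def branchCommand = baseCommand + retryFlagsString([isPrJob: false])

// PR builds append bounded retries; branch builds run the plain command.
assert prCommand.endsWith("-PmaxTestRetries=1 -PmaxTestRetryFailures=5")
assert branchCommand == baseCommand

println "PR build:     " + prCommand
println "branch build: " + branchCommand
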
2 changes: 1 addition & 1 deletion gradle.properties
@@ -23,7 +23,7 @@ group=org.apache.kafka
# - streams/quickstart/pom.xml
# - streams/quickstart/java/src/main/resources/archetype-resources/pom.xml
# - streams/quickstart/java/pom.xml
version=3.7.0-SNAPSHOT
version=7.7.0-0-ccs
scalaVersion=2.13.12
# Adding swaggerVersion in gradle.properties to have a single version in place for swagger
# New version of Swagger 2.2.14 requires minimum JDK 11.
2 changes: 1 addition & 1 deletion streams/quickstart/java/pom.xml
@@ -26,7 +26,7 @@
<parent>
<groupId>org.apache.kafka</groupId>
<artifactId>streams-quickstart</artifactId>
<version>3.7.0-SNAPSHOT</version>
<version>7.7.0-0-ccs</version>
<relativePath>..</relativePath>
</parent>

2 changes: 1 addition & 1 deletion streams/quickstart/java/src/main/resources/archetype-resources/pom.xml
@@ -29,7 +29,7 @@

<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<kafka.version>3.7.0-SNAPSHOT</kafka.version>
<kafka.version>7.7.0-0-ccs</kafka.version>
<slf4j.version>1.7.36</slf4j.version>
</properties>

2 changes: 1 addition & 1 deletion streams/quickstart/pom.xml
@@ -22,7 +22,7 @@
<groupId>org.apache.kafka</groupId>
<artifactId>streams-quickstart</artifactId>
<packaging>pom</packaging>
<version>3.7.0-SNAPSHOT</version>
<version>7.7.0-0-ccs</version>

<name>Kafka Streams :: Quickstart</name>

2 changes: 1 addition & 1 deletion tests/kafkatest/__init__.py
@@ -22,4 +22,4 @@
# Instead, in development branches, the version should have a suffix of the form ".devN"
#
# For example, when Kafka is at version 1.0.0-SNAPSHOT, this should be something like "1.0.0.dev0"
__version__ = '3.7.0.dev0'
__version__ = '7.7.0-0.dev0'
2 changes: 1 addition & 1 deletion tests/kafkatest/version.py
@@ -122,7 +122,7 @@ def get_version(node=None):
return DEV_BRANCH

DEV_BRANCH = KafkaVersion("dev")
DEV_VERSION = KafkaVersion("3.7.0-SNAPSHOT")
DEV_VERSION = KafkaVersion("7.7.0-0")

LATEST_METADATA_VERSION = "3.7"
