@@ -23,19 +23,19 @@ permissions:
 jobs:
   workflow_approval:
     name: Approve workflow
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-24.04
     environment: workflow-approval
     steps:
       - name: Approve workflow
         run: echo For security reasons, all pull requests need to be approved first before running any automated CI.

   fossa-scan:
     continue-on-error: true
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     needs:
       - workflow_approval
     steps:
-      - name: Checkout
+      - name: Checkout
         uses: actions/checkout@v3
         with:
           ref: ${{github.event.pull_request.head.sha}}
@@ -48,7 +48,7 @@
         env:
           FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }}
       - name: upload THIRDPARTY file
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: THIRDPARTY
           path: /tmp/THIRDPARTY
@@ -59,7 +59,7 @@
           FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }}

   semgrep:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     needs:
       - workflow_approval
     name: security-sast-semgrep
@@ -88,7 +88,7 @@

   build-unit-test:
     name: build and run unit test
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-24.04
     needs:
       - workflow_approval
     steps:
@@ -108,7 +108,7 @@ jobs:
           cp -R target/splunk-kafka-connect*.jar /tmp

       - name: Upload artifact
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: splunk-kafka-connector
           path: /tmp/splunk-kafka-connect*.jar
@@ -121,39 +121,42 @@ jobs:
           files: "target/surefire-reports/*.xml"

   e2e_test:
-    name: e2e test - kafka version- ${{ matrix.kafka_version }}
-    runs-on: ubuntu-20.04
+    name: e2e test (kafka:${{ matrix.kafka.kafka_version }}, splunk:${{ matrix.splunk.splunk_version }})
+    runs-on: ubuntu-latest
     needs:
       - build-unit-test
     strategy:
       fail-fast: false
       matrix:
-        include:
-          - kafka_version: "1.1.1"
-            kafka_package: "kafka_2.11-1.1.1.tgz"
-          - kafka_version: "2.0.0"
-            kafka_package: "kafka_2.11-2.0.0.tgz"
-          - kafka_version: "2.1.0"
-            kafka_package: "kafka_2.12-2.1.0.tgz"
-          - kafka_version: "2.6.0"
-            kafka_package: "kafka_2.13-2.6.0.tgz"
-          - kafka_version: "2.7.1"
-            kafka_package: "kafka_2.13-2.7.1.tgz"
-          - kafka_version: "2.8.0"
-            kafka_package: "kafka_2.13-2.8.0.tgz"
-          - kafka_version: "3.0.0"
-            kafka_package: "kafka_2.13-3.0.0.tgz"
-          - kafka_version: "3.1.0"
-            kafka_package: "kafka_2.13-3.1.0.tgz"
-          - kafka_version: "3.3.1"
-            kafka_package: "kafka_2.13-3.3.1.tgz"
-          - kafka_version: "3.4.1"
-            kafka_package: "kafka_2.13-3.4.1.tgz"
+        splunk:
+          - splunk_version: "10.0.0"
+            splunk_filename: "splunk-10.0.0-e8eb0c4654f8-linux-amd64.tgz"
+          - splunk_version: "9.4.4"
+            splunk_filename: "splunk-9.4.4-f627d88b766b-linux-amd64.tgz"
+        kafka:
           - kafka_version: "3.5.1"
-            kafka_package: "kafka_2.13-3.5.1.tgz"
+            kafka_package: "kafka_2.12-3.5.1.tgz"
+            confluent_major_version: "7.5"
+            confluent_package_version: "7.5.9"
+          - kafka_version: "3.6.2"
+            kafka_package: "kafka_2.12-3.6.2.tgz"
+            confluent_major_version: "7.6"
+            confluent_package_version: "7.6.6"
+          - kafka_version: "3.7.2"
+            kafka_package: "kafka_2.12-3.7.2.tgz"
+            confluent_major_version: "7.7"
+            confluent_package_version: "7.7.4"
+          - kafka_version: "3.8.1"
+            kafka_package: "kafka_2.12-3.8.1.tgz"
+            confluent_major_version: "7.8"
+            confluent_package_version: "7.8.3"
+          - kafka_version: "3.9.0"
+            kafka_package: "kafka_2.12-3.9.0.tgz"
+            confluent_major_version: "7.9"
+            confluent_package_version: "7.9.2"
     env:
-      CI_SPLUNK_VERSION: "9.0.2"
-      CI_SPLUNK_FILENAME: splunk-9.0.2-17e00c557dc1-Linux-x86_64.tgz
+      CI_SPLUNK_VERSION: ${{matrix.splunk.splunk_version}}
+      CI_SPLUNK_FILENAME: ${{matrix.splunk.splunk_filename}}
       CI_SPLUNK_HOST: 127.0.0.1
       CI_SPLUNK_PORT: 8089
       CI_SPLUNK_USERNAME: admin
@@ -164,6 +167,7 @@ jobs:
       CI_KAFKA_HEADER_INDEX: kafka
       CI_DATAGEN_IMAGE: rock1017/log-generator:latest
       CI_OLD_CONNECTOR_VERSION: v2.0.1
+      CI_KAFKA_VERSION_BEFORE_3_7: ${{ matrix.kafka.kafka_version == '3.5.1' || matrix.kafka.kafka_version == '3.6.2' }}

     steps:
       - name: Checkout
@@ -174,7 +178,7 @@ jobs:

       - name: Install Splunk
         run: |
-          cd /opt && wget -O $CI_SPLUNK_FILENAME 'https://d7wz6hmoaavd0.cloudfront.net/products/splunk/releases/'$CI_SPLUNK_VERSION'/linux/'$CI_SPLUNK_FILENAME''
+          cd /opt && wget -O $CI_SPLUNK_FILENAME 'https://download.splunk.com/products/splunk/releases/'$CI_SPLUNK_VERSION'/linux/'$CI_SPLUNK_FILENAME''
           sudo tar xzvf $CI_SPLUNK_FILENAME
           # Set user seed
           hashed_pwd=$(sudo /opt/splunk/bin/splunk hash-passwd $CI_SPLUNK_PASSWORD)
@@ -206,11 +210,11 @@ jobs:
           # Restart Splunk
           curl -k -u $CI_SPLUNK_USERNAME:$CI_SPLUNK_PASSWORD https://$CI_SPLUNK_HOST:$CI_SPLUNK_PORT/services/server/control/restart -X POST

-      - name: Install Kafka ${{ matrix.kafka_version }}
+      - name: Install Kafka ${{ matrix.kafka.kafka_version }}
         run: |
-          cd /tmp && wget https://archive.apache.org/dist/kafka/${{ matrix.kafka_version }}/${{ matrix.kafka_package }}
-          sudo tar xzf ${{ matrix.kafka_package }}
-          rm ${{ matrix.kafka_package }}
+          cd /tmp && wget https://archive.apache.org/dist/kafka/${{ matrix.kafka.kafka_version }}/${{ matrix.kafka.kafka_package }}
+          sudo tar xzf ${{ matrix.kafka.kafka_package }}
+          rm ${{ matrix.kafka.kafka_package }}
           sudo mv kafka_* /usr/local/kafka
           cd /usr/local/kafka && ls

@@ -232,17 +236,17 @@ jobs:
           check-latest: true

       - name: Download artifact
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: splunk-kafka-connector
           path: /tmp

       - name: Up the Schema Registry
         run: |
-          cd /tmp && wget https://packages.confluent.io/archive/7.1/confluent-community-7.1.1.tar.gz
-          sudo tar xzf confluent-community-7.1.1.tar.gz
-          cd confluent-7.1.1
-          bin/schema-registry-start ./etc/schema-registry/schema-registry.properties &
+          cd /tmp && wget https://packages.confluent.io/archive/${{ matrix.kafka.confluent_major_version }}/confluent-community-${{ matrix.kafka.confluent_package_version }}.tar.gz
+          sudo tar xzf confluent-community-${{ matrix.kafka.confluent_package_version }}.tar.gz
+          cd confluent-${{ matrix.kafka.confluent_package_version }}
+          sudo bin/schema-registry-start ./etc/schema-registry/schema-registry.properties &

       - name: Register the protobuf schema
         run: |
@@ -353,10 +357,10 @@ jobs:
           python test/lib/eventproducer_connector_upgrade.py 2000 --log-level=INFO
           # Check in splunk that we have received 2000 events for with ack and without ack tasks
           python test/lib/connector_upgrade.py --log-level=INFO
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
         if: failure()
         with:
-          name: kafka-connect-logs-${{ matrix.kafka_version }}
+          name: kafka-connect-logs-${{ matrix.kafka.kafka_version }}
           path: output.log

       - name: Install kafka connect
@@ -374,9 +378,9 @@ jobs:
           export PYTHONWARNINGS="ignore:Unverified HTTPS request"
           echo "Running functional tests....."
           python -m pytest --log-level=INFO
-
-      - uses: actions/upload-artifact@v3
+
+      - uses: actions/upload-artifact@v4
         if: failure()
         with:
-          name: splunk-events-${{ matrix.kafka_version }}
+          name: splunk-events-${{ matrix.kafka.kafka_version }}
           path: events.txt