Add new CI for tiflow to enable integration tests of sync_diff_inspector #3387

Merged 6 commits on Feb 17, 2025
43 changes: 43 additions & 0 deletions pipelines/pingcap/tiflow/latest/pod-pull_syncdiff_integration_test.yaml
@@ -0,0 +1,43 @@
apiVersion: v1
kind: Pod
spec:
  securityContext:
    fsGroup: 1000
  containers:
    - name: runner
      image: hub.pingcap.net/jenkins/centos7_golang-1.23:latest
      tty: true
      env:
        - name: GOPATH
          value: /go
      resources:
        limits:
          memory: 8Gi
          cpu: "4"
    - name: net-tool
      image: hub.pingcap.net/jenkins/network-multitool
      tty: true
      resources:
        limits:
          memory: 128Mi
          cpu: 100m
    - name: mysql
      image: hub.pingcap.net/jenkins/mysql:5.7
      tty: true
      args: ["--server-id=1", "--log-bin", "--binlog-format=ROW"]
      env:
        - name: MYSQL_ALLOW_EMPTY_PASSWORD
          value: "1"
      resources:
        limits:
          memory: 2Gi
          cpu: "1"
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
          - matchExpressions:
              - key: kubernetes.io/arch
                operator: In
                values:
                  - amd64
116 changes: 116 additions & 0 deletions pipelines/pingcap/tiflow/latest/pull_syncdiff_integration_test.groovy
@@ -0,0 +1,116 @@
// REF: https://www.jenkins.io/doc/book/pipeline/syntax/#declarative-pipeline
// Keep this file under 400 lines: https://issues.jenkins.io/browse/JENKINS-37984
// should be triggered for master and latest release branches
@Library('tipipeline') _
final K8S_NAMESPACE = "jenkins-tidb"
final GIT_FULL_REPO_NAME = 'pingcap/tiflow'
final POD_TEMPLATE_FILE = 'pipelines/pingcap/tiflow/latest/pod-pull_syncdiff_integration_test.yaml'
final REFS = readJSON(text: params.JOB_SPEC).refs
pipeline {
agent {
kubernetes {
namespace K8S_NAMESPACE
yamlFile POD_TEMPLATE_FILE
defaultContainer 'runner'
}
}
environment {
FILE_SERVER_URL = 'http://fileserver.pingcap.net'
}
options {
timeout(time: 40, unit: 'MINUTES')
parallelsAlwaysFailFast()
}
stages {
stage('Debug info') {
steps {
sh label: 'Debug info', script: """
printenv
echo "-------------------------"
go env
echo "-------------------------"
echo "debug command: kubectl -n ${K8S_NAMESPACE} exec -ti ${NODE_NAME} bash"
"""
container(name: 'net-tool') {
sh 'dig github.com'
script {
currentBuild.description = "PR #${REFS.pulls[0].number}: ${REFS.pulls[0].title} ${REFS.pulls[0].link}"
}
}
}
}
stage('Checkout') {
options { timeout(time: 10, unit: 'MINUTES') }
steps {
dir(REFS.repo) {
cache(path: "./", includes: '**/*', key: prow.getCacheKey('git', REFS), restoreKeys: prow.getRestoreKeys('git', REFS)) {
script {
retry(2) {
prow.checkoutRefs(REFS, timeout = 5, credentialsId = '', gitBaseUrl = 'https://github.com', withSubmodule=true)
}
}
}
}
}
}
stage('Integration Test') {
steps {
dir("REFS.repo") {
script {
component.fetchAndExtractArtifact(FILE_SERVER_URL, 'dumpling', REFS.base_ref, REFS.pulls[0].title, 'centos7/dumpling.tar.gz', 'bin')
component.fetchAndExtractArtifact(FILE_SERVER_URL, 'tikv', REFS.base_ref, REFS.pulls[0].title, 'centos7/tikv-server.tar.gz', 'bin')
component.fetchAndExtractArtifact(FILE_SERVER_URL, 'pd', REFS.base_ref, REFS.pulls[0].title, 'centos7/pd-server.tar.gz', 'bin')
component.fetchAndExtractArtifact(FILE_SERVER_URL, 'tidb', REFS.base_ref, REFS.pulls[0].title, 'centos7/tidb-server.tar.gz', 'bin')
}
sh label: "download enterprise-tools-nightly", script: """
wget --no-verbose --retry-connrefused --waitretry=1 -t 3 -O tidb-enterprise-tools-nightly-linux-amd64.tar.gz https://download.pingcap.org/tidb-enterprise-tools-nightly-linux-amd64.tar.gz
tar -xzf tidb-enterprise-tools-nightly-linux-amd64.tar.gz
mv tidb-enterprise-tools-nightly-linux-amd64/bin/loader bin/
mv tidb-enterprise-tools-nightly-linux-amd64/bin/importer bin/
Collaborator:

    Are there specific requirements for the versions of these two tools? The product at this URL has not been updated for a long time: https://download.pingcap.org/tidb-enterprise-tools-nightly-linux-amd64.tar.gz

    Suggest caching these two products on the internal fileserver to reduce download time.

Contributor Author:

    I think we don't need to specify the versions of these two tools. Actually, I don't even know where the source code of the loader is.

Collaborator:

    Let me check where it is from.

    > I think we don't need to specify the versions of these two tools. Actually, I don't even know where the source code of the loader is.

    loader is from an archived repo: https://github.com/pingcap-inc/tidb-enterprise-tools

Contributor Author:

    So let's just use the binary.

    > caching these two products on the internal fileserver

    How do we cache these two binaries?

Collaborator:

    I will cache these two binaries on the internal fileserver. The current PR can be merged and take effect first, and after testing I will update it to the internal download address.

(A sketch of such an internal-fileserver download step follows this script block.)


rm -r tidb-enterprise-tools-nightly-linux-amd64
"""
sh label: "check", script: """
which bin/tikv-server
which bin/pd-server
which bin/tidb-server
which bin/dumpling
which bin/importer
ls -alh ./bin/
chmod +x bin/*
./bin/dumpling --version
./bin/tikv-server -V
./bin/pd-server -V
./bin/tidb-server -V
"""
sh label: 'sync_diff_inspector integration test', script: """
for i in {1..10}; do if mysqladmin ping -h0.0.0.0 -P 3306 -uroot --silent; then break; fi; if [ \$i -eq 10 ]; then exit 2; fi; sleep 1; done
export MYSQL_HOST="127.0.0.1"
export MYSQL_PORT=3306
make failpoint-enable
make sync-diff-inspector
make failpoint-disable
cd sync_diff_inspector && ln -sf ../bin . && ./tests/run.sh
"""
}
}
post{
unsuccessful {
sh label: 'archive logs', script: """
tar --warning=no-file-changed -cvzf logs.tar.gz \$(find /tmp/sync_diff_inspector_test/ -type f -name "*.log")
tar --warning=no-file-changed -cvzf fix_sqls.tar.gz \$(find /tmp/sync_diff_inspector_test/sync_diff_inspector/output/fix-on-tidb/ -type f -name "*.sql")
"""
archiveArtifacts artifacts: "logs.tar.gz", fingerprint: true
archiveArtifacts artifacts: "fix_sqls.tar.gz", fingerprint: true
sh label: 'print logs', script:'''
find /tmp/sync_diff_inspector_test -name "*.log" | xargs -I {} bash -c 'echo "**************************************"; echo "{}"; cat "{}"'
echo ""
echo "******************sync_diff.log********************"
cat /tmp/sync_diff_inspector_test/sync_diff_inspector/output/sync_diff.log
echo "********************fix.sql********************"
find /tmp/sync_diff_inspector_test/sync_diff_inspector/output/fix-on-tidb -name "*.sql" | xargs -I {} bash -c 'echo "**************************************"; echo "{}"; cat "{}"'
'''
}
}
}
}
}