pipeline {
    agent none

    options {
        disableConcurrentBuilds()
        skipDefaultCheckout(true)
        buildDiscarder(logRotator(numToKeepStr: '20', artifactNumToKeepStr: '20'))
    }

    environment {
        GIT_REMOTE_URL = 'http://127.0.0.1:3000/GenarrativeAI/Genarrative.git'
    }

    parameters {
        choice(name: 'DEPLOY_TARGET', choices: ['development', 'release'], description: 'Logical import target; development uses the current Linux dev/build/dev-deploy agent')
        booleanParam(name: 'CONFIRM_RELEASE_DEPLOY_AGENT', defaultValue: false, description: 'Confirm that the release target already has a dedicated release deploy agent; the current Linux dev/build/dev-deploy agent must not stand in for the release deploy machine')
        string(name: 'SOURCE_BRANCH', defaultValue: 'master', description: 'Source branch for the import scripts')
        string(name: 'COMMIT_HASH', defaultValue: '', description: 'Source commit for the import scripts')
        string(name: 'NOTIFICATION_EMAILS', defaultValue: '', description: 'Extra notification emails for this run; merged with the Jenkins Secret Text credential genarrative-notification-emails when sending')
        string(name: 'DATABASE', defaultValue: 'genarrative-prod', description: 'SpacetimeDB database')
        string(name: 'SPACETIME_SERVER', defaultValue: 'local', description: 'SpacetimeDB server alias')
        string(name: 'SPACETIME_SERVER_URL', defaultValue: '', description: 'Explicit SpacetimeDB server URL; when set, it takes precedence over SPACETIME_SERVER')
        string(name: 'SPACETIME_ROOT_DIR', defaultValue: '/stdb', description: 'spacetime CLI root-dir; the self-hosted release default is /stdb')
        choice(name: 'INPUT_SOURCE', choices: ['pipeline_archive', 'manual_upload'], description: 'Import data source; pipeline_archive pulls from the export pipeline archive, manual_upload uses a file uploaded with this build')
        string(name: 'INPUT_FILE', defaultValue: '', description: 'Optional in pipeline_archive mode; when empty, the export pipeline default archive path database-exports/spacetime-migration-<export build number>.json is used')
        string(name: 'EXPORT_JOB_NAME', defaultValue: 'Genarrative-Database-Export', description: 'Database export pipeline job name used in pipeline_archive mode')
        string(name: 'EXPORT_BUILD_NUMBER_TO_IMPORT', defaultValue: '', description: 'Required in pipeline_archive mode; the export build number to copy INPUT_FILE from')
        stashedFile 'MANUAL_INPUT_FILE'
        string(name: 'INCLUDE_TABLES', defaultValue: '', description: 'Optional comma-separated whitelist of table names')
        string(name: 'CHUNK_SIZE', defaultValue: '524288', description: 'Migration JSON chunk size, default 512 KiB, used to avoid HTTP 413 errors')
        booleanParam(name: 'DRY_RUN', defaultValue: true, description: 'Validate the import only, without writing any data')
        booleanParam(name: 'INCREMENTAL', defaultValue: true, description: 'Incremental import; skip rows that already exist or conflict')
        booleanParam(name: 'REPLACE_EXISTING', defaultValue: false, description: 'Replace the tables covered by this file; cannot be enabled together with INCREMENTAL')
        booleanParam(name: 'CONFIRM_IMPORT', defaultValue: false, description: 'Must be checked when DRY_RUN=false')
        string(name: 'CONFIRM_DATABASE', defaultValue: '', description: 'Must exactly match DATABASE when DRY_RUN=false')
        string(name: 'CONFIRM_INPUT_FILE', defaultValue: '', description: 'Must confirm the input file when DRY_RUN=false; for pipeline_archive enter the actual archived input path, for manual_upload enter the original uploaded file name')
        booleanParam(name: 'CONFIRM_REPLACE_EXISTING', defaultValue: false, description: 'Must be checked when REPLACE_EXISTING=true and DRY_RUN=false')
        string(name: 'PRE_IMPORT_BACKUP_DIRECTORY', defaultValue: 'database-pre-import-backups', description: 'Pre-import backup directory inside the Jenkins workspace, used for archiving')
        string(name: 'SERVER_BACKUP_DIRECTORY', defaultValue: '/var/lib/genarrative/database-backups', description: 'Optional additional directory on the target machine for the pre-import backup; leave empty to skip the server-side copy')
        booleanParam(name: 'RUN_SMOKE_TEST', defaultValue: true, description: 'Whether to run a service health check after a successful import')
        string(name: 'SMOKE_HEALTH_URL', defaultValue: 'http://127.0.0.1:8082/healthz', description: 'Health check URL local to the target machine')
        string(name: 'TOKEN_CREDENTIAL_ID', defaultValue: '', description: 'Optional Jenkins Secret Text credential ID holding the SpacetimeDB client connection token')
        string(name: 'BOOTSTRAP_SECRET_CREDENTIAL_ID', defaultValue: '', description: 'Optional Jenkins Secret Text credential ID holding the migration bootstrap secret')
    }

    stages {
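        // Illustrative parameter combinations, derived from the checks enforced by the Prepare stage below:
        //   - Validation only:      DRY_RUN=true (default); no CONFIRM_* fields are required.
        //   - Real archive import:  DRY_RUN=false, CONFIRM_IMPORT=true, CONFIRM_DATABASE identical to DATABASE,
        //                           CONFIRM_INPUT_FILE identical to the resolved archive path
        //                           (database-exports/spacetime-migration-<EXPORT_BUILD_NUMBER_TO_IMPORT>.json by default).
        //   - Real manual import:   as above, but CONFIRM_INPUT_FILE must equal the original uploaded file name.
        //   - Table replacement:    REPLACE_EXISTING=true additionally requires CONFIRM_REPLACE_EXISTING=true
        //                           and cannot be combined with INCREMENTAL.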
        stage('Prepare') {
            agent { label 'linux && genarrative-build' }
            steps {
                script {
                    if (params.DEPLOY_TARGET == 'release' && !params.CONFIRM_RELEASE_DEPLOY_AGENT) {
                        error('A release database import requires a dedicated release deploy agent to be configured first, and CONFIRM_RELEASE_DEPLOY_AGENT must be checked.')
                    }
                    if (!params.DATABASE?.trim()) {
                        error('DATABASE must not be empty.')
                    }
                    if (!(params.DATABASE.trim() ==~ /^[a-z0-9]+(-[a-z0-9]+)*$/)) {
                        error("DATABASE must match ^[a-z0-9]+(-[a-z0-9]+)*\$: ${params.DATABASE}")
                    }
                    def inputSource = params.INPUT_SOURCE?.trim()
                    if (!(inputSource in ['pipeline_archive', 'manual_upload'])) {
                        error("INPUT_SOURCE must be either pipeline_archive or manual_upload, got: ${params.INPUT_SOURCE}")
                    }
                    def manualInputFilename = env.MANUAL_INPUT_FILE_FILENAME?.trim()
                    if (inputSource == 'pipeline_archive') {
                        if (!params.EXPORT_JOB_NAME?.trim()) {
                            error('EXPORT_JOB_NAME must not be empty when INPUT_SOURCE=pipeline_archive.')
                        }
                        if (!params.EXPORT_BUILD_NUMBER_TO_IMPORT?.trim()) {
                            error('EXPORT_BUILD_NUMBER_TO_IMPORT must not be empty when INPUT_SOURCE=pipeline_archive.')
                        }
                        if (!(params.EXPORT_BUILD_NUMBER_TO_IMPORT.trim() ==~ /^[1-9][0-9]*$/)) {
                            error("EXPORT_BUILD_NUMBER_TO_IMPORT must be an export pipeline build number when INPUT_SOURCE=pipeline_archive: ${params.EXPORT_BUILD_NUMBER_TO_IMPORT}")
                        }
                        def pipelineInputFile = params.INPUT_FILE?.trim()
                        if (!pipelineInputFile) {
                            pipelineInputFile = "database-exports/spacetime-migration-${params.EXPORT_BUILD_NUMBER_TO_IMPORT.trim()}.json"
                        }
                        if (pipelineInputFile.startsWith('/')) {
                            error('INPUT_FILE must be a workspace-relative path inside the Jenkins archive when INPUT_SOURCE=pipeline_archive.')
                        }
                        if (pipelineInputFile.contains('..') || !(pipelineInputFile ==~ /^[A-Za-z0-9._\/-]+$/)) {
                            error("INPUT_FILE must be a safe archive-relative path when INPUT_SOURCE=pipeline_archive: ${pipelineInputFile}")
                        }
                        if (manualInputFilename) {
                            error('MANUAL_INPUT_FILE must not be uploaded when INPUT_SOURCE=pipeline_archive.')
                        }
                        env.EFFECTIVE_PIPELINE_ARCHIVE_INPUT_FILE = pipelineInputFile
                    } else {
                        if (!manualInputFilename) {
                            error('MANUAL_INPUT_FILE must be uploaded when INPUT_SOURCE=manual_upload.')
                        }
                        if (params.EXPORT_BUILD_NUMBER_TO_IMPORT?.trim()) {
                            error('EXPORT_BUILD_NUMBER_TO_IMPORT must not be set when INPUT_SOURCE=manual_upload.')
                        }
                        if (params.INPUT_FILE?.trim()) {
                            error('INPUT_FILE must not be set when INPUT_SOURCE=manual_upload; upload the data source via MANUAL_INPUT_FILE instead.')
                        }
                    }
                    if (params.INCREMENTAL && params.REPLACE_EXISTING) {
                        error('INCREMENTAL and REPLACE_EXISTING cannot both be enabled.')
                    }
                    if (!params.DRY_RUN) {
                        if (!params.CONFIRM_IMPORT) {
                            error('CONFIRM_IMPORT must be checked when DRY_RUN=false.')
                        }
                        if (params.CONFIRM_DATABASE?.trim() != params.DATABASE.trim()) {
                            error('CONFIRM_DATABASE must exactly match DATABASE when DRY_RUN=false.')
                        }
                        if (inputSource == 'pipeline_archive' && params.CONFIRM_INPUT_FILE?.trim() != env.EFFECTIVE_PIPELINE_ARCHIVE_INPUT_FILE) {
                            error('CONFIRM_INPUT_FILE must exactly match the actual archived input path when DRY_RUN=false.')
                        }
                        if (inputSource == 'manual_upload' && !params.CONFIRM_INPUT_FILE?.trim()) {
                            error('CONFIRM_INPUT_FILE must contain the original uploaded file name when DRY_RUN=false and INPUT_SOURCE=manual_upload.')
                        }
                        if (inputSource == 'manual_upload' && params.CONFIRM_INPUT_FILE?.trim() != manualInputFilename) {
                            error('CONFIRM_INPUT_FILE must exactly match the original uploaded file name when DRY_RUN=false and INPUT_SOURCE=manual_upload.')
                        }
                        if (params.REPLACE_EXISTING && !params.CONFIRM_REPLACE_EXISTING) {
                            error('CONFIRM_REPLACE_EXISTING must be checked when REPLACE_EXISTING=true and DRY_RUN=false.')
                        }
                    }
                    def backupDirectory = params.PRE_IMPORT_BACKUP_DIRECTORY?.trim() ?
                        params.PRE_IMPORT_BACKUP_DIRECTORY.trim() : 'database-pre-import-backups'
                    if (backupDirectory.startsWith('/') || backupDirectory.contains('..') || !(backupDirectory ==~ /^[A-Za-z0-9._\/-]+$/)) {
                        error("PRE_IMPORT_BACKUP_DIRECTORY must be a safe relative path: ${backupDirectory}")
                    }
                    env.PRE_IMPORT_BACKUP_DIRECTORY = backupDirectory
                    env.EFFECTIVE_PRE_IMPORT_BACKUP_NAME = "pre-import-${env.BUILD_NUMBER}.json"
                }
            }
        }

        stage('Import Database') {
            agent { label "${params.DEPLOY_TARGET == 'development' ? 'linux && genarrative-build' : 'linux && genarrative-release-deploy'}" }
            steps {
                checkout([
                    $class: 'GitSCM',
                    branches: [[name: "*/${params.SOURCE_BRANCH}"]],
                    doGenerateSubmoduleConfigurations: false,
                    extensions: [[$class: 'CleanBeforeCheckout']],
                    userRemoteConfigs: [[url: "${GIT_REMOTE_URL}"]],
                ])
                sh '''
                    bash -lc '
                        set -euo pipefail
                        chmod +x scripts/jenkins-checkout-source.sh
                        SOURCE_BRANCH="${SOURCE_BRANCH:-master}" \
                        COMMIT_HASH="${COMMIT_HASH:-}" \
                        GIT_REMOTE_URL="${GIT_REMOTE_URL}" \
                        SOURCE_COMMIT_FILE=".jenkins-source-commit" \
                            scripts/jenkins-checkout-source.sh
                    '
                '''
                script {
                    if (params.INPUT_SOURCE == 'pipeline_archive') {
                        echo "[database-import] Using pipeline archive input: job=${params.EXPORT_JOB_NAME}, build=${params.EXPORT_BUILD_NUMBER_TO_IMPORT}, file=${env.EFFECTIVE_PIPELINE_ARCHIVE_INPUT_FILE}"
                        copyArtifacts(
                            projectName: params.EXPORT_JOB_NAME,
                            selector: specific(params.EXPORT_BUILD_NUMBER_TO_IMPORT.trim()),
                            filter: "${env.EFFECTIVE_PIPELINE_ARCHIVE_INPUT_FILE},${env.EFFECTIVE_PIPELINE_ARCHIVE_INPUT_FILE}.sha256",
                            target: '.',
                            fingerprintArtifacts: true
                        )
                        env.EFFECTIVE_INPUT_FILE = env.EFFECTIVE_PIPELINE_ARCHIVE_INPUT_FILE
                    } else {
                        echo "[database-import] Using manually uploaded input: original_filename=${env.MANUAL_INPUT_FILE_FILENAME}"
                        sh 'bash -lc "rm -rf manual-import-upload && mkdir -p manual-import-upload"'
                        dir('manual-import-upload') {
                            unstash 'MANUAL_INPUT_FILE'
                        }
                        env.EFFECTIVE_INPUT_FILE = 'manual-import-upload/MANUAL_INPUT_FILE'
                        if (!params.DRY_RUN) {
                            sh '''
                                bash -lc '
                                    set -euo pipefail
                                    manual_filename="${MANUAL_INPUT_FILE_FILENAME:-}"
                                    if [[ -z "${manual_filename}" ]]; then
                                        echo "[database-import] MANUAL_INPUT_FILE_FILENAME is not available; the manually uploaded file name cannot be confirmed." >&2
                                        exit 1
                                    fi
                                    if [[ "${CONFIRM_INPUT_FILE}" != "${manual_filename}" ]]; then
                                        echo "[database-import] CONFIRM_INPUT_FILE must match the original name of the manually uploaded file: ${manual_filename}" >&2
                                        exit 1
                                    fi
                                '
                            '''
                        }
                    }
                    def credentialBindings = []
                    if (params.TOKEN_CREDENTIAL_ID?.trim()) {
                        credentialBindings.add(string(credentialsId: params.TOKEN_CREDENTIAL_ID.trim(), variable: 'GENARRATIVE_SPACETIME_TOKEN'))
                    }
                    if (params.BOOTSTRAP_SECRET_CREDENTIAL_ID?.trim()) {
                        credentialBindings.add(string(credentialsId: params.BOOTSTRAP_SECRET_CREDENTIAL_ID.trim(), variable: 'GENARRATIVE_SPACETIME_MIGRATION_BOOTSTRAP_SECRET'))
                    }
                    def importStep = {
                        sh '''
                            bash -lc '
                                set -euo pipefail
                                chmod +x scripts/deploy/maintenance-on.sh scripts/deploy/maintenance-off.sh
                                input_path="${EFFECTIVE_INPUT_FILE}"
                                if [[ "${input_path}" != /* ]]; then
                                    input_path="${WORKSPACE}/${input_path}"
                                fi
                                if [[ ! -s "${input_path}" ]]; then
                                    echo "[database-import] Input file does not exist or is empty: ${input_path}" >&2
                                    exit 1
                                fi
                                backup_dir="${PRE_IMPORT_BACKUP_DIRECTORY}"
                                backup_path="${backup_dir}/${EFFECTIVE_PRE_IMPORT_BACKUP_NAME}"
                                mkdir -p "${backup_dir}"
                                completed=0
                                on_exit() {
                                    local exit_code=$?
                                    if [[ "${exit_code}" -ne 0 && "${completed}" -ne 1 ]]; then
                                        echo "[database-import] Import failed; maintenance mode stays on. If the pre-import backup was generated, it is kept at ${backup_path}." >&2
                                    fi
                                    exit "${exit_code}"
                                }
                                trap on_exit EXIT
                                scripts/deploy/maintenance-on.sh "database import ${DATABASE}"
                                backup_args=(scripts/spacetime-export-migration-json.mjs --out "${backup_path}" --database "${DATABASE}")
                                import_args=(scripts/spacetime-import-migration-json.mjs --in "${input_path}" --database "${DATABASE}")
                                for args_name in backup_args import_args; do
                                    declare -n current_args="${args_name}"
                                    # When a server-url explicitly points at the target instance, do not also pass the default alias,
                                    # so CLI authorization and the HTTP import cannot land on different targets.
                                    if [[ -n "${SPACETIME_SERVER_URL}" ]]; then
                                        current_args+=(--server-url "${SPACETIME_SERVER_URL}")
                                    elif [[ -n "${SPACETIME_SERVER}" ]]; then
                                        current_args+=(--server "${SPACETIME_SERVER}")
                                    fi
                                    if [[ -n "${SPACETIME_ROOT_DIR}" ]]; then
                                        current_args+=(--root-dir "${SPACETIME_ROOT_DIR}")
                                    fi
                                done
                                backup_args+=(--note "jenkins pre-import backup ${BUILD_TAG}")
                                node "${backup_args[@]}"
                                test -s "${backup_path}"
                                sha256sum "${backup_path}" >"${backup_path}.sha256"
                                if [[ -n "${SERVER_BACKUP_DIRECTORY}" ]]; then
                                    mkdir -p "${SERVER_BACKUP_DIRECTORY}"
                                    install -m 0640 "${backup_path}" "${SERVER_BACKUP_DIRECTORY}/${EFFECTIVE_PRE_IMPORT_BACKUP_NAME}"
                                    install -m 0640 "${backup_path}.sha256" "${SERVER_BACKUP_DIRECTORY}/${EFFECTIVE_PRE_IMPORT_BACKUP_NAME}.sha256"
                                fi
                                if [[ -n "${INCLUDE_TABLES}" ]]; then
                                    import_args+=(--include "${INCLUDE_TABLES}")
                                fi
                                if [[ -n "${CHUNK_SIZE}" ]]; then
                                    import_args+=(--chunk-size "${CHUNK_SIZE}")
                                fi
                                if [[ "${DRY_RUN}" == "true" ]]; then
                                    import_args+=(--dry-run)
                                fi
                                if [[ "${INCREMENTAL}" == "true" ]]; then
                                    import_args+=(--incremental)
                                fi
                                if [[ "${REPLACE_EXISTING}" == "true" ]]; then
                                    import_args+=(--replace-existing)
                                fi
                                import_args+=(--note "jenkins database import ${BUILD_TAG}")
                                node "${import_args[@]}"
                                # After a successful import only run the local health check; business-level data verification
                                # still relies on the table-level statistics reported by the migration scripts.
                                if [[ "${RUN_SMOKE_TEST}" == "true" && -n "${SMOKE_HEALTH_URL}" ]]; then
                                    curl -fsS --max-time 10 "${SMOKE_HEALTH_URL}" >/dev/null
                                fi
                                scripts/deploy/maintenance-off.sh
                                completed=1
                                echo "[database-import] Finished: dry_run=${DRY_RUN}, database=${DATABASE}, source_commit=$(cat .jenkins-source-commit)"
                            '
                        '''
                    }
                    if (credentialBindings) {
                        withCredentials(credentialBindings) {
                            importStep()
                        }
                    } else {
                        importStep()
                    }
                }
            }
            post {
                always {
                    archiveArtifacts artifacts: "${env.PRE_IMPORT_BACKUP_DIRECTORY}/${env.EFFECTIVE_PRE_IMPORT_BACKUP_NAME},${env.PRE_IMPORT_BACKUP_DIRECTORY}/${env.EFFECTIVE_PRE_IMPORT_BACKUP_NAME}.sha256", allowEmptyArchive: true, fingerprint: true
                }
            }
        }
    }

    post {
        always {
            script {
                def notificationParameters = [
                    string(name: 'SOURCE_JOB_NAME', value: env.JOB_NAME),
                    string(name: 'SOURCE_BUILD_NUMBER', value: env.BUILD_NUMBER),
                    string(name: 'SOURCE_BUILD_URL', value: env.BUILD_URL ?: ''),
                    string(name: 'SOURCE_RESULT', value: currentBuild.currentResult ?: 'UNKNOWN'),
                    string(name: 'SOURCE_BRANCH', value: params.SOURCE_BRANCH ?: ''),
                    string(name: 'SOURCE_COMMIT', value: env.SOURCE_COMMIT ?: (params.COMMIT_HASH ?: '')),
                    string(name: 'BUILD_VERSION', value: env.EFFECTIVE_BUILD_VERSION ?: (params.BUILD_VERSION ?: '')),
                    string(name: 'DEPLOY_TARGET', value: params.DEPLOY_TARGET ?: ''),
                    string(name: 'DATABASE', value: params.DATABASE ?: ''),
                    string(name: 'SUMMARY', value: 'Database import pipeline finished'),
                ]
                def notificationRecipients = params.NOTIFICATION_EMAILS?.trim()
                if (notificationRecipients) {
                    notificationParameters.add(string(name: 'EMAIL_RECIPIENTS', value: notificationRecipients))
                }
                try {
                    build job: 'Genarrative-Notify-Email', wait: false, propagate: false, parameters: notificationParameters
                } catch (error) {
                    echo "Failed to trigger the email notification job: ${error.message}"
                }
            }
        }
        success {
            echo "Database import pipeline completed: target=${params.DEPLOY_TARGET}, database=${params.DATABASE}, dryRun=${params.DRY_RUN}"
        }
    }
}
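// Operator note: the archived pre-import-<build>.json can be verified against its .sha256 companion with
// "sha256sum -c"; the checksum file records the workspace-relative path, so run the check from a directory
// layout that matches it. Assumption, not verified here: because the backup is produced by
// scripts/spacetime-export-migration-json.mjs, it should be restorable by re-running this job with
// INPUT_SOURCE=manual_upload and the backup file as the upload.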