初始合入databridge,用于后续数据的导出导入

This commit is contained in:
2025-07-25 11:54:20 +08:00
commit 71045d7531
22 changed files with 815 additions and 0 deletions

View File

@@ -0,0 +1,37 @@
#!/bin/bash
# Deploy a Kubernetes Job that exports CSV data from PostgreSQL.
# Renders k8s/job-templates/csv-export-job.yaml with envsubst and applies it.
# Every setting below can be overridden via environment variables.
set -euo pipefail

# Default configuration (override via environment).
JOB_ID=$(date +%Y%m%d-%H%M%S)
IMAGE_REPO=${IMAGE_REPO:-"harbor.dc.teramesh.cn/idrc/tools"}
IMAGE_TAG=${IMAGE_TAG:-"latest"}
DATA_PVC_NAME=${DATA_PVC_NAME:-"databridge-data-pvc"}
DB_HOST=${DB_HOST:-"postgres-service"}
DB_PORT=${DB_PORT:-"5432"}
DB_NAME=${DB_NAME:-"energy_data"}
DB_USER=${DB_USER:-"db_user"}
DB_PASSWORD=${DB_PASSWORD:-"db_password"}
EXPORT_QUERY=${EXPORT_QUERY:-"SELECT * FROM source_table"}
LOG_LEVEL=${LOG_LEVEL:-"INFO"}

# Export variables so envsubst can see them.
export JOB_ID IMAGE_REPO IMAGE_TAG DATA_PVC_NAME
export DB_HOST DB_PORT DB_NAME DB_USER DB_PASSWORD
export EXPORT_QUERY LOG_LEVEL

# Fail early if a required tool is missing.
for tool in envsubst kubectl; do
  if ! command -v "$tool" >/dev/null 2>&1; then
    echo "Required tool not found: $tool" >&2
    exit 1
  fi
done

# Check the template file exists.
TEMPLATE_FILE="../k8s/job-templates/csv-export-job.yaml"
if [ ! -f "$TEMPLATE_FILE" ]; then
  echo "Template file not found: $TEMPLATE_FILE" >&2
  exit 1
fi

# Render the template. Restrict envsubst to the variables exported above so
# any other '$...' text in the YAML (e.g. shell snippets in container args)
# is left untouched; a bare envsubst would substitute every env var.
OUTPUT_FILE="../k8s/jobs/csv-export-${JOB_ID}.yaml"
mkdir -p -- "$(dirname "$OUTPUT_FILE")"
envsubst '${JOB_ID} ${IMAGE_REPO} ${IMAGE_TAG} ${DATA_PVC_NAME} ${DB_HOST} ${DB_PORT} ${DB_NAME} ${DB_USER} ${DB_PASSWORD} ${EXPORT_QUERY} ${LOG_LEVEL}' \
  < "$TEMPLATE_FILE" > "$OUTPUT_FILE"

# Deploy the Job.
kubectl apply -f "$OUTPUT_FILE"
echo "Job deployed: databridge-csv-export-${JOB_ID}"
echo "To view logs: kubectl logs job/databridge-csv-export-${JOB_ID}"

View File

@@ -0,0 +1,39 @@
#!/bin/bash
# Deploy a Kubernetes Job that imports DBF data into PostgreSQL.
# Renders k8s/job-templates/dbf-import-job.yaml with envsubst and applies it.
# Every setting below can be overridden via environment variables.
set -euo pipefail

# Default configuration (override via environment).
JOB_ID=$(date +%Y%m%d-%H%M%S)
IMAGE_REPO=${IMAGE_REPO:-"harbor.dc.teramesh.cn/idrc/tools"}
IMAGE_TAG=${IMAGE_TAG:-"latest"}
BATCH_SIZE=${BATCH_SIZE:-"1000"}
LOG_LEVEL=${LOG_LEVEL:-"INFO"}
DATA_PVC_NAME=${DATA_PVC_NAME:-"data-import-export-pvc"}
# TODO: the placeholder values below must be replaced before real use.
DB_HOST=${DB_HOST:-"xx-postgres-service"}
DB_PORT=${DB_PORT:-"5432"}
DB_NAME=${DB_NAME:-"xx"}
DB_USER=${DB_USER:-"xx_db_user"}
DB_PASSWORD=${DB_PASSWORD:-"xx_db_password"}

# Export variables so envsubst can see them.
export JOB_ID IMAGE_REPO IMAGE_TAG DATA_PVC_NAME
export DB_HOST DB_PORT DB_NAME DB_USER DB_PASSWORD
export BATCH_SIZE LOG_LEVEL

# Fail early if a required tool is missing.
for tool in envsubst kubectl; do
  if ! command -v "$tool" >/dev/null 2>&1; then
    echo "Required tool not found: $tool" >&2
    exit 1
  fi
done

# Check the template file exists.
TEMPLATE_FILE="../k8s/job-templates/dbf-import-job.yaml"
if [ ! -f "$TEMPLATE_FILE" ]; then
  echo "Template file not found: $TEMPLATE_FILE" >&2
  exit 1
fi

# Render the template. Restrict envsubst to the variables exported above so
# any other '$...' text in the YAML (e.g. shell snippets in container args)
# is left untouched; a bare envsubst would substitute every env var.
OUTPUT_FILE="../k8s/jobs/dbf-import-job-${JOB_ID}.yaml"
mkdir -p -- "$(dirname "$OUTPUT_FILE")"
envsubst '${JOB_ID} ${IMAGE_REPO} ${IMAGE_TAG} ${DATA_PVC_NAME} ${DB_HOST} ${DB_PORT} ${DB_NAME} ${DB_USER} ${DB_PASSWORD} ${BATCH_SIZE} ${LOG_LEVEL}' \
  < "$TEMPLATE_FILE" > "$OUTPUT_FILE"

# Deploy the Job.
kubectl apply -f "$OUTPUT_FILE"
echo "Job deployed: dbf-import-job-${JOB_ID}"
echo "To view logs: kubectl logs job/dbf-import-job-${JOB_ID}"