修改job定义文件
All checks were successful
continuous-integration/drone/push Build is passing

This commit is contained in:
mingsheng.li 2025-07-26 19:39:07 +08:00
parent 24da36b0b0
commit d995cbe05e
3 changed files with 32 additions and 19 deletions

View File

@ -2,6 +2,7 @@ apiVersion: batch/v1
kind: Job
metadata:
name: dbf-import-job-{{JOB_ID}}
namespace: {{NAMESPACE}}
spec:
ttlSecondsAfterFinished: 86400
template:
@ -14,9 +15,9 @@ spec:
- name: DATA_PVC_MOUNT_PATH
value: "/data"
- name: DBF_INPUT_DIR
value: "/data/dbf-input"
value: "/data/data-import-export/dbf-input"
- name: MAPPING_FILE
value: "/data/mapping.xlsx"
value: "/data/data-import-export/mapping.xlsx"
- name: DB_HOST
value: "{{DB_HOST}}"
- name: DB_PORT
@ -36,12 +37,13 @@ spec:
mountPath: "/data"
resources:
requests:
cpu: "500m" # 0.5 个 CPU 核心
memory: "1Gi" # 1 GB 内存
cpu: "500m"
memory: "1Gi"
limits:
cpu: "1000m" # 2 个 CPU 核心
memory: "2Gi" # 4 GB 内存
cpu: "1000m"
memory: "2Gi"
volumes:
- name: data-volume
persistentVolumeClaim:
claimName: {{DATA_PVC_NAME}}
claimName: {{DATA_PVC_NAME}}
restartPolicy: Never

View File

@ -8,18 +8,14 @@ IMAGE_TAG=${IMAGE_TAG:-"dev"}
BATCH_SIZE=${BATCH_SIZE:-"1000"}
LOG_LEVEL=${LOG_LEVEL:-"INFO"}
DATA_PVC_NAME=${DATA_PVC_NAME:-"data-import-export-pvc"}
# todo: 下面参数使用时需要修改
# 数据库配置(使用时需要修改)
DB_HOST=${DB_HOST:-"test-db.db.svc.cluster.local"}
DB_PORT=${DB_PORT:-"6432"}
DB_NAME=${DB_NAME:-"idrc"}
DB_USER=${DB_USER:-"idrc"}
DB_PASSWORD=${DB_PASSWORD:-"a8aa283c1b3ca0bdfe1d2669dd400f3d"}
# 导出变量用于envsubst
export JOB_ID IMAGE_REPO IMAGE_TAG DATA_PVC_NAME
export DB_HOST DB_PORT DB_NAME DB_USER DB_PASSWORD
export BATCH_SIZE LOG_LEVEL
NAMESPACE=${NAMESPACE:-"db"}
# 检查模板文件
TEMPLATE_FILE="dbf-import-job.yaml"
@ -28,12 +24,28 @@ if [ ! -f "$TEMPLATE_FILE" ]; then
exit 1
fi
# 处理模板
# 直接替换模板变量不使用envsubst
OUTPUT_FILE="dbf-import-job-${JOB_ID}.yaml"
envsubst < "$TEMPLATE_FILE" > "$OUTPUT_FILE"
sed -e "s|{{JOB_ID}}|$JOB_ID|g" \
-e "s|{{NAMESPACE}}|$NAMESPACE|g" \
-e "s|{{IMAGE_REPO}}|$IMAGE_REPO|g" \
-e "s|{{IMAGE_TAG}}|$IMAGE_TAG|g" \
-e "s|{{DATA_PVC_NAME}}|$DATA_PVC_NAME|g" \
-e "s|{{DB_HOST}}|$DB_HOST|g" \
-e "s|{{DB_PORT}}|$DB_PORT|g" \
-e "s|{{DB_NAME}}|$DB_NAME|g" \
-e "s|{{DB_USER}}|$DB_USER|g" \
-e "s|{{DB_PASSWORD}}|$DB_PASSWORD|g" \
-e "s|{{BATCH_SIZE}}|$BATCH_SIZE|g" \
-e "s|{{LOG_LEVEL}}|$LOG_LEVEL|g" \
"$TEMPLATE_FILE" > "$OUTPUT_FILE"
# 部署前验证
echo "Validating generated YAML..."
kubectl apply -f "$OUTPUT_FILE" -n "$NAMESPACE" --dry-run=client
# 部署Job
kubectl apply -f "$OUTPUT_FILE"
kubectl apply -f "$OUTPUT_FILE" -n "$NAMESPACE"
echo "Job deployed: dbf-import-job-${JOB_ID}"
echo "To view logs: kubectl logs job/dbf-import-job-${JOB_ID}"
echo "Job deployed in namespace $NAMESPACE: dbf-import-job-${JOB_ID}"
echo "To view logs: kubectl logs job/dbf-import-job-${JOB_ID} -n $NAMESPACE"

View File

@ -72,7 +72,6 @@ class DbfToPostgresPipeline(BasePipeline):
mapping_dict = self.load_mapping()
# 连接数据库
# todo: 本地调试时打开
db_config = self.config.get_database_config()
self.db = Database(**db_config)