tool-databridge/k8s/job-templates/dbf-import-ctllog-pwc-job.yaml
Commit af11bddb60 by mingsheng.li, 2025-08-05 14:17:32 +08:00: Add a script for importing historical electricity-meter data


apiVersion: batch/v1
kind: Job
metadata:
  name: dbf-import-ctllog-pwc-job-{{JOB_ID}}
  namespace: {{NAMESPACE}}
spec:
  # Clean the finished Job up automatically after 24 hours.
  ttlSecondsAfterFinished: 86400
  # Do not retry a failed import; a rerun must be triggered explicitly.
  backoffLimit: 0
  template:
    spec:
      # Pin the pod to the node identified by {{JOB_HOST_KEY}}={{JOB_HOST_NAME}}.
      affinity:
        nodeAffinity:
          requiredDuringSchedulingIgnoredDuringExecution:
            nodeSelectorTerms:
              - matchExpressions:
                  - key: {{JOB_HOST_KEY}}
                    operator: In
                    values:
                      - {{JOB_HOST_NAME}}
      containers:
        - name: importer
          image: {{IMAGE_REPO}}/databridge:{{IMAGE_TAG}}
          args: ["--pipeline", "dbf_to_postgres_ctllog-pwc"]
          env:
            # Paths inside the container; the PVC is mounted at /data below.
            - name: DATA_PVC_MOUNT_PATH
              value: "/data"
            - name: DBF_INPUT_DIR
              value: "/data/dbf-input"
            - name: MAPPING_FILE
              value: "/data/disney-mapping-elec-v3.xlsx"
            # PostgreSQL connection settings, filled in at render time.
            - name: DB_HOST
              value: "{{DB_HOST}}"
            - name: DB_PORT
              value: "{{DB_PORT}}"
            - name: DB_NAME
              value: "{{DB_NAME}}"
            - name: DB_USER
              value: "{{DB_USER}}"
            - name: DB_PASSWORD
              value: "{{DB_PASSWORD}}"
            - name: BATCH_SIZE
              value: "{{BATCH_SIZE}}"
            - name: LOG_LEVEL
              value: "{{LOG_LEVEL}}"
          volumeMounts:
            - name: data-volume
              mountPath: "/data"
          resources:
            requests:
              cpu: "500m"
              memory: "800Mi"
            limits:
              cpu: "1000m"
              memory: "1700Mi"
      volumes:
        # PVC carrying the DBF input files and the mapping workbook.
        - name: data-volume
          persistentVolumeClaim:
            claimName: {{DATA_PVC_NAME}}
      restartPolicy: Never
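
Note that the template injects DB_PASSWORD as a plain rendered value. If the cluster keeps database credentials in a Secret, the same variable can be wired through valueFrom instead; a minimal sketch, assuming a hypothetical Secret named databridge-db-credentials with a password key (neither appears in the original template):

            # Hedged alternative: the Secret name and key are assumptions.
            - name: DB_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: databridge-db-credentials
                  key: password

This keeps the credential out of the rendered manifest and out of any template-value store.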
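
kubectl does not expand {{...}} markers itself, so some upstream step must substitute concrete values before apply. How this repository actually renders the template is not shown here; as a rough stand-in, a renderer could look like the following Python sketch, where every value in the dictionary is a hypothetical example, not a repo default:

# render_job.py -- hedged sketch; the repo's real templating step is unknown.
import re
import subprocess

def render(template_path: str, values: dict[str, str]) -> str:
    """Replace every {{KEY}} marker in the template with its value."""
    text = open(template_path, encoding="utf-8").read()

    def substitute(match: re.Match) -> str:
        key = match.group(1)
        if key not in values:
            raise KeyError(f"no value provided for placeholder {{{{{key}}}}}")
        return values[key]

    return re.sub(r"\{\{(\w+)\}\}", substitute, text)

if __name__ == "__main__":
    manifest = render("dbf-import-ctllog-pwc-job.yaml", {
        "JOB_ID": "20250805-001",
        "NAMESPACE": "databridge",
        "JOB_HOST_KEY": "kubernetes.io/hostname",
        "JOB_HOST_NAME": "worker-01",
        "IMAGE_REPO": "registry.example.com",
        "IMAGE_TAG": "latest",
        "DB_HOST": "postgres.databridge.svc",
        "DB_PORT": "5432",
        "DB_NAME": "metering",
        "DB_USER": "importer",
        "DB_PASSWORD": "change-me",
        "BATCH_SIZE": "1000",
        "LOG_LEVEL": "INFO",
        "DATA_PVC_NAME": "databridge-data",
    })
    # Feed the rendered manifest straight into kubectl.
    subprocess.run(["kubectl", "apply", "-f", "-"],
                   input=manifest.encode("utf-8"), check=True)

Because backoffLimit is 0 and the Job name embeds {{JOB_ID}}, each import run produces a distinct, non-retrying Job; a failed run is rerun by rendering the template again with a fresh JOB_ID.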