tool-databridge/k8s/job-templates/log-import-ctllog-job.yaml
apiVersion: batch/v1
kind: Job
metadata:
  name: log-import-ctllog-job-{{JOB_ID}}
  namespace: {{NAMESPACE}}
spec:
  ttlSecondsAfterFinished: 86400   # clean up the finished Job after 24 hours
  backoffLimit: 0                  # do not retry a failed import
  template:
    spec:
      # Pin the pod to the node labelled {{JOB_HOST_KEY}}={{JOB_HOST_NAME}}
      affinity:
        nodeAffinity:
          requiredDuringSchedulingIgnoredDuringExecution:
            nodeSelectorTerms:
              - matchExpressions:
                  - key: {{JOB_HOST_KEY}}
                    operator: In
                    values:
                      - {{JOB_HOST_NAME}}
      containers:
        - name: importer
          image: {{IMAGE_REPO}}/databridge:{{IMAGE_TAG}}
          args: ["--pipeline", "log_to_postgres_ctllog"]
          env:
            - name: DATA_PVC_MOUNT_PATH
              value: "/data"
            - name: DBF_INPUT_DIR
              value: "/data/dbf-input"
            - name: MAPPING_FILE
              value: "/data/disney-mapping-v2.xlsx"
            - name: DB_HOST
              value: "{{DB_HOST}}"
            - name: DB_PORT
              value: "{{DB_PORT}}"
            - name: DB_NAME
              value: "{{DB_NAME}}"
            - name: DB_USER
              value: "{{DB_USER}}"
            - name: DB_PASSWORD
              value: "{{DB_PASSWORD}}"
            - name: BATCH_SIZE
              value: "{{BATCH_SIZE}}"
            - name: LOG_LEVEL
              value: "{{LOG_LEVEL}}"
          volumeMounts:
            - name: data-volume
              mountPath: "/data"
          resources:
            requests:
              cpu: "500m"
              memory: "800Mi"
            limits:
              cpu: "1000m"
              memory: "1700Mi"
      volumes:
        - name: data-volume
          persistentVolumeClaim:
            claimName: {{DATA_PVC_NAME}}
      restartPolicy: Never
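
For reference, a minimal rendering sketch, not part of the repository: the tooling that actually substitutes these {{PLACEHOLDER}} tokens is not shown in the source, so this Python snippet only illustrates one way it could be done. It fills each token from a dict and prints a manifest suitable for piping to `kubectl apply -f -`; every value in the dict is a made-up example, not a real setting.

    # render_job.py (hypothetical helper; names and values are assumptions)
    import re
    import sys

    def render(template_text: str, values: dict) -> str:
        """Replace every {{KEY}} token with values[KEY]; fail loudly on missing keys."""
        def repl(match: re.Match) -> str:
            key = match.group(1)
            if key not in values:
                raise KeyError(f"no value provided for placeholder {{{{{key}}}}}")
            return str(values[key])
        return re.sub(r"\{\{(\w+)\}\}", repl, template_text)

    if __name__ == "__main__":
        values = {
            "JOB_ID": "20250905-0001",              # example values only
            "NAMESPACE": "databridge",
            "JOB_HOST_KEY": "kubernetes.io/hostname",
            "JOB_HOST_NAME": "worker-1",
            "IMAGE_REPO": "registry.example.com",
            "IMAGE_TAG": "latest",
            "DB_HOST": "postgres",
            "DB_PORT": "5432",
            "DB_NAME": "ctllog",
            "DB_USER": "databridge",
            "DB_PASSWORD": "change-me",
            "BATCH_SIZE": "1000",
            "LOG_LEVEL": "INFO",
            "DATA_PVC_NAME": "databridge-data",
        }
        with open(sys.argv[1], encoding="utf-8") as f:
            print(render(f.read(), values))

A possible invocation, assuming the sketch is saved as render_job.py next to this template:

    python render_job.py log-import-ctllog-job.yaml | kubectl apply -f -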