---
# One-shot batch Job that runs the DataBridge "dbf_to_postgres" pipeline:
# reads DBF files from a shared PVC and loads them into Postgres.
# {{…}} placeholders are substituted by the deployment tooling before apply;
# all templated scalars are quoted so empty/boolean/number-looking expansions
# cannot change the YAML type.
apiVersion: batch/v1
kind: Job
metadata:
  name: "dbf-import-job-{{JOB_ID}}"
spec:
  # Garbage-collect the finished Job (and its Pod) after 24 hours.
  ttlSecondsAfterFinished: 86400
  # backoffLimit is a Job-spec field (not Pod-template-spec): allow one retry.
  backoffLimit: 1
  template:
    spec:
      containers:
        - name: importer
          # Quoted: the value contains ':' which would otherwise mis-parse.
          image: "{{IMAGE_REPO}}/databridge:{{IMAGE_TAG}}"
          args: ["--pipeline", "dbf_to_postgres"]
          env:
            # Paths on the shared data volume mounted below.
            - name: DATA_PVC_MOUNT_PATH
              value: "/data"
            - name: DBF_INPUT_DIR
              value: "/data/dbf-input"
            - name: MAPPING_FILE
              value: "/data/mapping.xlsx"
            # Postgres connection settings (all env values must be strings).
            - name: DB_HOST
              value: "{{DB_HOST}}"
            - name: DB_PORT
              value: "{{DB_PORT}}"
            - name: DB_NAME
              value: "{{DB_NAME}}"
            - name: DB_USER
              value: "{{DB_USER}}"
            # NOTE(review): plain-text secret in the manifest; prefer
            # valueFrom.secretKeyRef to a Kubernetes Secret — confirm with
            # the templating pipeline before changing the interface.
            - name: DB_PASSWORD
              value: "{{DB_PASSWORD}}"
            # Pipeline tuning knobs.
            - name: BATCH_SIZE
              value: "{{BATCH_SIZE}}"
            - name: LOG_LEVEL
              value: "{{LOG_LEVEL}}"
          volumeMounts:
            - name: data-volume
              mountPath: "/data"
      volumes:
        - name: data-volume
          persistentVolumeClaim:
            # Quoted: an unquoted {{…}} at value start parses as a flow mapping.
            claimName: "{{DATA_PVC_NAME}}"
      # Jobs require Never or OnFailure; Never lets backoffLimit create
      # fresh Pods on failure instead of restarting in place.
      restartPolicy: Never