---
# Batch Job: imports DBF files from a PVC-backed directory into Postgres
# via the databridge "dbf_to_postgres" pipeline. Template variables
# ({{...}}) are substituted by the deployment tooling before apply.
apiVersion: batch/v1
kind: Job
metadata:
  # Quoted: a value starting with "{{" would otherwise parse as a YAML flow mapping.
  name: "dbf-import-job-{{JOB_ID}}"
spec:
  # Auto-delete the finished Job (and its pods) after 24 hours.
  ttlSecondsAfterFinished: 86400
  template:
    spec:
      # Required for Jobs: the pod-spec default "Always" is invalid here.
      restartPolicy: Never
      containers:
        - name: importer
          image: "{{IMAGE_REPO}}/databridge:{{IMAGE_TAG}}"
          args: ["--pipeline", "dbf_to_postgres"]
          env:
            - name: DATA_PVC_MOUNT_PATH
              value: "/data"
            - name: DBF_INPUT_DIR
              value: "/data/dbf-input"
            - name: MAPPING_FILE
              value: "/data/mapping.xlsx"
            - name: DB_HOST
              value: "{{DB_HOST}}"
            - name: DB_PORT
              value: "{{DB_PORT}}"
            - name: DB_NAME
              value: "{{DB_NAME}}"
            - name: DB_USER
              value: "{{DB_USER}}"
            # NOTE(review): plaintext credential in an env var; prefer
            # valueFrom.secretKeyRef against a Kubernetes Secret.
            - name: DB_PASSWORD
              value: "{{DB_PASSWORD}}"
            - name: BATCH_SIZE
              value: "{{BATCH_SIZE}}"
            - name: LOG_LEVEL
              value: "{{LOG_LEVEL}}"
          volumeMounts:
            - name: data-volume
              mountPath: "/data"
          resources:
            requests:
              cpu: "500m"    # 0.5 CPU core
              memory: "1Gi"  # 1 GiB memory
            limits:
              cpu: "1000m"   # 1 CPU core (previous comment wrongly said 2)
              memory: "2Gi"  # 2 GiB memory (previous comment wrongly said 4 GB)
      volumes:
        - name: data-volume
          persistentVolumeClaim:
            claimName: "{{DATA_PVC_NAME}}"