初始合入databridge,用于后续数据的导出导入

This commit is contained in:
2025-07-25 11:54:20 +08:00
commit 71045d7531
22 changed files with 815 additions and 0 deletions

View File

@@ -0,0 +1,40 @@
---
# Batch Job that runs the databridge "csv_export" pipeline and writes CSV
# files onto the shared data PVC. All {{...}} placeholders are substituted
# by the caller before `kubectl apply`.
apiVersion: batch/v1
kind: Job
metadata:
  # Unique per run; {{JOB_ID}} must differ between submissions.
  name: "csv-export-job-{{JOB_ID}}"
spec:
  # Garbage-collect the finished Job (and its Pod) after 24 hours.
  ttlSecondsAfterFinished: 86400
  template:
    spec:
      containers:
        - name: exporter
          # Quoted: an unrendered leading "{{" would otherwise be parsed
          # by YAML as a flow mapping, breaking linting of the template.
          image: "{{IMAGE_REPO}}/databridge:{{IMAGE_TAG}}"
          args: ["--pipeline", "csv_export"]
          env:
            - name: DATA_PVC_MOUNT_PATH
              value: "/data"
            - name: OUTPUT_DIR
              value: "/data/csv-exports"
            - name: EXPORT_QUERY
              value: "{{EXPORT_QUERY}}"
            - name: DB_HOST
              value: "{{DB_HOST}}"
            - name: DB_PORT
              value: "{{DB_PORT}}"
            - name: DB_NAME
              value: "{{DB_NAME}}"
            - name: DB_USER
              value: "{{DB_USER}}"
            # NOTE(review): templating the password into the manifest leaves
            # it readable in `kubectl get job -o yaml`; prefer a Secret via
            # valueFrom.secretKeyRef.
            - name: DB_PASSWORD
              value: "{{DB_PASSWORD}}"
            - name: LOG_LEVEL
              value: "{{LOG_LEVEL}}"
          volumeMounts:
            - name: data-volume
              mountPath: "/data"
      volumes:
        - name: data-volume
          persistentVolumeClaim:
            claimName: "{{DATA_PVC_NAME}}"
      # Batch semantics: on failure a new Pod is created (up to backoffLimit)
      # rather than restarting the container in place.
      restartPolicy: Never
  backoffLimit: 1

View File

@@ -0,0 +1,42 @@
---
# Batch Job that runs the databridge "dbf_to_postgres" pipeline: reads DBF
# files and a mapping spreadsheet from the shared data PVC and loads them
# into PostgreSQL. All {{...}} placeholders are substituted before apply.
apiVersion: batch/v1
kind: Job
metadata:
  # Unique per run; {{JOB_ID}} must differ between submissions.
  name: "dbf-import-job-{{JOB_ID}}"
spec:
  # Garbage-collect the finished Job (and its Pod) after 24 hours.
  ttlSecondsAfterFinished: 86400
  template:
    spec:
      containers:
        - name: importer
          # Quoted: an unrendered leading "{{" would otherwise be parsed
          # by YAML as a flow mapping, breaking linting of the template.
          image: "{{IMAGE_REPO}}/databridge:{{IMAGE_TAG}}"
          args: ["--pipeline", "dbf_to_postgres"]
          env:
            - name: DATA_PVC_MOUNT_PATH
              value: "/data"
            - name: DBF_INPUT_DIR
              value: "/data/dbf-input"
            - name: MAPPING_FILE
              value: "/data/mapping.xlsx"
            - name: DB_HOST
              value: "{{DB_HOST}}"
            - name: DB_PORT
              value: "{{DB_PORT}}"
            - name: DB_NAME
              value: "{{DB_NAME}}"
            - name: DB_USER
              value: "{{DB_USER}}"
            # NOTE(review): templating the password into the manifest leaves
            # it readable in `kubectl get job -o yaml`; prefer a Secret via
            # valueFrom.secretKeyRef.
            - name: DB_PASSWORD
              value: "{{DB_PASSWORD}}"
            - name: BATCH_SIZE
              value: "{{BATCH_SIZE}}"
            - name: LOG_LEVEL
              value: "{{LOG_LEVEL}}"
          volumeMounts:
            - name: data-volume
              mountPath: "/data"
      volumes:
        - name: data-volume
          persistentVolumeClaim:
            claimName: "{{DATA_PVC_NAME}}"
      # Batch semantics: on failure a new Pod is created (up to backoffLimit)
      # rather than restarting the container in place.
      restartPolicy: Never
  backoffLimit: 1

22
k8s/pv.yaml Normal file
View File

@@ -0,0 +1,22 @@
---
# Statically provisioned local PersistentVolume backed by a ZFS dataset on
# a single node. Bound explicitly by the matching PVC (volumeName).
apiVersion: v1
kind: PersistentVolume
metadata:
  # Arbitrary name, but must be unique cluster-wide.
  name: zfs-data-import-export-pv
spec:
  capacity:
    storage: 50Gi  # Should match the ZFS dataset quota.
  accessModes:
    - ReadWriteOnce
  # Keep the on-disk data when the PV object is deleted.
  persistentVolumeReclaimPolicy: Retain
  # Empty storage class prevents any dynamic provisioner from claiming this PV.
  storageClassName: ""
  volumeMode: Filesystem
  local:
    # Points at the ZFS mount directory on the node.
    path: /data/data-import-export
  # local volumes require nodeAffinity so consuming Pods are scheduled onto
  # the node that actually hosts the directory.
  nodeAffinity:
    required:
      nodeSelectorTerms:
        - matchExpressions:
            - key: openebs.io/nodeid
              operator: In
              values:
                - node008-zina

13
k8s/pvc.yaml Normal file
View File

@@ -0,0 +1,13 @@
---
# Claim that binds explicitly to the statically created local PV, bypassing
# dynamic provisioning entirely.
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: data-import-export-pvc
  namespace: db  # Must be the same namespace as the Jobs that mount it.
spec:
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 50Gi
  # Must match the PV: an empty class disables dynamic provisioning here too.
  storageClassName: ""
  # Explicit one-to-one binding to the PV created above.
  volumeName: zfs-data-import-export-pv

28
k8s/rbac.yaml Normal file
View File

@@ -0,0 +1,28 @@
---
# Namespaced Role granting the databridge service the permissions it needs:
# full lifecycle management of Jobs, read-only access to Pods and their logs,
# and the ability to look up / create PVCs.
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
  name: databridge-role
rules:
  - apiGroups: ["batch"]
    resources: ["jobs"]
    verbs: ["get", "list", "watch", "create", "update", "patch", "delete"]
  - apiGroups: [""]
    resources: ["pods", "pods/log"]
    verbs: ["get", "list", "watch"]
  - apiGroups: [""]
    resources: ["persistentvolumeclaims"]
    verbs: ["get", "list", "create"]
---
# Grants the Role above to the default ServiceAccount.
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
  name: databridge-role-binding
subjects:
  - kind: ServiceAccount
    name: default
    # NOTE(review): the PVC in this change lives in namespace "db"; confirm
    # whether this binding should target that namespace's service account
    # instead of "default".
    namespace: default
roleRef:
  kind: Role
  name: databridge-role
  apiGroup: rbac.authorization.k8s.io