# config.py
import os

# PostgreSQL connection settings
PG_CONFIG = {
    "host": "192.168.67.10",
    "port": 5432,
    "user": "postgres",
    "password": "postgres",
    "database": "dataops",
}
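
# Usage sketch (not part of the original file): PG_CONFIG's keys match the
# keyword arguments of psycopg2.connect() ("database" is accepted as an alias
# for "dbname"), so the dict can be unpacked directly. Assumes psycopg2 is
# installed; get_pg_connection is an illustrative helper name. The import is
# kept inside the function so importing config.py stays dependency-free.
def get_pg_connection():
    """Open a PostgreSQL connection from PG_CONFIG (illustrative helper)."""
    import psycopg2
    return psycopg2.connect(**PG_CONFIG)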

# Neo4j connection settings
NEO4J_CONFIG = {
    "uri": "bolt://192.168.67.1:7687",
    "user": "neo4j",
    "password": "Passw0rd",
}
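
# Usage sketch (not part of the original file): connecting with the official
# neo4j Python driver. Assumes the "neo4j" package is installed;
# get_neo4j_driver is an illustrative helper name, imported lazily for the
# same reason as above.
def get_neo4j_driver():
    """Create a Neo4j driver from NEO4J_CONFIG (illustrative helper)."""
    from neo4j import GraphDatabase
    return GraphDatabase.driver(
        NEO4J_CONFIG["uri"],
        auth=(NEO4J_CONFIG["user"], NEO4J_CONFIG["password"]),
    )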

# Airflow's own settings (if needed, e.g. to trigger other DAGs via the REST API)
AIRFLOW_CONFIG = {
    "base_url": "http://localhost:8080",
    "username": "admin",
    "password": "admin",
}

# Task retry settings
TASK_RETRY_CONFIG = {
    "retries": 2,               # number of retries
    "retry_delay_minutes": 1,   # delay between retries (minutes)
}
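
# Usage sketch (not part of the original file): the standard Airflow pattern
# is to fold these values into a DAG's default_args, converting the delay
# into a timedelta. build_default_args is an illustrative helper name.
def build_default_args():
    """Map TASK_RETRY_CONFIG onto Airflow-style default_args (illustrative)."""
    from datetime import timedelta
    return {
        "retries": TASK_RETRY_CONFIG["retries"],
        "retry_delay": timedelta(minutes=TASK_RETRY_CONFIG["retry_delay_minutes"]),
    }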

# Base paths for script files
# Use this path when deployed in the Airflow environment
AIRFLOW_BASE_PATH = '/opt/airflow'
DATAOPS_DAGS_PATH = os.path.join(AIRFLOW_BASE_PATH, 'dags')
SCRIPTS_BASE_PATH = os.path.join(AIRFLOW_BASE_PATH, 'dataops_scripts')

# Base upload path for uploaded CSV/Excel files
STRUCTURE_UPLOAD_BASE_PATH = "/data/csv"
STRUCTURE_UPLOAD_ARCHIVE_BASE_PATH = "/data/archive"

# Script path for the local development environment (if environments must be distinguished)
# LOCAL_SCRIPTS_BASE_PATH = "/path/to/local/scripts"

# Number of execution plans to retain
EXECUTION_PLAN_KEEP_COUNT = 5

# Idempotency switch for ETL jobs
ENABLE_ETL_IDEMPOTENCY = True
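
# Quick smoke test (not part of the original file): run `python config.py`
# to inspect the derived paths and flags.
if __name__ == "__main__":
    print("DAGs path:   ", DATAOPS_DAGS_PATH)
    print("Scripts path:", SCRIPTS_BASE_PATH)
    print("Upload path: ", STRUCTURE_UPLOAD_BASE_PATH)
    print("Archive path:", STRUCTURE_UPLOAD_ARCHIVE_BASE_PATH)
    print("Plans kept:  ", EXECUTION_PLAN_KEEP_COUNT)
    print("Idempotency: ", ENABLE_ETL_IDEMPOTENCY)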