# Default values for deps.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.

# MySQL configurations for helm dependency
# you can find more details about this here https://artifacthub.io/packages/helm/bitnami/mysql
global:
  security:
    allowInsecureImages: true

cronJobLabels: {}

mysql:
  enabled: true
  fullnameOverride: "mysql"
  architecture: standalone
  image:
    registry: docker.io
    repository: bitnamilegacy/mysql
    tag: 8.0.37-debian-12-r2
    pullPolicy: "Always"
  auth:
    rootPassword: password  # to be provided by CI/CD
  primary:
    extraFlags: "--sort_buffer_size=10M"
    persistence:
      size: 50Gi
    service:
      nodePort: 3306
  initdbScripts:
    init_openmetadata_db_scripts.sql: |
      CREATE DATABASE openmetadata_db;
      CREATE USER 'openmetadata_user'@'%' IDENTIFIED BY 'openmetadata_password';
      GRANT ALL PRIVILEGES ON openmetadata_db.* TO 'openmetadata_user'@'%' WITH GRANT OPTION;
      commit;
    init_airflow_db_scripts.sql: |
      CREATE DATABASE airflow_db CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
      CREATE USER 'airflow_user'@'%' IDENTIFIED BY 'airflow_pass';
      GRANT ALL PRIVILEGES ON airflow_db.* TO 'airflow_user'@'%' WITH GRANT OPTION;
      commit;

# OpenSearch Helm Dependency
# you can find more details about this here https://artifacthub.io/packages/helm/opensearch-project-helm-charts/opensearch/2.12.2
opensearch:
  enabled: true
  clusterName: opensearch
  fullnameOverride: opensearch
  nodeGroup: ""
  imagePullPolicy: Always
  opensearchJavaOpts: "-Xmx1g -Xms1g"
  persistence:
    size: 30Gi
  protocol: http
  config:
    opensearch.yml: |
      plugins.security.disabled: true
      indices.query.bool.max_clause_count: 4096
  singleNode: true
  resources:
    requests:
      cpu: "100m"
      memory: "256M"
    limits:
      cpu: "2000m"
      memory: "2048M"

# Airflow configurations for helm dependency
# you can find more details about this here https://airflow.apache.org/docs/helm-chart/
airflow:
  enabled: true
  # Static secret key for Airflow webserver (strongly recommended for Airflow 3)
  # Without this, JWT token authentication may fail between Airflow components
  # Generate a new key with: openssl rand -hex 32
  # Set to empty string (~) to auto-generate (not recommended for production)
  webserverSecretKey: "a5f8c3e2d1b9a7f6e4c3b2a1f9e8d7c6b5a4f3e2d1c9b8a7f6e5d4c3b2a1f0e9"
  # Use OpenMetadata Airflow image with Airflow 3
  images:
    airflow:
      repository: docker.getcollate.io/openmetadata/ingestion
      # Quoted so YAML cannot mis-type a version-looking tag (e.g. "1.10" -> float)
      tag: "1.11.10"
      pullPolicy: "IfNotPresent"
  # Use KubernetesExecutor for production deployments (recommended)
  # For local development (Docker Desktop/Minikube), use LocalExecutor instead
  # Note: KubernetesExecutor requires shared DAGs storage (RWX PVC) which isn't available in Docker Desktop
  executor: "KubernetesExecutor"
  # Environment variables for Airflow configuration
  env:
    # This is required for OpenMetadata UI to fetch status of DAGs
    - name: AIRFLOW__API__AUTH_BACKENDS
      value: "airflow.api.auth.backend.session,airflow.api.auth.backend.basic_auth"
    # OpenMetadata Airflow Apis Plugin DAGs Configuration
    - name: AIRFLOW__OPENMETADATA_AIRFLOW_APIS__DAG_GENERATED_CONFIGS
      value: "/opt/airflow/dags"
    # OpenMetadata Airflow Secrets Manager Configuration
    - name: AIRFLOW__OPENMETADATA_SECRETS_MANAGER__AWS_REGION
      value: ""
    - name: AIRFLOW__OPENMETADATA_SECRETS_MANAGER__AWS_ACCESS_KEY_ID
      value: ""
    - name: AIRFLOW__OPENMETADATA_SECRETS_MANAGER__AWS_ACCESS_KEY
      value: ""
    # Workaround for Airflow 3 + MySQL compatibility issue
    # Downgrade FAB provider to avoid CREATE INDEX IF NOT EXISTS issue
    - name: _PIP_ADDITIONAL_REQUIREMENTS
      value: "apache-airflow-providers-fab==2.4.4"
  # Create admin user
  webserver:
    defaultUser:
      enabled: true
      role: Admin
      username: admin
      email: spiderman@superhero.org
      firstName: Peter
      lastName: Parker
      password: admin
  # Disable internal PostgreSQL, use external MySQL
  postgresql:
    enabled: false
  # Worker configuration for KubernetesExecutor
  # Set replicas to 0 for LocalExecutor (local development)
  workers:
    replicas: 2
  # Disable Flower
  flower:
    enabled: false
  # Disable internal Redis
  redis:
    enabled: false
  # Configure external MySQL database
  # Using downgraded FAB provider (2.4.4) to avoid CREATE INDEX IF NOT EXISTS issue
  data:
    metadataConnection:
      user: airflow_user
      pass: airflow_pass
      protocol: mysql
      host: mysql
      port: 3306
      db: airflow_db
      sslmode: disable
  # Service account configuration (required for KubernetesExecutor)
  # DAGs persistence configuration
  dags:
    persistence:
      enabled: true
      storageClassName: ""
      size: 1Gi
  # Logs persistence configuration
  logs:
    persistence:
      enabled: true
      storageClassName: ""
      size: 1Gi
  # API server needs access to DAGs volume for OpenMetadata dynamic DAG generation
  apiServer:
    extraVolumes:
      - name: dags
        persistentVolumeClaim:
          claimName: '{{ include "airflow.fullname" . }}-dags'
    extraVolumeMounts:
      - name: dags
        mountPath: /opt/airflow/dags