VERSION=0.34.4-alpha
# Airbyte Internal Job Database, see https://docs.airbyte.io/operator-guides/configuring-airbyte-db
DATABASE_USER=docker
DATABASE_PASSWORD=docker
DATABASE_HOST=db
DATABASE_PORT=5432
DATABASE_DB=airbyte
# translate manually DATABASE_URL=jdbc:postgresql://${DATABASE_HOST}:${DATABASE_PORT}/${DATABASE_DB} (do not include the username or password here)
DATABASE_URL=jdbc:postgresql://db:5432/airbyte
JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION=0.29.15.001
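# Example (hypothetical values): pointing the Job Database at an external Postgres instance instead
# of the bundled db container. Host, credentials, and database name below are placeholders; adjust
# them to your environment and keep DATABASE_URL consistent with the other DATABASE_* values.
# DATABASE_USER=airbyte_user
# DATABASE_PASSWORD=change-me
# DATABASE_HOST=postgres.internal.example.com
# DATABASE_PORT=5432
# DATABASE_DB=airbyte
# DATABASE_URL=jdbc:postgresql://postgres.internal.example.com:5432/airbyte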
# Airbyte Internal Config Database; defaults to reusing the Job Database when these are left empty
# Usually you do not need to set them; they are explicitly left empty to mute docker compose warnings
CONFIG_DATABASE_USER=
CONFIG_DATABASE_PASSWORD=
CONFIG_DATABASE_URL=
CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION=0.30.22.001
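# Example (hypothetical values): using a dedicated Config Database separate from the Job Database.
# The credentials and URL below are placeholders; the same jdbc:postgresql URL format as above applies.
# CONFIG_DATABASE_USER=airbyte_config_user
# CONFIG_DATABASE_PASSWORD=change-me
# CONFIG_DATABASE_URL=jdbc:postgresql://config-db.internal.example.com:5432/airbyte_configs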
RUN_DATABASE_MIGRATION_ON_STARTUP=true
# When using airbyte-db via the default docker image:
CONFIG_ROOT=/data
DATA_DOCKER_MOUNT=airbyte_data
DB_DOCKER_MOUNT=airbyte_db
# Temporal.io worker configuration
TEMPORAL_HOST=airbyte-temporal:7233
# Workspace storage for running jobs (logs, etc.)
WORKSPACE_ROOT=/tmp/workspace
WORKSPACE_DOCKER_MOUNT=airbyte_workspace
# Local mount to access local files from the filesystem
# todo (cgardens) - when we mount raw directories instead of named volumes, *_DOCKER_MOUNT must
# be the same as *_ROOT.
# Issue: https://github.com/airbytehq/airbyte/issues/578
LOCAL_ROOT=/tmp/airbyte_local
LOCAL_DOCKER_MOUNT=/tmp/airbyte_local
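# For example, a file placed under LOCAL_ROOT on the host (e.g. /tmp/airbyte_local/sample.csv) is
# what local-file connectors can read; the exact in-container path depends on the connector
# (commonly documented as /local for Airbyte's local CSV/JSON connectors - verify in the connector docs).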
# todo (cgardens) - hack to handle a behavior change in docker compose. *_PARENT directories MUST
# already exist on the host filesystem and MUST be parents of *_ROOT.
# Issue: https://github.com/airbytehq/airbyte/issues/577
HACK_LOCAL_ROOT_PARENT=/tmp
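# For example, with the defaults above, LOCAL_ROOT=/tmp/airbyte_local has HACK_LOCAL_ROOT_PARENT=/tmp,
# and /tmp must already exist on the host before docker compose is started.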
# Maximum simultaneous jobs
SUBMITTER_NUM_THREADS=10
# Job container images
# Usually you should not need to set these; they already have defaults set
JOB_POD_SOCAT_IMAGE=
JOB_POD_BUSYBOX_IMAGE=
JOB_POD_CURL_IMAGE=
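# Example (hypothetical image names): overriding the helper images, e.g. to pull them from a private
# registry mirror instead of Docker Hub. The registry, names, and tags below are placeholders;
# leave the variables empty to keep the built-in defaults.
# JOB_POD_SOCAT_IMAGE=registry.example.com/alpine/socat:latest
# JOB_POD_BUSYBOX_IMAGE=registry.example.com/busybox:latest
# JOB_POD_CURL_IMAGE=registry.example.com/curlimages/curl:latest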
# Miscellaneous
TRACKING_STRATEGY=segment
WEBAPP_URL=http://localhost:8000/
API_URL=/api/v1/
INTERNAL_API_HOST=airbyte-server:8001
LOG_LEVEL=INFO
WORKER_ENVIRONMENT=docker
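# Example (hypothetical host): when the webapp is served from a machine other than localhost,
# WEBAPP_URL would typically point at that machine instead, e.g.:
# WEBAPP_URL=http://airbyte.example.com:8000/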
# Cloud log backups. Don't use this unless you know what you're doing. Mainly for Airbyte devs.
# If you just want to capture Docker logs, you probably want to use something like this instead:
# https://docs.docker.com/config/containers/logging/configure/
S3_LOG_BUCKET=
S3_LOG_BUCKET_REGION=
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
S3_MINIO_ENDPOINT=
S3_PATH_STYLE_ACCESS=
GCS_LOG_BUCKET=
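# Example (hypothetical bucket and credentials): shipping job logs to S3. The values below are
# placeholders; typically only one log backend (S3/MinIO or GCS) is configured at a time.
# S3_LOG_BUCKET=my-airbyte-logs
# S3_LOG_BUCKET_REGION=us-east-1
# AWS_ACCESS_KEY_ID=<access-key-id>
# AWS_SECRET_ACCESS_KEY=<secret-access-key>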
# Docker Resource Limits
JOB_POD_MAIN_CONTAINER_CPU_REQUEST=
JOB_POD_MAIN_CONTAINER_CPU_LIMIT=
JOB_POD_MAIN_CONTAINER_MEMORY_REQUEST=
JOB_POD_MAIN_CONTAINER_MEMORY_LIMIT=
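# Example (assumed Kubernetes-style resource notation, i.e. fractional CPUs and Mi/Gi memory units;
# the values below are placeholders, not recommendations - verify against your deployment):
# JOB_POD_MAIN_CONTAINER_CPU_REQUEST=0.5
# JOB_POD_MAIN_CONTAINER_CPU_LIMIT=1
# JOB_POD_MAIN_CONTAINER_MEMORY_REQUEST=1Gi
# JOB_POD_MAIN_CONTAINER_MEMORY_LIMIT=2Gi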
# Max attempts per sync and max retries per attempt
SYNC_JOB_MAX_ATTEMPTS=3
# Timeout in days after which a synchronization is cancelled
SYNC_JOB_MAX_TIMEOUT_DAYS=3
# Set secret persistence store to use. Do not change this for existing installations!
SECRET_PERSISTENCE=NONE
# State Cloud Storage
STATE_STORAGE_S3_BUCKET_NAME=
STATE_STORAGE_S3_REGION=
STATE_STORAGE_MINIO_BUCKET_NAME=
STATE_STORAGE_MINIO_ENDPOINT=
STATE_STORAGE_GCS_BUCKET_NAME=
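# Example (hypothetical values): persisting state to a MinIO endpoint instead of S3 or GCS.
# The bucket name, service name, and port below are placeholders.
# STATE_STORAGE_MINIO_BUCKET_NAME=airbyte-state-storage
# STATE_STORAGE_MINIO_ENDPOINT=http://airbyte-minio:9000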