zitadel/build/local/docker-compose-local.yml
version: '3.8'
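# services below are grouped into compose profiles ('backend', 'storage', 'frontend', ...)
# start a group with e.g. `docker-compose --profile backend --profile frontend up`
# (profiles require docker-compose v1.28 or newer)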
services:
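  # CockroachDB is the database ZITADEL stores its data in
  # (the backend containers reach it as ZITADEL_EVENTSTORE_HOST=db)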
  db:
    profiles: ['backend', 'storage']
    restart: always
    networks:
      - zitadel
    image: cockroachdb/cockroach:v21.2.3
    command: start-single-node --insecure --listen-addr=0.0.0.0
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8080/health?ready=1"]
      interval: 10s
      timeout: 30s
      retries: 5
      start_period: 20s
    ports:
      - 8080:8080
      - 26257:26257
  # schema changes on the database
  db-migrations:
    profiles: ['backend', 'storage']
    restart: on-failure
    networks:
      - zitadel
    depends_on:
      db:
        condition: service_healthy
    image: flyway/flyway:latest
    volumes:
      - ../../migrations/cockroach:/flyway/sql
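    # Flyway replaces ${<name>} placeholders in the SQL migrations with the
    # FLYWAY_PLACEHOLDERS_<name> values below; all passwords are NULL for local development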
    environment:
      - FLYWAY_PLACEHOLDERS_eventstorepassword=NULL
      - FLYWAY_PLACEHOLDERS_managementpassword=NULL
      - FLYWAY_PLACEHOLDERS_adminapipassword=NULL
      - FLYWAY_PLACEHOLDERS_authpassword=NULL
      - FLYWAY_PLACEHOLDERS_notificationpassword=NULL
      - FLYWAY_PLACEHOLDERS_authzpassword=NULL
      - FLYWAY_PLACEHOLDERS_queriespassword=NULL
    command: -url=jdbc:postgresql://db:26257/defaultdb -user=root -password= -connectRetries=5 migrate
  # minio is used to store assets
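  # it runs in NAS gateway mode and persists objects to /export inside the container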
  minio:
    profiles: ['backend', 'storage']
    image: minio/minio:RELEASE.2021-06-14T01-29-23Z
    restart: on-failure
    networks:
      - zitadel
    environment:
      - MINIO_ACCESS_KEY=access_key
      - MINIO_SECRET_KEY=secret_key
    healthcheck:
      test: ['CMD', 'curl', '-f', 'http://localhost:9000/minio/health/live']
      interval: 30s
      timeout: 5s
      retries: 3
      start_period: 5s
    command:
      - gateway
      - nas
      - /export
  # ZITADEL needs several keys to encrypt data
  # this container generates the required keys
  # and stores them into zitadel/.keys
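  # backend-setup and backend-run mount the generated keys via ../../.keys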
  keys:
    profiles: ['backend', 'backend-stub']
    restart: on-failure
    networks:
      - zitadel
    build:
      context: ../..
      dockerfile: build/local/Dockerfile.keys
      target: gen-keys
    volumes:
      - ../../.:/zitadel
    env_file:
      - ./local.env
  # Interacting with ZITADEL requires some data to be set up.
  # As ZITADEL evolves, additional setup steps are added over time,
  # so it is recommended to rerun the setup on each restart,
  # or at least after a new version is released
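  # the setup command exits once it is done, which is why dependent
  # services wait for condition service_completed_successfully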
  backend-setup:
    profiles: ['backend']
    restart: on-failure
    networks:
      - zitadel
    depends_on:
      db-migrations:
        condition: service_completed_successfully
      keys:
        condition: service_completed_successfully
    build:
      context: ../..
      dockerfile: build/zitadel/Dockerfile
      target: dev-go-build
      args:
        ENV: dev
    volumes:
      - ../../.keys:/go/src/github.com/caos/zitadel/.keys
    env_file:
      - ./local.env
    environment:
      - ZITADEL_EVENTSTORE_HOST=db
    command:
      [
        '-setup-files=cmd/zitadel/setup.yaml',
        '-setup-files=cmd/zitadel/system-defaults.yaml',
        '-setup-files=cmd/zitadel/authz.yaml',
        'setup',
      ]
  # starts the backend (APIs) of ZITADEL
  # Port 50001 serves the gRPC API
  # Port 50002 serves the REST API
  # Port 50003 serves the login GUI
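  # console itself is not served here (-console=false), it runs separately as frontend-run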
  backend-run:
    profiles: ['backend']
    restart: on-failure
    networks:
      - zitadel
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:50002/management/v1/healthz"]
      interval: 10s
      timeout: 30s
      retries: 5
      start_period: 20s
    depends_on:
      db:
        condition: service_healthy
      minio:
        condition: service_healthy
      backend-setup:
        condition: service_completed_successfully
    build:
      context: ../..
      dockerfile: build/zitadel/Dockerfile
      target: dev-go-build
      args:
        ENV: dev
    volumes:
      - ../../.keys:/go/src/github.com/caos/zitadel/.keys
      - ../../.notifications:/go/src/github.com/caos/zitadel/.notifications
    env_file:
      - ./local.env
    environment:
      - ZITADEL_EVENTSTORE_HOST=db
    ports:
      - 50001:50001
      - 50002:50002
      - 50003:50003
    command:
      [
        '-console=false',
        '-localDevMode=true',
        '-config-files=cmd/zitadel/startup.yaml',
        '-config-files=cmd/zitadel/system-defaults.yaml',
        '-config-files=cmd/zitadel/authz.yaml',
        'start',
      ]
  # the gRPC-Web gateway proxies gRPC-Web calls to gRPC
  # it's used by console (the frontend), for example
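  # the proxy listens on port 8080 inside the container and is published on port 50000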
  grpc-web-gateway:
    profiles: ['backend']
    restart: on-failure
    logging:
      driver: none
    depends_on:
      backend-run:
        condition: service_healthy
    networks:
      - zitadel
    build:
      context: ../..
      dockerfile: build/local/Dockerfile.gateway
    image: grpcweb/grpcwebproxy
    ports:
      - '50000:8080'
  # this service generates the environment.json
  # needed in console.
  # It curls the client id of console and writes it to
  # the environment.json
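  # the generated environment.json is mounted into frontend-run below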
  client-id:
    profiles: ['frontend', 'console-stub']
    depends_on:
      backend-run:
        condition: service_healthy
    networks:
      - zitadel
    build:
      context: ../..
      dockerfile: build/local/Dockerfile.clientid
      target: client-id
    volumes:
      - ./environment.json:/environment.json
    environment:
      - HOST=backend-run
      - PORT=50002
  # starts console in development mode
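  # ng serve binds to 0.0.0.0:4200; --disable-host-check allows requests
  # that arrive through the published port with a foreign host header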
  frontend-run:
    profiles: ['frontend']
    networks:
      - zitadel
    depends_on:
      grpc-web-gateway:
        condition: service_started
      client-id:
        condition: service_completed_successfully
    build:
      context: ../..
      dockerfile: build/console/Dockerfile
      target: dev-angular-build
      args:
        ENV: dev
    volumes:
      - ./environment.json:/console/src/assets/environment.json
    command: sh -c "ng serve --host 0.0.0.0 --disable-host-check"
    ports:
      - 4200:4200
networks:
  zitadel: {}