Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add docker-compose config values for prod profile #22

Merged
merged 4 commits into from
Jan 11, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 1 addition & 8 deletions docker-compose/harness/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ docker-compose down -v
```
2) Re-run docker compose
```shell
docker compose up -d
docker-compose up -d
```

## Advanced Configuration
Expand All @@ -72,10 +72,3 @@ You simply need to set the `HARNESS_HOST` environment variable, see [Set hostnam
```shell
export HARNESS_HOST="192.168.0.1"
```

## Airgapped installation
The above scripts will require connectivity to Dockerhub. In case connectivity to Dockerhub is not available, you can follow the steps as below
1) Use the script *download_images.sh* to download the docker images.
2) You can then tar this entire folder and copy it to the target VM
3) Use the script *unpack_images.sh* to load the images onto the VM.
4) Use the *start.sh* script to start Harness.
138 changes: 134 additions & 4 deletions docker-compose/harness/profile-production.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,18 +2,148 @@ version: '3.7'

services:

  # Static-asset UIs: small hard caps are sufficient.
  ng-ui:
    deploy:
      resources:
        limits:
          memory: 32m

  ng-auth-ui:
    deploy:
      resources:
        limits:
          memory: 24m

  manager:
    environment:
      # NOTE(review): the original listed MEMORY twice (1024, then 512).
      # Compose builds a map from this list and the LAST occurrence wins,
      # so 512 was the effective JVM heap value; the dead duplicate has
      # been removed to avoid confusion. (No "m" suffix here, matching
      # how this service's image consumed the value originally.)
      - COMMON_POOL_CORE_SIZE=2
      - COMMON_POOL_MAX_SIZE=20
      - DELEGATE_CAPABILITIES_RECORD_ITERATOR_THREAD_COUNT=4
      - DELEGATE_TASK_EXPIRY_CHECK_ITERATOR_THREAD_COUNT=4
      - DEPLOYMENT_EVENT_LISTENER_COUNT=2
      - DEPLOYMENT_TIME_SERIES_EVENT_LISTENER_COUNT=2
      - EVENTS_FRAMEWORK_NETTY_THREADS=2
      - EVENT_DELIVERY_ITERATOR_THREAD_COUNT=4
      - EXECUTION_EVENT_LISTENER_COUNT=2
      - GENERAL_NOTIFY_EVENT_LISTENER_COUNT=2
      - MEMORY=512
      - ORCHESTRATION_NOTIFY_EVENT_LISTENER_COUNT=2
      - PERPETUAL_TASK_ASSIGNMENT_ITERATOR_THREAD_COUNT=4
      - PERPETUAL_TASK_REBALANCE_ITERATOR_THREAD_COUNT=4
      - SERVER_MAX_THREADS=24
    deploy:
      resources:
        limits:
          memory: 1454m

  ng-manager:
    environment:
      # NOTE(review): duplicate MEMORY entry (1024m, then 512m) removed;
      # 512m is what Compose actually applied (last occurrence wins).
      - COMMON_POOL_CORE_SIZE=2
      - COMMON_POOL_MAX_SIZE=10
      - EVENTS_FRAMEWORK_NETTY_THREADS=2
      - GIT_FULLSYNC_ENTITY_ITERATOR_THREAD_POOL_SIZE=4
      - MEMORY=512m
      - NG_VAULT_ITERATOR_THREAD_POOL_SIZE=4
      - PMS_SDK_EXECUTION_POOL_CORE_SIZE=2
      - PMS_SDK_EXECUTION_POOL_MAX_SIZE=4
      - PMS_SDK_ORCHESTRATION_EVENT_POOL_CORE_SIZE=2
      - PMS_SDK_ORCHESTRATION_EVENT_POOL_MAX_SIZE=4
      - REDIS_NETTY_THREADS=2
      - SERVER_MAX_THREADS=32
      - WEBHOOK_EVENT_PROCESSING_SERVICE_THREAD_SIZE=4
    deploy:
      resources:
        limits:
          memory: 1454m

  pipeline-service:
    environment:
      # NOTE(review): duplicate MEMORY entry (1024m, then 576m) removed;
      # 576m is what Compose actually applied (last occurrence wins).
      - ADVISE_EVENT_CONSUMER_THREAD_COUNT=2
      - BARRIER_ITERATOR_THREAD_POOL_SIZE=4
      - COMMON_POOL_CORE_SIZE=2
      - COMMON_POOL_MAX_SIZE=10
      - CREATE_PLAN_EVENT_CONSUMER_THREAD_COUNT=2
      - EVENTS_FRAMEWORK_NETTY_THREADS=2
      - FACILITATE_EVENT_CONSUMER_THREAD_COUNT=2
      - GRAPH_UPDATE_EVENT_CONSUMER_THREAD_COUNT=2
      - INTERRUPT_CONSUMER_THREAD_COUNT=2
      - INTERRUPT_MONITOR_ITERATOR_THREAD_POOL_SIZE=4
      - MEMORY=576m
      - NODE_START_EVENT_CONSUMER_THREAD_COUNT=2
      - ORCHESTRATION_EVENT_CONSUMER_THREAD_COUNT=2
      - ORCHESTRATION_POOL_CORE_SIZE=2
      - ORCHESTRATION_POOL_MAX_SIZE=4
      - ORCHESTRATION_VISUALIZATION_POOL_CORE_SIZE=2
      - ORCHESTRATION_VISUALIZATION_POOL_MAX_SIZE=4
      - PARTIAL_PLAN_RESPONSE_EVENT_CONSUMER_THREAD_COUNT=2
      - PIPELINE_EXECUTION_POOL_CORE_SIZE=2
      - PIPELINE_EXECUTION_POOL_MAX_SIZE=20
      - PLAN_NOTIFY_EVENT_CONSUMER_THREAD_COUNT=2
      - PMS_NOTIFY_EVENT_CONSUMER_THREAD_COUNT=2
      - PMS_SDK_EXECUTION_POOL_CORE_SIZE=2
      - PMS_SDK_EXECUTION_POOL_MAX_SIZE=4
      - PMS_SDK_ORCHESTRATION_EVENT_POOL_CORE_SIZE=2
      - PMS_SDK_ORCHESTRATION_EVENT_POOL_MAX_SIZE=4
      - PROGRESS_EVENT_CONSUMER_THREAD_COUNT=2
      - REDIS_NETTY_THREADS=2
      - RESOURCE_RESTRAINT_ITERATOR_THREAD_POOL_SIZE=4
      - RESUME_EVENT_CONSUMER_THREAD_COUNT=2
      - SCHEDULED_TRIGGER_ITERATOR_THREAD_POOL_SIZE=4
      - SDK_RESPONSE_EVENT_CONSUMER_THREAD_COUNT=2
      - SERVER_MAX_THREADS=24
      - TIMEOUT_ENGINE_ITERATOR_THREAD_POOL_SIZE=4
      - WEBHOOK_ITERATOR_THREAD_POOL_SIZE=4
    deploy:
      resources:
        limits:
          memory: 1320m

  platform-service:
    environment:
      # NOTE(review): duplicate MEMORY entry (512m, then 128m) removed;
      # 128m is what Compose actually applied (last occurrence wins).
      - COMMON_POOL_CORE_SIZE=2
      - COMMON_POOL_MAX_SIZE=4
      - EVENTS_FRAMEWORK_NETTY_THREADS=2
      - MEMORY=128m
      - REDIS_NETTY_THREADS=2
      - SERVER_MAX_THREADS=24
    deploy:
      resources:
        limits:
          memory: 372m

  log-service:
    deploy:
      resources:
        limits:
          memory: 160m

  scm:
    deploy:
      resources:
        limits:
          memory: 96m

  delegate-proxy:
    deploy:
      resources:
        limits:
          memory: 48m

  proxy:
    deploy:
      resources:
        limits:
          memory: 32m

  redis:
    deploy:
      resources:
        limits:
          memory: 384m

  mongo:
    deploy:
      resources:
        limits:
          memory: 768m
    # Cap WiredTiger's cache below the container memory limit so mongod
    # is not OOM-killed at the 768m ceiling.
    command: --wiredTigerCacheSizeGB 0.375 -f /etc/mongod.conf
7 changes: 7 additions & 0 deletions helm/harness/templates/delegate-proxy.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,13 @@ spec:
- image: harness/delegate-proxy-signed:{{ .Values.versions.manager }}
imagePullPolicy: IfNotPresent
name: delegate-proxy
resources:
limits:
memory: {{ .Values.delegate_proxy.resources.limits.memory }}
cpu: {{ .Values.delegate_proxy.resources.limits.cpu }}
requests:
memory: {{ .Values.delegate_proxy.resources.requests.memory }}
cpu: {{ .Values.delegate_proxy.resources.requests.cpu }}

---

Expand Down
7 changes: 7 additions & 0 deletions helm/harness/templates/log-service.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -52,3 +52,10 @@ spec:
- name: http-log-svc
containerPort: 8079
protocol: "TCP"
resources:
limits:
memory: {{ .Values.log_service.resources.limits.memory }}
cpu: {{ .Values.log_service.resources.limits.cpu }}
requests:
memory: {{ .Values.log_service.resources.requests.memory }}
cpu: {{ .Values.log_service.resources.requests.cpu }}
37 changes: 22 additions & 15 deletions helm/harness/templates/manager.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -33,32 +33,32 @@ data:
BLOCKING_CAPABILITY_PERMISSIONS_RECORD_ITERATOR_THREAD_COUNT: '2'
CACHE_BACKEND: REDIS
CAPSULE_JAR: rest-capsule.jar
COMMON_POOL_CORE_SIZE: '1'
COMMON_POOL_MAX_SIZE: '10'
COMMON_POOL_CORE_SIZE: '{{ .Values.manager.config.common_pool_core_size | default "1" }}'
COMMON_POOL_MAX_SIZE: '{{ .Values.manager.config.common_pool_max_size | default "10" }}'
CRONS_THREAD_COUNT: '1'
DELEGATE_CAPABILITIES_RECORD_ITERATOR_THREAD_COUNT: '2'
DELEGATE_CAPABILITIES_RECORD_ITERATOR_THREAD_COUNT: '{{ .Values.manager.config.delegate_capabilities_record_iterator_thread_count | default "2" }}'
DELEGATE_DOCKER_IMAGE: harness/delegate:latest
DELEGATE_GRPC_AUTHORITY: default-authority.harness.io
DELEGATE_GRPC_TARGET: {{ .Values.harness_host | default "host.docker.internal" }}:9879
DELEGATE_METADATA_URL: http://proxy/storage/wingsdelegates/delegateprod.txt
DELEGATE_SERVICE_AUTHORITY: default-authority.harness.io
DELEGATE_SERVICE_TARGET: manager:9879
DELEGATE_TASK_EXPIRY_CHECK_ITERATOR_THREAD_COUNT: '2'
DEPLOYMENT_EVENT_LISTENER_COUNT: '1'
DEPLOYMENT_TIME_SERIES_EVENT_LISTENER_COUNT: '1'
DELEGATE_TASK_EXPIRY_CHECK_ITERATOR_THREAD_COUNT: '{{ .Values.manager.config.delegate_task_expiry_check_iterator_thread_count | default "2" }}'
DEPLOYMENT_EVENT_LISTENER_COUNT: '{{ .Values.manager.config.deployment_event_listener_count | default "1" }}'
DEPLOYMENT_TIME_SERIES_EVENT_LISTENER_COUNT: '{{ .Values.manager.config.deployment_time_series_event_listener_count | default "1" }}'
DEPLOY_MODE: KUBERNETES_ONPREM
DEPLOY_VERSION: COMMUNITY
DISABLE_RESOURCE_VALIDATION: 'true'
DISTRIBUTED_LOCK_IMPLEMENTATION: REDIS
ENABLE_AUDIT: 'false'
ENABLE_SERIALGC: 'true'
EVENTS_FRAMEWORK_AVAILABLE_IN_ONPREM: 'true'
EVENTS_FRAMEWORK_NETTY_THREADS: '1'
EVENTS_FRAMEWORK_NETTY_THREADS: '{{ .Values.manager.config.events_framework_netty_threads | default "1" }}'
EVENTS_FRAMEWORK_REDIS_URL: redis://redis:6379
EVENT_DELIVERY_ITERATOR_THREAD_COUNT: '2'
EXECUTION_EVENT_LISTENER_COUNT: '1'
EVENT_DELIVERY_ITERATOR_THREAD_COUNT: '{{ .Values.manager.config.event_delivery_iterator_thread_count | default "2" }}'
EXECUTION_EVENT_LISTENER_COUNT: '{{ .Values.manager.config.execution_event_listener_count | default "1" }}'
FEATURES: RBAC,LDAP_SSO_PROVIDER,SHELL_SCRIPT_PROVISION,CUSTOM_DASHBOARD,GRAPHQL,SEND_SLACK_NOTIFICATION_FROM_DELEGATE,CONNECTORS_REF_SECRETS_MIGRATION,CDNG_ENABLED,NEXT_GEN_ENABLED,LOG_STREAMING_INTEGRATION,NG_CG_TASK_ASSIGNMENT_ISOLATION
GENERAL_NOTIFY_EVENT_LISTENER_COUNT: '1'
GENERAL_NOTIFY_EVENT_LISTENER_COUNT: '{{ .Values.manager.config.general_notify_event_listener_count | default "1" }}'
GRAPHQL_ENABLED: 'false'
INSTANCE_SYNC_ITERATOR_THREAD_COUNT: '2'
JAVA_ADVANCED_FLAGS: -XX:-TieredCompilation
Expand All @@ -67,18 +67,18 @@ data:
LOGGING_LEVEL: WARN
LOG_STREAMING_SERVICE_BASEURL: http://{{ .Values.harness_host | default "host.docker.internal" }}:{{ .Values.listen_port }}/log-service/
LOG_STREAMING_SERVICE_TOKEN: c76e567a-b341-404d-a8dd-d9738714eb82
MEMORY: '{{ .Values.manager.memory }}'
MEMORY: '{{ .Values.manager.config.memory }}'
NG_MANAGER_BASE_URL: http://proxy/ng/api/
ORCHESTRATION_NOTIFY_EVENT_LISTENER_COUNT: '1'
PERPETUAL_TASK_ASSIGNMENT_ITERATOR_THREAD_COUNT: '2'
PERPETUAL_TASK_REBALANCE_ITERATOR_THREAD_COUNT: '2'
ORCHESTRATION_NOTIFY_EVENT_LISTENER_COUNT: '{{ .Values.manager.config.orchestration_notify_event_listener_count | default "1" }}'
PERPETUAL_TASK_ASSIGNMENT_ITERATOR_THREAD_COUNT: '{{ .Values.manager.config.perpetual_task_assignment_iterator_thread_count | default "2" }}'
PERPETUAL_TASK_REBALANCE_ITERATOR_THREAD_COUNT: '{{ .Values.manager.config.perpetual_task_rebalance_iterator_thread_count | default "2" }}'
REDIS_NETTY_THREADS: '1'
REDIS_URL: redis://redis:6379
RESOURCE_CONSTRAINT_BACKUP_ITERATOR_THREAD_COUNT: '2'
SEARCH_ENABLED: 'false'
SEGMENT_ENABLED_NG: 'true'
SEGMENT_URL_NG: https://stats.drone.ci/api/v1/
SERVER_MAX_THREADS: '12'
SERVER_MAX_THREADS: '{{ .Values.manager.config.server_max_threads | default "12" }}'
SERVER_PORT: '9090'
SERVICE_ACC: /opt/harness/svc/service_acc.json
SETTING_ATTRIBUTE_VALIDATE_CONNECTIVITY_ITERATOR_THREAD_COUNT: '2'
Expand Down Expand Up @@ -158,6 +158,13 @@ spec:
periodSeconds: 5
successThreshold: 1
timeoutSeconds: 1
resources:
limits:
memory: {{ .Values.manager.resources.limits.memory }}
cpu: {{ .Values.manager.resources.limits.cpu }}
requests:
memory: {{ .Values.manager.resources.requests.memory }}
cpu: {{ .Values.manager.resources.requests.cpu }}
volumes:
- name: mongo-config
secret:
Expand Down
11 changes: 9 additions & 2 deletions helm/harness/templates/mongodb.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -409,7 +409,7 @@ spec:
- --bind_ip=0.0.0.0
- --auth
- --keyFile=/data/configdb/key.txt
- --wiredTigerCacheSizeGB=1
- --wiredTigerCacheSizeGB={{ .Values.mongo.resources.requests.wiredTigerCacheSize }}
livenessProbe:
exec:
command:
Expand Down Expand Up @@ -439,6 +439,13 @@ spec:
mountPath: /data/configdb
- name: workdir
mountPath: /work-dir
resources:
limits:
memory: {{ .Values.mongo.resources.limits.memory }}
cpu: {{ .Values.mongo.resources.limits.cpu }}
requests:
memory: {{ .Values.mongo.resources.requests.memory }}
cpu: {{ .Values.mongo.resources.requests.cpu }}

volumes:
- name: config
Expand All @@ -464,4 +471,4 @@ spec:
- "ReadWriteOnce"
resources:
requests:
storage: 10Gi
storage: {{ .Values.mongo.resources.requests.storage }}
7 changes: 7 additions & 0 deletions helm/harness/templates/ng-auth-ui.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -66,3 +66,10 @@ spec:
envFrom:
- configMapRef:
name: ng-auth-ui
resources:
limits:
memory: {{ .Values.ng_auth_ui.resources.limits.memory }}
cpu: {{ .Values.ng_auth_ui.resources.limits.cpu }}
requests:
memory: {{ .Values.ng_auth_ui.resources.requests.memory }}
cpu: {{ .Values.ng_auth_ui.resources.requests.cpu }}
33 changes: 20 additions & 13 deletions helm/harness/templates/ng-manager.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,18 +5,18 @@ metadata:
data:
AUDIT_ENABLED: 'false'
CACHE_BACKEND: REDIS
COMMON_POOL_CORE_SIZE: '1'
COMMON_POOL_MAX_SIZE: '5'
COMMON_POOL_CORE_SIZE: '{{ .Values.ng_manager.config.common_pool_core_size | default "1" }}'
COMMON_POOL_MAX_SIZE: '{{ .Values.ng_manager.config.common_pool_max_size | default "5" }}'
CURRENT_GEN_UI_URL: http://proxy/#/
DEPLOY_MODE: KUBERNETES_ONPREM
DEPLOY_VERSION: COMMUNITY
DISABLE_RESOURCE_VALIDATION: 'true'
DISTRIBUTED_LOCK_IMPLEMENTATION: REDIS
ENABLE_DEFAULT_RESOURCE_GROUP_CREATION: 'true'
ENABLE_SERIALGC: 'true'
EVENTS_FRAMEWORK_NETTY_THREADS: '1'
EVENTS_FRAMEWORK_NETTY_THREADS: '{{ .Values.ng_manager.config.events_framework_netty_threads | default "1" }}'
EVENTS_FRAMEWORK_REDIS_URL: redis://redis:6379
GIT_FULLSYNC_ENTITY_ITERATOR_THREAD_POOL_SIZE: '2'
GIT_FULLSYNC_ENTITY_ITERATOR_THREAD_POOL_SIZE: '{{ .Values.ng_manager.config.git_fullsync_entity_iterator_thread_pool_size | default "2" }}'
GRPC_SERVER_PORT: '9979'
JAVA_ADVANCED_FLAGS: -XX:-TieredCompilation
LICENSE_DEPLOY_VARIANT: COMMUNITY
Expand All @@ -28,31 +28,31 @@ data:
MANAGER_CLIENT_BASEURL: http://proxy/api/
MANAGER_TARGET: manager:9879
MANAGER_UI_URL: http://proxy
MEMORY: '{{ .Values.ng_manager.memory }}m'
MEMORY: '{{ .Values.ng_manager.config.memory }}m'
MOCK_ACCESS_CONTROL_SERVICE: 'true'
NG_MANAGER_API_URL: http://proxy/ng/api/
NG_MANAGER_AUTHORITY: default-authority.harness.io
NG_MANAGER_CLIENT_BASEURL: http://proxy/ng/api/
NG_MANAGER_TARGET: localhost:13002
NG_MANAGER_UI_URL: http://proxy/ng/#/
NG_VAULT_ITERATOR_THREAD_POOL_SIZE: '2'
NG_VAULT_ITERATOR_THREAD_POOL_SIZE: '{{ .Values.ng_manager.config.ng_vault_iterator_thread_pool_size | default "2" }}'
PMS_AUTHORITY: default-authority.harness.io
PMS_GITSYNC_AUTHORITY: default-authority.harness.io
PMS_GITSYNC_TARGET: pipeline-service:14002
PMS_SDK_EXECUTION_POOL_CORE_SIZE: '1'
PMS_SDK_EXECUTION_POOL_MAX_SIZE: '2'
PMS_SDK_ORCHESTRATION_EVENT_POOL_CORE_SIZE: '1'
PMS_SDK_ORCHESTRATION_EVENT_POOL_MAX_SIZE: '2'
PMS_SDK_EXECUTION_POOL_CORE_SIZE: '{{ .Values.ng_manager.config.pms_sdk_execution_pool_core_size | default "1" }}'
PMS_SDK_EXECUTION_POOL_MAX_SIZE: '{{ .Values.ng_manager.config.pms_sdk_execution_pool_max_size | default "2" }}'
PMS_SDK_ORCHESTRATION_EVENT_POOL_CORE_SIZE: '{{ .Values.ng_manager.config.pms_sdk_orchestration_event_pool_core_size | default "1" }}'
PMS_SDK_ORCHESTRATION_EVENT_POOL_MAX_SIZE: '{{ .Values.ng_manager.config.pms_sdk_orchestration_event_pool_max_size | default "2" }}'
PMS_TARGET: pipeline-service:12011
REDIS_NETTY_THREADS: '1'
REDIS_NETTY_THREADS: '{{ .Values.ng_manager.config.redis_netty_threads | default "1" }}'
RESOURCE_GROUP_BASE_URL: http://proxy/resourcegroup/api/
SCM_SERVICE_URI: scm:8091
SEGMENT_ENABLED: 'true'
SEGMENT_URL: https://stats.drone.ci/api/v1/
SERVER_MAX_THREADS: '16'
SERVER_MAX_THREADS: '{{ .Values.ng_manager.config.server_max_threads | default "16" }}'
SHOULD_CONFIGURE_WITH_PMS: 'true'
USE_REDIS_FOR_SDK_RESPONSE_EVENTS: 'true'
WEBHOOK_EVENT_PROCESSING_SERVICE_THREAD_SIZE: '2'
WEBHOOK_EVENT_PROCESSING_SERVICE_THREAD_SIZE: '{{ .Values.ng_manager.config.webhook_event_processing_service_thread_size | default "2" }}'

---

Expand Down Expand Up @@ -146,3 +146,10 @@ spec:
timeoutSeconds: 10
periodSeconds: 10
failureThreshold: 10
resources:
limits:
memory: {{ .Values.ng_manager.resources.limits.memory }}
cpu: {{ .Values.ng_manager.resources.limits.cpu }}
requests:
memory: {{ .Values.ng_manager.resources.requests.memory }}
cpu: {{ .Values.ng_manager.resources.requests.cpu }}
Loading