diff --git a/docker-compose/harness/README.md b/docker-compose/harness/README.md
index 987c8b7..0746d92 100644
--- a/docker-compose/harness/README.md
+++ b/docker-compose/harness/README.md
@@ -60,7 +60,7 @@ docker-compose down -v
 ```
 2) Re-run docker compose
 ```shell
-   docker compose up -d
+   docker-compose up -d
 ```
 
 ## Advanced Configuration
@@ -72,10 +72,3 @@ You simply need to set the `HARNESS_HOST` environment variable, see [Set hostnam
 ```shell
 export HARNESS_HOST="192.168.0.1"
 ```
-
-## Airgapped installation
-The above scripts will require connectivity to Dockerhub. In case connectivity to Dockerhub is not available, you can follow the steps as below
-1) Use the script *download_images.sh* to download the docker images.
-2) You can then tar this entire folder and copy it to the target VM
-3) Use the script *unpack_images.sh* to load the images onto the VM.
-4) Use the *start.sh* script to start Harness.
diff --git a/docker-compose/harness/profile-production.yml b/docker-compose/harness/profile-production.yml
index dc21ff0..06017c2 100644
--- a/docker-compose/harness/profile-production.yml
+++ b/docker-compose/harness/profile-production.yml
@@ -2,18 +2,148 @@
 version: '3.7'
 
 services:
+  ng-ui:
+    deploy:
+      resources:
+        limits:
+          memory: 32m
+
+  ng-auth-ui:
+    deploy:
+      resources:
+        limits:
+          memory: 24m
+
   manager:
     environment:
-      - MEMORY=1024
+      - COMMON_POOL_CORE_SIZE=2
+      - COMMON_POOL_MAX_SIZE=20
+      - DELEGATE_CAPABILITIES_RECORD_ITERATOR_THREAD_COUNT=4
+      - DELEGATE_TASK_EXPIRY_CHECK_ITERATOR_THREAD_COUNT=4
+      - DEPLOYMENT_EVENT_LISTENER_COUNT=2
+      - DEPLOYMENT_TIME_SERIES_EVENT_LISTENER_COUNT=2
+      - EVENTS_FRAMEWORK_NETTY_THREADS=2
+      - EVENT_DELIVERY_ITERATOR_THREAD_COUNT=4
+      - EXECUTION_EVENT_LISTENER_COUNT=2
+      - GENERAL_NOTIFY_EVENT_LISTENER_COUNT=2
+      - MEMORY=512
+      - ORCHESTRATION_NOTIFY_EVENT_LISTENER_COUNT=2
+      - PERPETUAL_TASK_ASSIGNMENT_ITERATOR_THREAD_COUNT=4
+      - PERPETUAL_TASK_REBALANCE_ITERATOR_THREAD_COUNT=4
+      - SERVER_MAX_THREADS=24
+    deploy:
+      resources:
+        limits:
+          memory: 1454m
 
   ng-manager:
     environment:
-      - MEMORY=1024m
+      - COMMON_POOL_CORE_SIZE=2
+      - COMMON_POOL_MAX_SIZE=10
+      - EVENTS_FRAMEWORK_NETTY_THREADS=2
+      - GIT_FULLSYNC_ENTITY_ITERATOR_THREAD_POOL_SIZE=4
+      - MEMORY=512m
+      - NG_VAULT_ITERATOR_THREAD_POOL_SIZE=4
+      - PMS_SDK_EXECUTION_POOL_CORE_SIZE=2
+      - PMS_SDK_EXECUTION_POOL_MAX_SIZE=4
+      - PMS_SDK_ORCHESTRATION_EVENT_POOL_CORE_SIZE=2
+      - PMS_SDK_ORCHESTRATION_EVENT_POOL_MAX_SIZE=4
+      - REDIS_NETTY_THREADS=2
+      - SERVER_MAX_THREADS=32
+      - WEBHOOK_EVENT_PROCESSING_SERVICE_THREAD_SIZE=4
+    deploy:
+      resources:
+        limits:
+          memory: 1454m
 
   pipeline-service:
     environment:
-      - MEMORY=1024m
+      - ADVISE_EVENT_CONSUMER_THREAD_COUNT=2
+      - BARRIER_ITERATOR_THREAD_POOL_SIZE=4
+      - COMMON_POOL_CORE_SIZE=2
+      - COMMON_POOL_MAX_SIZE=10
+      - CREATE_PLAN_EVENT_CONSUMER_THREAD_COUNT=2
+      - EVENTS_FRAMEWORK_NETTY_THREADS=2
+      - FACILITATE_EVENT_CONSUMER_THREAD_COUNT=2
+      - GRAPH_UPDATE_EVENT_CONSUMER_THREAD_COUNT=2
+      - INTERRUPT_CONSUMER_THREAD_COUNT=2
+      - INTERRUPT_MONITOR_ITERATOR_THREAD_POOL_SIZE=4
+      - MEMORY=576m
+      - NODE_START_EVENT_CONSUMER_THREAD_COUNT=2
+      - ORCHESTRATION_EVENT_CONSUMER_THREAD_COUNT=2
+      - ORCHESTRATION_POOL_CORE_SIZE=2
+      - ORCHESTRATION_POOL_MAX_SIZE=4
+      - ORCHESTRATION_VISUALIZATION_POOL_CORE_SIZE=2
+      - ORCHESTRATION_VISUALIZATION_POOL_MAX_SIZE=4
+      - PARTIAL_PLAN_RESPONSE_EVENT_CONSUMER_THREAD_COUNT=2
+      - PIPELINE_EXECUTION_POOL_CORE_SIZE=2
+      - PIPELINE_EXECUTION_POOL_MAX_SIZE=20
+      - PLAN_NOTIFY_EVENT_CONSUMER_THREAD_COUNT=2
+      - PMS_NOTIFY_EVENT_CONSUMER_THREAD_COUNT=2
+      - PMS_SDK_EXECUTION_POOL_CORE_SIZE=2
+      - PMS_SDK_EXECUTION_POOL_MAX_SIZE=4
+      - PMS_SDK_ORCHESTRATION_EVENT_POOL_CORE_SIZE=2
+      - PMS_SDK_ORCHESTRATION_EVENT_POOL_MAX_SIZE=4
+      - PROGRESS_EVENT_CONSUMER_THREAD_COUNT=2
+      - REDIS_NETTY_THREADS=2
+      - RESOURCE_RESTRAINT_ITERATOR_THREAD_POOL_SIZE=4
+      - RESUME_EVENT_CONSUMER_THREAD_COUNT=2
+      - SCHEDULED_TRIGGER_ITERATOR_THREAD_POOL_SIZE=4
+      - SDK_RESPONSE_EVENT_CONSUMER_THREAD_COUNT=2
+      - SERVER_MAX_THREADS=24
+      - TIMEOUT_ENGINE_ITERATOR_THREAD_POOL_SIZE=4
+      - WEBHOOK_ITERATOR_THREAD_POOL_SIZE=4
+    deploy:
+      resources:
+        limits:
+          memory: 1320m
 
   platform-service:
     environment:
-      - MEMORY=512m
+      - COMMON_POOL_CORE_SIZE=2
+      - COMMON_POOL_MAX_SIZE=4
+      - EVENTS_FRAMEWORK_NETTY_THREADS=2
+      - MEMORY=128m
+      - REDIS_NETTY_THREADS=2
+      - SERVER_MAX_THREADS=24
+    deploy:
+      resources:
+        limits:
+          memory: 372m
+
+  log-service:
+    deploy:
+      resources:
+        limits:
+          memory: 160m
+
+  scm:
+    deploy:
+      resources:
+        limits:
+          memory: 96m
+
+  delegate-proxy:
+    deploy:
+      resources:
+        limits:
+          memory: 48m
+
+  proxy:
+    deploy:
+      resources:
+        limits:
+          memory: 32m
+
+  redis:
+    deploy:
+      resources:
+        limits:
+          memory: 384m
+
+  mongo:
+    deploy:
+      resources:
+        limits:
+          memory: 768m
+    command: --wiredTigerCacheSizeGB 0.375 -f /etc/mongod.conf
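A note on the Compose keys above: with a v3 file, the classic `docker-compose` binary enforces `deploy.resources.limits` only when running under swarm or when the `--compatibility` flag translates them to container-level limits. A minimal sketch for checking that the limits actually land; file names and the container name are assumptions about the harness folder's default layout:

```shell
# Bring the stack up with the production profile; --compatibility maps
# v3 deploy.resources.limits onto plain container memory limits.
docker-compose --compatibility -f docker-compose.yml -f profile-production.yml up -d

# Spot-check one service: a 1454m limit should report roughly 1.5 GB.
# The container name is an assumption; list real names with `docker ps`.
docker inspect --format '{{ .HostConfig.Memory }}' harness_manager_1
```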
diff --git a/helm/harness/templates/delegate-proxy.yaml b/helm/harness/templates/delegate-proxy.yaml
index e7d0599..3b17051 100644
--- a/helm/harness/templates/delegate-proxy.yaml
+++ b/helm/harness/templates/delegate-proxy.yaml
@@ -18,6 +18,13 @@ spec:
       - image: harness/delegate-proxy-signed:{{ .Values.versions.manager }}
         imagePullPolicy: IfNotPresent
         name: delegate-proxy
+        resources:
+          limits:
+            memory: {{ .Values.delegate_proxy.resources.limits.memory }}
+            cpu: {{ .Values.delegate_proxy.resources.limits.cpu }}
+          requests:
+            memory: {{ .Values.delegate_proxy.resources.requests.memory }}
+            cpu: {{ .Values.delegate_proxy.resources.requests.cpu }}
 
 ---
 
diff --git a/helm/harness/templates/log-service.yaml b/helm/harness/templates/log-service.yaml
index 90eaf5d..18cdb65 100644
--- a/helm/harness/templates/log-service.yaml
+++ b/helm/harness/templates/log-service.yaml
@@ -52,3 +52,10 @@ spec:
         - name: http-log-svc
           containerPort: 8079
           protocol: "TCP"
+        resources:
+          limits:
+            memory: {{ .Values.log_service.resources.limits.memory }}
+            cpu: {{ .Values.log_service.resources.limits.cpu }}
+          requests:
+            memory: {{ .Values.log_service.resources.requests.memory }}
+            cpu: {{ .Values.log_service.resources.requests.cpu }}
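The same seven-line `resources:` stanza recurs for every container in the templates that follow. After a deploy, the applied values can be read back from the cluster; a quick sketch, assuming the chart is installed into a `harness` namespace (adjust names to your release):

```shell
# List each pod with the memory limit and CPU request of its first
# container, to confirm the templated resources blocks were rendered.
kubectl get pods -n harness -o custom-columns='NAME:.metadata.name,MEM_LIMIT:.spec.containers[0].resources.limits.memory,CPU_REQ:.spec.containers[0].resources.requests.cpu'
```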
diff --git a/helm/harness/templates/manager.yaml b/helm/harness/templates/manager.yaml
index cfc985b..51f9e94 100644
--- a/helm/harness/templates/manager.yaml
+++ b/helm/harness/templates/manager.yaml
@@ -33,19 +33,19 @@ data:
   BLOCKING_CAPABILITY_PERMISSIONS_RECORD_ITERATOR_THREAD_COUNT: '2'
   CACHE_BACKEND: REDIS
   CAPSULE_JAR: rest-capsule.jar
-  COMMON_POOL_CORE_SIZE: '1'
-  COMMON_POOL_MAX_SIZE: '10'
+  COMMON_POOL_CORE_SIZE: '{{ .Values.manager.config.common_pool_core_size | default "1" }}'
+  COMMON_POOL_MAX_SIZE: '{{ .Values.manager.config.common_pool_max_size | default "10" }}'
   CRONS_THREAD_COUNT: '1'
-  DELEGATE_CAPABILITIES_RECORD_ITERATOR_THREAD_COUNT: '2'
+  DELEGATE_CAPABILITIES_RECORD_ITERATOR_THREAD_COUNT: '{{ .Values.manager.config.delegate_capabilities_record_iterator_thread_count | default "2" }}'
   DELEGATE_DOCKER_IMAGE: harness/delegate:latest
   DELEGATE_GRPC_AUTHORITY: default-authority.harness.io
   DELEGATE_GRPC_TARGET: {{ .Values.harness_host | default "host.docker.internal" }}:9879
   DELEGATE_METADATA_URL: http://proxy/storage/wingsdelegates/delegateprod.txt
   DELEGATE_SERVICE_AUTHORITY: default-authority.harness.io
   DELEGATE_SERVICE_TARGET: manager:9879
-  DELEGATE_TASK_EXPIRY_CHECK_ITERATOR_THREAD_COUNT: '2'
-  DEPLOYMENT_EVENT_LISTENER_COUNT: '1'
-  DEPLOYMENT_TIME_SERIES_EVENT_LISTENER_COUNT: '1'
+  DELEGATE_TASK_EXPIRY_CHECK_ITERATOR_THREAD_COUNT: '{{ .Values.manager.config.delegate_task_expiry_check_iterator_thread_count | default "2" }}'
+  DEPLOYMENT_EVENT_LISTENER_COUNT: '{{ .Values.manager.config.deployment_event_listener_count | default "1" }}'
+  DEPLOYMENT_TIME_SERIES_EVENT_LISTENER_COUNT: '{{ .Values.manager.config.deployment_time_series_event_listener_count | default "1" }}'
   DEPLOY_MODE: KUBERNETES_ONPREM
   DEPLOY_VERSION: COMMUNITY
   DISABLE_RESOURCE_VALIDATION: 'true'
@@ -53,12 +53,12 @@ data:
   ENABLE_AUDIT: 'false'
   ENABLE_SERIALGC: 'true'
   EVENTS_FRAMEWORK_AVAILABLE_IN_ONPREM: 'true'
-  EVENTS_FRAMEWORK_NETTY_THREADS: '1'
+  EVENTS_FRAMEWORK_NETTY_THREADS: '{{ .Values.manager.config.events_framework_netty_threads | default "1" }}'
   EVENTS_FRAMEWORK_REDIS_URL: redis://redis:6379
-  EVENT_DELIVERY_ITERATOR_THREAD_COUNT: '2'
-  EXECUTION_EVENT_LISTENER_COUNT: '1'
+  EVENT_DELIVERY_ITERATOR_THREAD_COUNT: '{{ .Values.manager.config.event_delivery_iterator_thread_count | default "2" }}'
+  EXECUTION_EVENT_LISTENER_COUNT: '{{ .Values.manager.config.execution_event_listener_count | default "1" }}'
   FEATURES: RBAC,LDAP_SSO_PROVIDER,SHELL_SCRIPT_PROVISION,CUSTOM_DASHBOARD,GRAPHQL,SEND_SLACK_NOTIFICATION_FROM_DELEGATE,CONNECTORS_REF_SECRETS_MIGRATION,CDNG_ENABLED,NEXT_GEN_ENABLED,LOG_STREAMING_INTEGRATION,NG_CG_TASK_ASSIGNMENT_ISOLATION
-  GENERAL_NOTIFY_EVENT_LISTENER_COUNT: '1'
+  GENERAL_NOTIFY_EVENT_LISTENER_COUNT: '{{ .Values.manager.config.general_notify_event_listener_count | default "1" }}'
   GRAPHQL_ENABLED: 'false'
   INSTANCE_SYNC_ITERATOR_THREAD_COUNT: '2'
   JAVA_ADVANCED_FLAGS: -XX:-TieredCompilation
@@ -67,18 +67,18 @@ data:
   LOGGING_LEVEL: WARN
   LOG_STREAMING_SERVICE_BASEURL: http://{{ .Values.harness_host | default "host.docker.internal" }}:{{ .Values.listen_port }}/log-service/
   LOG_STREAMING_SERVICE_TOKEN: c76e567a-b341-404d-a8dd-d9738714eb82
-  MEMORY: '{{ .Values.manager.memory }}'
+  MEMORY: '{{ .Values.manager.config.memory }}'
   NG_MANAGER_BASE_URL: http://proxy/ng/api/
-  ORCHESTRATION_NOTIFY_EVENT_LISTENER_COUNT: '1'
-  PERPETUAL_TASK_ASSIGNMENT_ITERATOR_THREAD_COUNT: '2'
-  PERPETUAL_TASK_REBALANCE_ITERATOR_THREAD_COUNT: '2'
+  ORCHESTRATION_NOTIFY_EVENT_LISTENER_COUNT: '{{ .Values.manager.config.orchestration_notify_event_listener_count | default "1" }}'
+  PERPETUAL_TASK_ASSIGNMENT_ITERATOR_THREAD_COUNT: '{{ .Values.manager.config.perpetual_task_assignment_iterator_thread_count | default "2" }}'
+  PERPETUAL_TASK_REBALANCE_ITERATOR_THREAD_COUNT: '{{ .Values.manager.config.perpetual_task_rebalance_iterator_thread_count | default "2" }}'
   REDIS_NETTY_THREADS: '1'
   REDIS_URL: redis://redis:6379
   RESOURCE_CONSTRAINT_BACKUP_ITERATOR_THREAD_COUNT: '2'
   SEARCH_ENABLED: 'false'
   SEGMENT_ENABLED_NG: 'true'
   SEGMENT_URL_NG: https://stats.drone.ci/api/v1/
-  SERVER_MAX_THREADS: '12'
+  SERVER_MAX_THREADS: '{{ .Values.manager.config.server_max_threads | default "12" }}'
   SERVER_PORT: '9090'
   SERVICE_ACC: /opt/harness/svc/service_acc.json
   SETTING_ATTRIBUTE_VALIDATE_CONNECTIVITY_ITERATOR_THREAD_COUNT: '2'
@@ -158,6 +158,13 @@ spec:
           periodSeconds: 5
           successThreshold: 1
           timeoutSeconds: 1
+        resources:
+          limits:
+            memory: {{ .Values.manager.resources.limits.memory }}
+            cpu: {{ .Values.manager.resources.limits.cpu }}
+          requests:
+            memory: {{ .Values.manager.resources.requests.memory }}
+            cpu: {{ .Values.manager.resources.requests.cpu }}
       volumes:
       - name: mongo-config
         secret:
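The `| default` pipeline used throughout manager.yaml means values.yaml only has to carry keys that differ from the old hard-coded numbers; any key absent there renders to the quoted fallback, and values-production.yaml overrides it. One way to see both renderings side by side (the release name and paths are assumptions):

```shell
# Render the manager manifest with chart defaults and with the
# production overrides, then diff the two outputs.
helm template harness ./helm/harness --show-only templates/manager.yaml > /tmp/manager-defaults.yaml
helm template harness ./helm/harness --show-only templates/manager.yaml \
  -f helm/harness/values-production.yaml > /tmp/manager-production.yaml
diff /tmp/manager-defaults.yaml /tmp/manager-production.yaml
```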
diff --git a/helm/harness/templates/mongodb.yaml b/helm/harness/templates/mongodb.yaml
index 99d5f1a..e507e7a 100644
--- a/helm/harness/templates/mongodb.yaml
+++ b/helm/harness/templates/mongodb.yaml
@@ -409,7 +409,7 @@ spec:
             - --bind_ip=0.0.0.0
             - --auth
            - --keyFile=/data/configdb/key.txt
-            - --wiredTigerCacheSizeGB=1
+            - --wiredTigerCacheSizeGB={{ .Values.mongo.resources.requests.wiredTigerCacheSize }}
          livenessProbe:
            exec:
              command:
@@ -439,6 +439,13 @@
            mountPath: /data/configdb
          - name: workdir
            mountPath: /work-dir
+          resources:
+            limits:
+              memory: {{ .Values.mongo.resources.limits.memory }}
+              cpu: {{ .Values.mongo.resources.limits.cpu }}
+            requests:
+              memory: {{ .Values.mongo.resources.requests.memory }}
+              cpu: {{ .Values.mongo.resources.requests.cpu }}
 
       volumes:
        - name: config
@@ -464,4 +471,4 @@ spec:
       - "ReadWriteOnce"
       resources:
         requests:
-          storage: 10Gi
+          storage: {{ .Values.mongo.resources.requests.storage }}
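The `wiredTigerCacheSize` value tracks the container memory limit at roughly half or less in both values files (0.125 GB cache under a 320Mi limit, 0.375 GB under 768Mi), leaving the rest of the limit for mongod itself. If the limit is raised, the cache should scale with it; a sketch of the arithmetic (the ratio is an observation about these values, not a hard MongoDB rule):

```shell
# Cache sizing rule of thumb used by these values: keep the WiredTiger
# cache at about half the container limit. For a 768Mi limit:
echo "scale=3; 768 / 2 / 1024" | bc   # 0.375 GB -> the production value
```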
diff --git a/helm/harness/templates/ng-auth-ui.yaml b/helm/harness/templates/ng-auth-ui.yaml
index 6f4ebfd..63aa738 100644
--- a/helm/harness/templates/ng-auth-ui.yaml
+++ b/helm/harness/templates/ng-auth-ui.yaml
@@ -66,3 +66,10 @@ spec:
         envFrom:
         - configMapRef:
             name: ng-auth-ui
+        resources:
+          limits:
+            memory: {{ .Values.ng_auth_ui.resources.limits.memory }}
+            cpu: {{ .Values.ng_auth_ui.resources.limits.cpu }}
+          requests:
+            memory: {{ .Values.ng_auth_ui.resources.requests.memory }}
+            cpu: {{ .Values.ng_auth_ui.resources.requests.cpu }}
diff --git a/helm/harness/templates/ng-manager.yaml b/helm/harness/templates/ng-manager.yaml
index 244ea81..1ea04cd 100644
--- a/helm/harness/templates/ng-manager.yaml
+++ b/helm/harness/templates/ng-manager.yaml
@@ -5,8 +5,8 @@
 data:
   AUDIT_ENABLED: 'false'
   CACHE_BACKEND: REDIS
-  COMMON_POOL_CORE_SIZE: '1'
-  COMMON_POOL_MAX_SIZE: '5'
+  COMMON_POOL_CORE_SIZE: '{{ .Values.ng_manager.config.common_pool_core_size | default "1" }}'
+  COMMON_POOL_MAX_SIZE: '{{ .Values.ng_manager.config.common_pool_max_size | default "5" }}'
   CURRENT_GEN_UI_URL: http://proxy/#/
   DEPLOY_MODE: KUBERNETES_ONPREM
   DEPLOY_VERSION: COMMUNITY
@@ -14,9 +14,9 @@ data:
   DISTRIBUTED_LOCK_IMPLEMENTATION: REDIS
   ENABLE_DEFAULT_RESOURCE_GROUP_CREATION: 'true'
   ENABLE_SERIALGC: 'true'
-  EVENTS_FRAMEWORK_NETTY_THREADS: '1'
+  EVENTS_FRAMEWORK_NETTY_THREADS: '{{ .Values.ng_manager.config.events_framework_netty_threads | default "1" }}'
   EVENTS_FRAMEWORK_REDIS_URL: redis://redis:6379
-  GIT_FULLSYNC_ENTITY_ITERATOR_THREAD_POOL_SIZE: '2'
+  GIT_FULLSYNC_ENTITY_ITERATOR_THREAD_POOL_SIZE: '{{ .Values.ng_manager.config.git_fullsync_entity_iterator_thread_pool_size | default "2" }}'
   GRPC_SERVER_PORT: '9979'
   JAVA_ADVANCED_FLAGS: -XX:-TieredCompilation
   LICENSE_DEPLOY_VARIANT: COMMUNITY
@@ -28,31 +28,31 @@ data:
   MANAGER_CLIENT_BASEURL: http://proxy/api/
   MANAGER_TARGET: manager:9879
   MANAGER_UI_URL: http://proxy
-  MEMORY: '{{ .Values.ng_manager.memory }}m'
+  MEMORY: '{{ .Values.ng_manager.config.memory }}m'
   MOCK_ACCESS_CONTROL_SERVICE: 'true'
   NG_MANAGER_API_URL: http://proxy/ng/api/
   NG_MANAGER_AUTHORITY: default-authority.harness.io
   NG_MANAGER_CLIENT_BASEURL: http://proxy/ng/api/
   NG_MANAGER_TARGET: localhost:13002
   NG_MANAGER_UI_URL: http://proxy/ng/#/
-  NG_VAULT_ITERATOR_THREAD_POOL_SIZE: '2'
+  NG_VAULT_ITERATOR_THREAD_POOL_SIZE: '{{ .Values.ng_manager.config.ng_vault_iterator_thread_pool_size | default "2" }}'
   PMS_AUTHORITY: default-authority.harness.io
   PMS_GITSYNC_AUTHORITY: default-authority.harness.io
   PMS_GITSYNC_TARGET: pipeline-service:14002
-  PMS_SDK_EXECUTION_POOL_CORE_SIZE: '1'
-  PMS_SDK_EXECUTION_POOL_MAX_SIZE: '2'
-  PMS_SDK_ORCHESTRATION_EVENT_POOL_CORE_SIZE: '1'
-  PMS_SDK_ORCHESTRATION_EVENT_POOL_MAX_SIZE: '2'
+  PMS_SDK_EXECUTION_POOL_CORE_SIZE: '{{ .Values.ng_manager.config.pms_sdk_execution_pool_core_size | default "1" }}'
+  PMS_SDK_EXECUTION_POOL_MAX_SIZE: '{{ .Values.ng_manager.config.pms_sdk_execution_pool_max_size | default "2" }}'
+  PMS_SDK_ORCHESTRATION_EVENT_POOL_CORE_SIZE: '{{ .Values.ng_manager.config.pms_sdk_orchestration_event_pool_core_size | default "1" }}'
+  PMS_SDK_ORCHESTRATION_EVENT_POOL_MAX_SIZE: '{{ .Values.ng_manager.config.pms_sdk_orchestration_event_pool_max_size | default "2" }}'
   PMS_TARGET: pipeline-service:12011
-  REDIS_NETTY_THREADS: '1'
+  REDIS_NETTY_THREADS: '{{ .Values.ng_manager.config.redis_netty_threads | default "1" }}'
   RESOURCE_GROUP_BASE_URL: http://proxy/resourcegroup/api/
   SCM_SERVICE_URI: scm:8091
   SEGMENT_ENABLED: 'true'
   SEGMENT_URL: https://stats.drone.ci/api/v1/
-  SERVER_MAX_THREADS: '16'
+  SERVER_MAX_THREADS: '{{ .Values.ng_manager.config.server_max_threads | default "16" }}'
   SHOULD_CONFIGURE_WITH_PMS: 'true'
   USE_REDIS_FOR_SDK_RESPONSE_EVENTS: 'true'
-  WEBHOOK_EVENT_PROCESSING_SERVICE_THREAD_SIZE: '2'
+  WEBHOOK_EVENT_PROCESSING_SERVICE_THREAD_SIZE: '{{ .Values.ng_manager.config.webhook_event_processing_service_thread_size | default "2" }}'
 
 ---
 
@@ -146,3 +146,10 @@ spec:
           timeoutSeconds: 10
           periodSeconds: 10
           failureThreshold: 10
+        resources:
+          limits:
+            memory: {{ .Values.ng_manager.resources.limits.memory }}
+            cpu: {{ .Values.ng_manager.resources.limits.cpu }}
+          requests:
+            memory: {{ .Values.ng_manager.resources.requests.memory }}
+            cpu: {{ .Values.ng_manager.resources.requests.cpu }}
diff --git a/helm/harness/templates/ng-ui.yaml b/helm/harness/templates/ng-ui.yaml
index a3ab3c9..cbee8ef 100644
--- a/helm/harness/templates/ng-ui.yaml
+++ b/helm/harness/templates/ng-ui.yaml
@@ -66,3 +66,10 @@ spec:
         envFrom:
         - configMapRef:
             name: ng-ui
+        resources:
+          limits:
+            memory: {{ .Values.ng_ui.resources.limits.memory }}
+            cpu: {{ .Values.ng_ui.resources.limits.cpu }}
+          requests:
+            memory: {{ .Values.ng_ui.resources.requests.memory }}
+            cpu: {{ .Values.ng_ui.resources.requests.cpu }}
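Across the Java services the configmap `MEMORY` key sets the JVM heap in megabytes, while `resources.limits.memory` caps the whole container, so the two move together but are deliberately not equal: the gap absorbs metaspace, thread stacks, and off-heap buffers. A small sanity check with the production numbers for ng-manager (the ~35% ratio is an observation about these values, not a stated requirement):

```shell
# Heap (MB, from ng_manager.config.memory) versus container limit (Mi).
heap=512; limit=1454
echo "heap is $((100 * heap / limit))% of the container limit"   # ~35%
```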
diff --git a/helm/harness/templates/pipeline-service.yaml b/helm/harness/templates/pipeline-service.yaml
index fc32a01..c626c75 100644
--- a/helm/harness/templates/pipeline-service.yaml
+++ b/helm/harness/templates/pipeline-service.yaml
@@ -3,30 +3,30 @@ kind: ConfigMap
 metadata:
   name: pipeline-service
 data:
-  ADVISE_EVENT_CONSUMER_THREAD_COUNT: '1'
+  ADVISE_EVENT_CONSUMER_THREAD_COUNT: '{{ .Values.pipeline.config.advise_event_consumer_thread_count | default "1" }}'
   AUTH_ENABLED: 'true'
-  BARRIER_ITERATOR_THREAD_POOL_SIZE: '2'
+  BARRIER_ITERATOR_THREAD_POOL_SIZE: '{{ .Values.pipeline.config.barrier_iterator_thread_pool_size | default "2" }}'
   CACHE_BACKEND: REDIS
   CACHE_CONFIG_REDIS_URL: redis://redis:6379
   CI_MANAGER_AUTHORITY: default-authority.harness.io
   CI_MANAGER_BASE_URL: http://proxy/ci/
   CI_MANAGER_TARGET: ci-manager:9979
-  COMMON_POOL_CORE_SIZE: '1'
-  COMMON_POOL_MAX_SIZE: '5'
-  CREATE_PLAN_EVENT_CONSUMER_THREAD_COUNT: '1'
+  COMMON_POOL_CORE_SIZE: '{{ .Values.pipeline.config.common_pool_core_size | default "1" }}'
+  COMMON_POOL_MAX_SIZE: '{{ .Values.pipeline.config.common_pool_max_size | default "5" }}'
+  CREATE_PLAN_EVENT_CONSUMER_THREAD_COUNT: '{{ .Values.pipeline.config.create_plan_event_consumer_thread_count | default "1" }}'
   DEPLOY_MODE: KUBERNETES_ONPREM
   DEPLOY_VERSION: COMMUNITY
   DISABLE_RESOURCE_VALIDATION: 'true'
   DISTRIBUTED_LOCK_IMPLEMENTATION: REDIS
   ENABLE_AUDIT: 'false'
   ENABLE_SERIALGC: 'true'
-  EVENTS_FRAMEWORK_NETTY_THREADS: '1'
+  EVENTS_FRAMEWORK_NETTY_THREADS: '{{ .Values.pipeline.config.events_framework_netty_threads | default "1" }}'
   EVENTS_FRAMEWORK_REDIS_URL: redis://redis:6379
-  FACILITATE_EVENT_CONSUMER_THREAD_COUNT: '1'
-  GRAPH_UPDATE_EVENT_CONSUMER_THREAD_COUNT: '1'
+  FACILITATE_EVENT_CONSUMER_THREAD_COUNT: '{{ .Values.pipeline.config.facilitate_event_consumer_thread_count | default "1" }}'
+  GRAPH_UPDATE_EVENT_CONSUMER_THREAD_COUNT: '{{ .Values.pipeline.config.graph_update_event_consumer_thread_count | default "1" }}'
   GRPC_SERVER_PORT: '12011'
-  INTERRUPT_CONSUMER_THREAD_COUNT: '1'
-  INTERRUPT_MONITOR_ITERATOR_THREAD_POOL_SIZE: '2'
+  INTERRUPT_CONSUMER_THREAD_COUNT: '{{ .Values.pipeline.config.interrupt_consumer_thread_count | default "1" }}'
+  INTERRUPT_MONITOR_ITERATOR_THREAD_POOL_SIZE: '{{ .Values.pipeline.config.interrupt_monitor_iterator_thread_pool_size | default "2" }}'
   JAVA_ADVANCED_FLAGS: -XX:-TieredCompilation
   LOCK_CONFIG_REDIS_URL: redis://redis:6379
   LOGGING_LEVEL: WARN
@@ -35,46 +35,46 @@ data:
   MANAGER_BASE_URL: http://proxy/api/
   MANAGER_CLIENT_BASEURL: http://proxy/api/
   MANAGER_TARGET: manager:9879
-  MEMORY: '{{ .Values.pms.memory }}m'
+  MEMORY: '{{ .Values.pipeline.config.memory }}m'
   MOCK_ACCESS_CONTROL_SERVICE: 'true'
   NG_MANAGER_AUTHORITY: default-authority.harness.io
   NG_MANAGER_BASE_URL: http://proxy/ng/api/
   NG_MANAGER_GITSYNC_AUTHORITY: default-authority.harness.io
   NG_MANAGER_GITSYNC_TARGET: ng-manager:13002
   NG_MANAGER_TARGET: ng-manager:9979
-  NODE_START_EVENT_CONSUMER_THREAD_COUNT: '1'
+  NODE_START_EVENT_CONSUMER_THREAD_COUNT: '{{ .Values.pipeline.config.node_start_event_consumer_thread_count | default "1" }}'
   NOTIFICATION_BASE_URL: http://proxy/notifications/api/
-  ORCHESTRATION_EVENT_CONSUMER_THREAD_COUNT: '1'
-  ORCHESTRATION_POOL_CORE_SIZE: '1'
-  ORCHESTRATION_POOL_MAX_SIZE: '2'
-  ORCHESTRATION_VISUALIZATION_POOL_CORE_SIZE: '1'
-  ORCHESTRATION_VISUALIZATION_POOL_MAX_SIZE: '2'
-  PARTIAL_PLAN_RESPONSE_EVENT_CONSUMER_THREAD_COUNT: '1'
-  PIPELINE_EXECUTION_POOL_CORE_SIZE: '1'
-  PIPELINE_EXECUTION_POOL_MAX_SIZE: '10'
+  ORCHESTRATION_EVENT_CONSUMER_THREAD_COUNT: '{{ .Values.pipeline.config.orchestration_event_consumer_thread_count | default "1" }}'
+  ORCHESTRATION_POOL_CORE_SIZE: '{{ .Values.pipeline.config.orchestration_pool_core_size | default "1" }}'
+  ORCHESTRATION_POOL_MAX_SIZE: '{{ .Values.pipeline.config.orchestration_pool_max_size | default "2" }}'
+  ORCHESTRATION_VISUALIZATION_POOL_CORE_SIZE: '{{ .Values.pipeline.config.orchestration_visualization_pool_core_size | default "1" }}'
+  ORCHESTRATION_VISUALIZATION_POOL_MAX_SIZE: '{{ .Values.pipeline.config.orchestration_visualization_pool_max_size | default "2" }}'
+  PARTIAL_PLAN_RESPONSE_EVENT_CONSUMER_THREAD_COUNT: '{{ .Values.pipeline.config.partial_plan_response_event_consumer_thread_count | default "1" }}'
+  PIPELINE_EXECUTION_POOL_CORE_SIZE: '{{ .Values.pipeline.config.pipeline_execution_pool_core_size | default "1" }}'
+  PIPELINE_EXECUTION_POOL_MAX_SIZE: '{{ .Values.pipeline.config.pipeline_execution_pool_max_size | default "10" }}'
   PIPELINE_SERVICE_BASE_URL: http://proxy/ng/#
-  PLAN_NOTIFY_EVENT_CONSUMER_THREAD_COUNT: '1'
+  PLAN_NOTIFY_EVENT_CONSUMER_THREAD_COUNT: '{{ .Values.pipeline.config.plan_notify_event_consumer_thread_count | default "1" }}'
   PMS_API_BASE_URL: http://proxy/pipeline/api/
-  PMS_NOTIFY_EVENT_CONSUMER_THREAD_COUNT: '1'
-  PMS_SDK_EXECUTION_POOL_CORE_SIZE: '1'
-  PMS_SDK_EXECUTION_POOL_MAX_SIZE: '2'
-  PMS_SDK_ORCHESTRATION_EVENT_POOL_CORE_SIZE: '1'
-  PMS_SDK_ORCHESTRATION_EVENT_POOL_MAX_SIZE: '2'
-  PROGRESS_EVENT_CONSUMER_THREAD_COUNT: '1'
-  REDIS_NETTY_THREADS: '1'
-  RESOURCE_RESTRAINT_ITERATOR_THREAD_POOL_SIZE: '2'
-  RESUME_EVENT_CONSUMER_THREAD_COUNT: '1'
-  SCHEDULED_TRIGGER_ITERATOR_THREAD_POOL_SIZE: '2'
+  PMS_NOTIFY_EVENT_CONSUMER_THREAD_COUNT: '{{ .Values.pipeline.config.pms_notify_event_consumer_thread_count | default "1" }}'
+  PMS_SDK_EXECUTION_POOL_CORE_SIZE: '{{ .Values.pipeline.config.pms_sdk_execution_pool_core_size | default "1" }}'
+  PMS_SDK_EXECUTION_POOL_MAX_SIZE: '{{ .Values.pipeline.config.pms_sdk_execution_pool_max_size | default "2" }}'
+  PMS_SDK_ORCHESTRATION_EVENT_POOL_CORE_SIZE: '{{ .Values.pipeline.config.pms_sdk_orchestration_event_pool_core_size | default "1" }}'
+  PMS_SDK_ORCHESTRATION_EVENT_POOL_MAX_SIZE: '{{ .Values.pipeline.config.pms_sdk_orchestration_event_pool_max_size | default "2" }}'
+  PROGRESS_EVENT_CONSUMER_THREAD_COUNT: '{{ .Values.pipeline.config.progress_event_consumer_thread_count | default "1" }}'
+  REDIS_NETTY_THREADS: '{{ .Values.pipeline.config.redis_netty_threads | default "1" }}'
+  RESOURCE_RESTRAINT_ITERATOR_THREAD_POOL_SIZE: '{{ .Values.pipeline.config.resource_restraint_iterator_thread_pool_size | default "2" }}'
+  RESUME_EVENT_CONSUMER_THREAD_COUNT: '{{ .Values.pipeline.config.resume_event_consumer_thread_count | default "1" }}'
+  SCHEDULED_TRIGGER_ITERATOR_THREAD_POOL_SIZE: '{{ .Values.pipeline.config.scheduled_trigger_iterator_thread_pool_size | default "2" }}'
   SCM_SERVICE_URI: scm:8091
-  SDK_RESPONSE_EVENT_CONSUMER_THREAD_COUNT: '1'
+  SDK_RESPONSE_EVENT_CONSUMER_THREAD_COUNT: '{{ .Values.pipeline.config.sdk_response_event_consumer_thread_count | default "1" }}'
   SEGMENT_ENABLED: 'true'
   SEGMENT_URL: https://stats.drone.ci/api/v1/
-  SERVER_MAX_THREADS: '12'
-  TIMEOUT_ENGINE_ITERATOR_THREAD_POOL_SIZE: '2'
+  SERVER_MAX_THREADS: '{{ .Values.pipeline.config.server_max_threads | default "12" }}'
+  TIMEOUT_ENGINE_ITERATOR_THREAD_POOL_SIZE: '{{ .Values.pipeline.config.timeout_engine_iterator_thread_pool_size | default "2" }}'
   USE_REDIS_FOR_INTERRUPTS: 'true'
   USE_REDIS_FOR_ORCHESTRATION_EVENTS: 'true'
   USE_REDIS_FOR_SDK_RESPONSE_EVENTS: 'true'
-  WEBHOOK_ITERATOR_THREAD_POOL_SIZE: '2'
+  WEBHOOK_ITERATOR_THREAD_POOL_SIZE: '{{ .Values.pipeline.config.webhook_iterator_thread_pool_size | default "2" }}'
 
 ---
 
@@ -170,6 +170,13 @@ spec:
           timeoutSeconds: 5
           periodSeconds: 10
           failureThreshold: 20
+        resources:
+          limits:
+            memory: {{ .Values.pipeline.resources.limits.memory }}
+            cpu: {{ .Values.pipeline.resources.limits.cpu }}
+          requests:
+            memory: {{ .Values.pipeline.resources.requests.memory }}
+            cpu: {{ .Values.pipeline.resources.requests.cpu }}
       volumes:
       - name: mongo-config
         secret:
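Every key under `pipeline.config` is an independent Helm value, so a single pool can be retuned at upgrade time without editing the values files. For example (release name and chart path are assumptions):

```shell
# Raise only the pipeline executor pool on a larger host, keeping the
# rest of the production profile as shipped.
helm upgrade harness ./helm/harness \
  -f helm/harness/values-production.yaml \
  --set pipeline.config.pipeline_execution_pool_max_size=40
```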
diff --git a/helm/harness/templates/platform-service.yaml b/helm/harness/templates/platform-service.yaml
index fdf4eda..0d5a878 100644
--- a/helm/harness/templates/platform-service.yaml
+++ b/helm/harness/templates/platform-service.yaml
@@ -5,8 +5,8 @@
 data:
   AUDIT_CLIENT_BASEURL: http://proxy/audit/api/
   AUDIT_ENABLED: 'false'
-  COMMON_POOL_CORE_SIZE: '1'
-  COMMON_POOL_MAX_SIZE: '2'
+  COMMON_POOL_CORE_SIZE: '{{ .Values.platform.config.common_pool_core_size | default "1" }}'
+  COMMON_POOL_MAX_SIZE: '{{ .Values.platform.config.common_pool_max_size | default "2" }}'
   DEPLOY_MODE: KUBERNETES_ONPREM
   DEPLOY_VERSION: COMMUNITY
   DISABLE_RESOURCE_VALIDATION: 'true'
@@ -14,7 +14,7 @@ data:
   ENABLE_AUDIT_SERVICE: 'false'
   ENABLE_RESOURCE_GROUP: 'false'
   ENABLE_SERIALGC: 'true'
-  EVENTS_FRAMEWORK_NETTY_THREADS: '1'
+  EVENTS_FRAMEWORK_NETTY_THREADS: '{{ .Values.platform.config.events_framework_netty_threads | default "1" }}'
   EVENTS_FRAMEWORK_REDIS_URL: redis://redis:6379
   GRPC_MANAGER_AUTHORITY: default-authority.harness.io
   GRPC_MANAGER_TARGET: manager:9879
@@ -22,14 +22,14 @@ data:
   LOCK_CONFIG_REDIS_URL: redis://redis:6379
   LOGGING_LEVEL: WARN
   MANAGER_CLIENT_BASEURL: http://proxy/api/
-  MEMORY: '{{ .Values.platform.memory }}m'
+  MEMORY: '{{ .Values.platform.config.memory }}m'
   MOCK_ACCESS_CONTROL_SERVICE: 'true'
   NG_MANAGER_CLIENT_BASEURL: http://proxy/ng/api/
   PIPELINE_SERVICE_CLIENT_BASEURL: http://proxy/pipeline/api/
   RBAC_URL: http://proxy/ng/api/
-  REDIS_NETTY_THREADS: '1'
+  REDIS_NETTY_THREADS: '{{ .Values.platform.config.redis_netty_threads | default "1" }}'
   RESOURCE_GROUP_CLIENT_BASE_URL: http://proxy/resourcegroup/api/
-  SERVER_MAX_THREADS: '12'
+  SERVER_MAX_THREADS: '{{ .Values.platform.config.server_max_threads | default "12" }}'
   SMTP_HOST: ''
   SMTP_PASSWORD: ''
   SMTP_PORT: '465'
@@ -121,6 +121,13 @@ spec:
           timeoutSeconds: 10
           periodSeconds: 10
           failureThreshold: 20
+        resources:
+          limits:
+            memory: {{ .Values.platform.resources.limits.memory }}
+            cpu: {{ .Values.platform.resources.limits.cpu }}
+          requests:
+            memory: {{ .Values.platform.resources.requests.memory }}
+            cpu: {{ .Values.platform.resources.requests.cpu }}
       volumes:
       - name: mongo-config
         secret:
diff --git a/helm/harness/templates/proxy.yaml b/helm/harness/templates/proxy.yaml
index 37972ac..90eed68 100644
--- a/helm/harness/templates/proxy.yaml
+++ b/helm/harness/templates/proxy.yaml
@@ -220,6 +220,13 @@ spec:
         - name: nginx-config
           mountPath: /etc/nginx/nginx.conf
           subPath: nginx.conf
+        resources:
+          limits:
+            memory: {{ .Values.proxy.resources.limits.memory }}
+            cpu: {{ .Values.proxy.resources.limits.cpu }}
+          requests:
+            memory: {{ .Values.proxy.resources.requests.memory }}
+            cpu: {{ .Values.proxy.resources.requests.cpu }}
       volumes:
       - name: nginx-config
         configMap:
diff --git a/helm/harness/templates/redis.yaml b/helm/harness/templates/redis.yaml
index 4f090d8..325360f 100644
--- a/helm/harness/templates/redis.yaml
+++ b/helm/harness/templates/redis.yaml
@@ -58,6 +58,13 @@ spec:
         volumeMounts:
         - mountPath: /data
           name: data
+        resources:
+          limits:
+            memory: {{ .Values.redis.resources.limits.memory }}
+            cpu: {{ .Values.redis.resources.limits.cpu }}
+          requests:
+            memory: {{ .Values.redis.resources.requests.memory }}
+            cpu: {{ .Values.redis.resources.requests.cpu }}
       volumes:
      - name: data
        emptyDir: {}
diff --git a/helm/harness/templates/scm-service.yaml b/helm/harness/templates/scm-service.yaml
index a948305..1747ee6 100644
--- a/helm/harness/templates/scm-service.yaml
+++ b/helm/harness/templates/scm-service.yaml
@@ -43,3 +43,10 @@ spec:
           exec:
             command: ["/grpc_health_probe", "-addr=:8091"]
           initialDelaySeconds: 10
+        resources:
+          limits:
+            memory: {{ .Values.scm.resources.limits.memory }}
+            cpu: {{ .Values.scm.resources.limits.cpu }}
+          requests:
+            memory: {{ .Values.scm.resources.requests.memory }}
+            cpu: {{ .Values.scm.resources.requests.cpu }}
diff --git a/helm/harness/values-production.yaml b/helm/harness/values-production.yaml
index 385739e..6d943bc 100644
--- a/helm/harness/values-production.yaml
+++ b/helm/harness/values-production.yaml
@@ -1,11 +1,182 @@
+ng_ui:
+  resources:
+    limits:
+      memory: 32Mi
+      cpu: 200m
+    requests:
+      memory: 32Mi
+      cpu: 200m
+
+ng_auth_ui:
+  resources:
+    limits:
+      memory: 24Mi
+      cpu: 200m
+    requests:
+      memory: 24Mi
+      cpu: 200m
+
 manager:
-  memory: 1024
+  config:
+    common_pool_core_size: 2
+    common_pool_max_size: 20
+    delegate_capabilities_record_iterator_thread_count: 4
+    delegate_task_expiry_check_iterator_thread_count: 4
+    deployment_event_listener_count: 2
+    deployment_time_series_event_listener_count: 2
+    events_framework_netty_threads: 2
+    event_delivery_iterator_thread_count: 4
+    execution_event_listener_count: 2
+    general_notify_event_listener_count: 2
+    memory: 512
+    orchestration_notify_event_listener_count: 2
+    perpetual_task_assignment_iterator_thread_count: 4
+    perpetual_task_rebalance_iterator_thread_count: 4
+    server_max_threads: 24
+  resources:
+    limits:
+      memory: 1454Mi
+      cpu: 1
+    requests:
+      memory: 512Mi
+      cpu: 200m
 
 ng_manager:
-  memory: 1024
+  config:
+    common_pool_core_size: 2
+    common_pool_max_size: 10
+    events_framework_netty_threads: 2
+    git_fullsync_entity_iterator_thread_pool_size: 4
+    memory: 512
+    ng_vault_iterator_thread_pool_size: 4
+    pms_sdk_execution_pool_core_size: 2
+    pms_sdk_execution_pool_max_size: 4
+    pms_sdk_orchestration_event_pool_core_size: 2
+    pms_sdk_orchestration_event_pool_max_size: 4
+    redis_netty_threads: 2
+    server_max_threads: 32
+    webhook_event_processing_service_thread_size: 4
+  resources:
+    limits:
+      memory: 1454Mi
+      cpu: 1
+    requests:
+      memory: 512Mi
+      cpu: 200m
 
-pms:
-  memory: 1024
+pipeline:
+  config:
+    advise_event_consumer_thread_count: 2
+    barrier_iterator_thread_pool_size: 4
+    common_pool_core_size: 2
+    common_pool_max_size: 10
+    create_plan_event_consumer_thread_count: 2
+    events_framework_netty_threads: 2
+    facilitate_event_consumer_thread_count: 2
+    graph_update_event_consumer_thread_count: 2
+    interrupt_consumer_thread_count: 2
+    interrupt_monitor_iterator_thread_pool_size: 4
+    memory: 576
+    node_start_event_consumer_thread_count: 2
+    orchestration_event_consumer_thread_count: 2
+    orchestration_pool_core_size: 2
+    orchestration_pool_max_size: 4
+    orchestration_visualization_pool_core_size: 2
+    orchestration_visualization_pool_max_size: 4
+    partial_plan_response_event_consumer_thread_count: 2
+    pipeline_execution_pool_core_size: 2
+    pipeline_execution_pool_max_size: 20
+    plan_notify_event_consumer_thread_count: 2
+    pms_notify_event_consumer_thread_count: 2
+    pms_sdk_execution_pool_core_size: 2
+    pms_sdk_execution_pool_max_size: 4
+    pms_sdk_orchestration_event_pool_core_size: 2
+    pms_sdk_orchestration_event_pool_max_size: 4
+    progress_event_consumer_thread_count: 2
+    redis_netty_threads: 2
+    resource_restraint_iterator_thread_pool_size: 4
+    resume_event_consumer_thread_count: 2
+    scheduled_trigger_iterator_thread_pool_size: 4
+    sdk_response_event_consumer_thread_count: 2
+    server_max_threads: 24
+    timeout_engine_iterator_thread_pool_size: 4
+    webhook_iterator_thread_pool_size: 4
+  resources:
+    limits:
+      memory: 1320Mi
+      cpu: 1
+    requests:
+      memory: 576Mi
+      cpu: 200m
 
 platform:
-  memory: 512
+  config:
+    common_pool_core_size: 2
+    common_pool_max_size: 4
+    events_framework_netty_threads: 2
+    memory: 128
+    redis_netty_threads: 2
+    server_max_threads: 24
+  resources:
+    limits:
+      memory: 372Mi
+      cpu: 1
+    requests:
+      memory: 128Mi
+      cpu: 200m
+
+log_service:
+  resources:
+    limits:
+      memory: 160Mi
+      cpu: 200m
+    requests:
+      memory: 128Mi
+      cpu: 200m
+
+scm:
+  resources:
+    limits:
+      memory: 96Mi
+      cpu: 200m
+    requests:
+      memory: 96Mi
+      cpu: 200m
+
+delegate_proxy:
+  resources:
+    limits:
+      memory: 48Mi
+      cpu: 200m
+    requests:
+      memory: 48Mi
+      cpu: 200m
+
+proxy:
+  resources:
+    limits:
+      memory: 32Mi
+      cpu: 200m
+    requests:
+      memory: 32Mi
+      cpu: 200m
+
+redis:
+  resources:
+    limits:
+      memory: 384Mi
+      cpu: 200m
+    requests:
+      memory: 128Mi
+      cpu: 200m
+
+mongo:
+  resources:
+    limits:
+      memory: 768Mi
+      cpu: 200m
+    requests:
+      memory: 512Mi
+      cpu: 200m
+      storage: 10Gi
+      wiredTigerCacheSize: 0.375
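Since later `-f` files win key by key, values-production.yaml can itself be overlaid by a small site-specific file; only the keys present there change, and everything else keeps falling through to the production values and then the chart defaults. A sketch (the file name and numbers are illustrative; the cache value follows the halving rule noted above):

```shell
# Minimal per-site override: grow only mongo, leave the rest alone.
cat > my-sizing.yaml <<'EOF'
mongo:
  resources:
    limits:
      memory: 1536Mi
    requests:
      memory: 1024Mi
      wiredTigerCacheSize: 0.75
EOF
helm upgrade --install harness ./helm/harness \
  -f helm/harness/values-production.yaml -f my-sizing.yaml
```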
diff --git a/helm/harness/values.yaml b/helm/harness/values.yaml
index 655891e..0d1b522 100644
--- a/helm/harness/values.yaml
+++ b/helm/harness/values.yaml
@@ -12,14 +12,120 @@ versions:
   redis: 6.2.5-alpine
   scm: release-27
 
+ng_ui:
+  resources:
+    limits:
+      memory: 16Mi
+      cpu: 200m
+    requests:
+      memory: 16Mi
+      cpu: 200m
+
+ng_auth_ui:
+  resources:
+    limits:
+      memory: 12Mi
+      cpu: 200m
+    requests:
+      memory: 12Mi
+      cpu: 200m
+
 manager:
-  memory: 232
+  config:
+    memory: 232
+  resources:
+    limits:
+      memory: 780Mi
+      cpu: 1
+    requests:
+      memory: 512Mi
+      cpu: 200m
 
 ng_manager:
-  memory: 232
+  config:
+    memory: 232
+  resources:
+    limits:
+      memory: 752Mi
+      cpu: 1
+    requests:
+      memory: 512Mi
+      cpu: 200m
 
-pms:
-  memory: 200
+pipeline:
+  config:
+    memory: 200
+  resources:
+    limits:
+      memory: 660Mi
+      cpu: 1
+    requests:
+      memory: 512Mi
+      cpu: 200m
 
 platform:
-  memory: 60
+  config:
+    memory: 60
+  resources:
+    limits:
+      memory: 244Mi
+      cpu: 1
+    requests:
+      memory: 128Mi
+      cpu: 200m
+
+log_service:
+  resources:
+    limits:
+      memory: 72Mi
+      cpu: 200m
+    requests:
+      memory: 72Mi
+      cpu: 200m
+
+scm:
+  resources:
+    limits:
+      memory: 48Mi
+      cpu: 200m
+    requests:
+      memory: 48Mi
+      cpu: 200m
+
+delegate_proxy:
+  resources:
+    limits:
+      memory: 24Mi
+      cpu: 200m
+    requests:
+      memory: 24Mi
+      cpu: 200m
+
+proxy:
+  resources:
+    limits:
+      memory: 16Mi
+      cpu: 200m
+    requests:
+      memory: 16Mi
+      cpu: 200m
+
+redis:
+  resources:
+    limits:
+      memory: 128Mi
+      cpu: 200m
+    requests:
+      memory: 128Mi
+      cpu: 200m
+
+mongo:
+  resources:
+    limits:
+      memory: 320Mi
+      cpu: 200m
+    requests:
+      memory: 320Mi
+      cpu: 200m
+      storage: 10Gi
+      wiredTigerCacheSize: 0.125
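With this many new values keys, a render-and-dry-run pass catches typos early (a misspelled key does not fail the install; it silently falls back to the `| default` value). A final check before committing, assuming a reachable cluster for the client-side dry run:

```shell
# Lint the chart with the production values, then make sure everything
# it renders is still valid Kubernetes YAML.
helm lint ./helm/harness -f helm/harness/values-production.yaml
helm template harness ./helm/harness -f helm/harness/values-production.yaml \
  | kubectl apply --dry-run=client -f -
```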