2020-06-15 18:20:18 +02:00
|
|
|
import logging
|
2019-11-08 06:28:46 +01:00
|
|
|
import os
|
2019-03-18 08:07:19 +01:00
|
|
|
import yaml
|
2020-06-15 18:20:18 +02:00
|
|
|
from urllib.parse import urlencode
|
2019-11-25 10:28:11 +01:00
|
|
|
from g import versions_file_path, host_root_dir, DEFAULT_UID, INTERNAL_NO_PROXY_DN
|
2020-10-18 18:16:02 +02:00
|
|
|
from models import InternalTLS, Metric
|
2019-11-08 06:28:46 +01:00
|
|
|
from utils.misc import generate_random_string, owner_can_read, other_can_read
|
2018-11-15 04:09:57 +01:00
|
|
|
|
2019-08-29 09:04:10 +02:00
|
|
|
# Defaults for the Go database/sql connection pool when harbor.yml leaves them unset.
default_db_max_idle_conns = 2 # NOTE: https://golang.org/pkg/database/sql/#DB.SetMaxIdleConns
default_db_max_open_conns = 0 # NOTE: https://golang.org/pkg/database/sql/#DB.SetMaxOpenConns (0 = unlimited)

# Placeholder cert/key paths shipped in the sample harbor.yml; a config that still
# carries these values is treated as "not configured" by validate().
default_https_cert_path = '/your/certificate/path'
default_https_key_path = '/your/certificate/path'

# Internal account name the core/registry components use for registry auth;
# the matching password is generated per deployment in parse_yaml_config().
REGISTRY_USER_NAME = 'harbor_registry_user'
|
|
|
|
|
2019-11-08 06:28:46 +01:00
|
|
|
|
2019-10-23 08:08:15 +02:00
|
|
|
def validate(conf: dict, **kwargs):
    """Sanity-check a flattened config dict before templates are rendered.

    :param conf: flat config dict produced by parse_yaml_config
    :param kwargs: feature flags, e.g. notary_mode=True when Notary is deployed
    :raises Exception: on the first invalid setting found
    """
    # hostname validate: loopback and the sample placeholder are unusable
    if conf.get('hostname') == '127.0.0.1':
        raise Exception("127.0.0.1 can not be the hostname")
    if conf.get('hostname') == 'reg.mydomain.com':
        raise Exception("Please specify hostname")

    # protocol validate
    protocol = conf.get("protocol")
    if protocol != "https" and kwargs.get('notary_mode'):
        raise Exception(
            "Error: the protocol must be https when Harbor is deployed with Notary")
    if protocol == "https":
        # cert/key must be present and must not be the sample placeholders
        if not conf.get("cert_path") or conf["cert_path"] == default_https_cert_path:
            raise Exception("Error: The protocol is https but attribute ssl_cert is not set")
        if not conf.get("cert_key_path") or conf['cert_key_path'] == default_https_key_path:
            raise Exception("Error: The protocol is https but attribute ssl_cert_key is not set")
    if protocol == "http":
        logging.warning("WARNING: HTTP protocol is insecure. Harbor will deprecate http protocol in the future. Please make sure to upgrade to https")

    # log endpoint validate: a key that is present but empty is a config error
    if ('log_ep_host' in conf) and not conf['log_ep_host']:
        raise Exception('Error: must set log endpoint host to enable external host')
    if ('log_ep_port' in conf) and not conf['log_ep_port']:
        raise Exception('Error: must set log endpoint port to enable external host')
    if ('log_ep_protocol' in conf) and (conf['log_ep_protocol'] not in ['udp', 'tcp']):
        raise Exception("Protocol in external log endpoint must be one of 'udp' or 'tcp' ")

    # Storage validate
    valid_storage_drivers = ["filesystem", "azure", "gcs", "s3", "swift", "oss"]
    storage_provider_name = conf.get("storage_provider_name")
    if storage_provider_name not in valid_storage_drivers:
        raise Exception("Error: storage driver %s is not supported, only the following ones are supported: %s" % (
            storage_provider_name, ",".join(valid_storage_drivers)))

    storage_provider_config = conf.get("storage_provider_config") ## original is registry_storage_provider_config
    if storage_provider_name != "filesystem":
        # parse_yaml_config stores a dict here ({} when unset), so test
        # truthiness rather than comparing against the empty string — the
        # old `== ""` check never fired for an empty dict.
        if not storage_provider_config:
            raise Exception(
                "Error: no provider configurations are provided for provider %s" % storage_provider_name)

    # ca_bundle validate: the bundle file must be readable by the in-container user
    if conf.get('registry_custom_ca_bundle_path'):
        registry_custom_ca_bundle_path = conf.get('registry_custom_ca_bundle_path') or ''
        if registry_custom_ca_bundle_path.startswith('/data/'):
            # already under the shared data volume; usable as-is
            ca_bundle_host_path = registry_custom_ca_bundle_path
        else:
            # any other absolute path is resolved through the host root mount
            ca_bundle_host_path = os.path.join(host_root_dir, registry_custom_ca_bundle_path.lstrip('/'))
        try:
            # single stat call; the old code stat'ed the file twice
            ca_bundle_stat = os.stat(ca_bundle_host_path)
            uid = ca_bundle_stat.st_uid
            st_mode = ca_bundle_stat.st_mode
        except Exception as e:
            logging.error(e)
            raise Exception('Can not get file info')
        err_msg = 'Cert File {} should be owned by user with uid 10000 or readable by others'.format(registry_custom_ca_bundle_path)
        if uid == DEFAULT_UID and not owner_can_read(st_mode):
            raise Exception(err_msg)
        if uid != DEFAULT_UID and not other_can_read(st_mode):
            raise Exception(err_msg)

    # TODO:
    # If user enable trust cert dir, need check if the files in this dir is readable.
|
2018-11-15 04:09:57 +01:00
|
|
|
|
2019-12-25 07:59:47 +01:00
|
|
|
|
2019-03-18 08:07:19 +01:00
|
|
|
def parse_versions():
    """Load the component-versions file shipped with the installer.

    :returns: dict parsed from the YAML versions file, or {} when the
        file does not exist
    """
    if not versions_file_path.is_file():
        return {}
    # Open the same path we just tested; the old code opened the hard-coded
    # relative name 'versions', which breaks when the CWD differs from the
    # file's directory. safe_load avoids executing arbitrary YAML tags and
    # the deprecation warning of yaml.load without an explicit Loader.
    with open(versions_file_path) as f:
        versions = yaml.safe_load(f)
    return versions
|
2018-11-15 04:09:57 +01:00
|
|
|
|
2019-12-25 07:59:47 +01:00
|
|
|
|
2020-10-21 11:19:02 +02:00
|
|
|
def parse_yaml_config(config_file_path, with_notary, with_trivy, with_chartmuseum):
    '''
    Load harbor.yml and flatten it into the dict consumed by the template renderers.

    :param config_file_path: path to the harbor.yml file
    :param with_notary: True when the Notary components are deployed
    :param with_trivy: True when the Trivy scanner adapter is deployed
    :param with_chartmuseum: True when ChartMuseum is deployed
    :returns: dict of configs
    '''

    with open(config_file_path) as f:
        # safe_load: the config file is plain data, so avoid executing YAML
        # tags and the DeprecationWarning of yaml.load without a Loader.
        configs = yaml.safe_load(f)

    # Internal component endpoints default to plain http; they are switched
    # to the https variants at the bottom when internal TLS is enabled.
    config_dict = {
        'portal_url': 'http://portal:8080',
        'registry_url': 'http://registry:5000',
        'registry_controller_url': 'http://registryctl:8080',
        'core_url': 'http://core:8080',
        'core_local_url': 'http://127.0.0.1:8080',
        'token_service_url': 'http://core:8080/service/token',
        'jobservice_url': 'http://jobservice:8080',
        'trivy_adapter_url': 'http://trivy-adapter:8080',
        'notary_url': 'http://notary-server:4443',
        'chart_repository_url': 'http://chartmuseum:9999'
    }

    config_dict['hostname'] = configs["hostname"]

    config_dict['protocol'] = 'http'
    http_config = configs.get('http') or {}
    config_dict['http_port'] = http_config.get('port', 80)

    # An https section in harbor.yml switches the exposed protocol and
    # requires certificate/private_key to be present.
    https_config = configs.get('https')
    if https_config:
        config_dict['protocol'] = 'https'
        config_dict['https_port'] = https_config.get('port', 443)
        config_dict['cert_path'] = https_config["certificate"]
        config_dict['cert_key_path'] = https_config["private_key"]

    # public_url: explicit external_url wins; otherwise derive it from
    # protocol/hostname, omitting default ports (443 / 80).
    if configs.get('external_url'):
        config_dict['public_url'] = configs.get('external_url')
    else:
        if config_dict['protocol'] == 'https':
            if config_dict['https_port'] == 443:
                config_dict['public_url'] = '{protocol}://{hostname}'.format(**config_dict)
            else:
                config_dict['public_url'] = '{protocol}://{hostname}:{https_port}'.format(**config_dict)
        else:
            if config_dict['http_port'] == 80:
                config_dict['public_url'] = '{protocol}://{hostname}'.format(**config_dict)
            else:
                config_dict['public_url'] = '{protocol}://{hostname}:{http_port}'.format(**config_dict)

    # DB configs (local postgres container; may be overridden below by
    # external_database)
    db_configs = configs.get('database')
    if db_configs:
        # harbor db
        config_dict['harbor_db_host'] = 'postgresql'
        config_dict['harbor_db_port'] = 5432
        config_dict['harbor_db_name'] = 'registry'
        config_dict['harbor_db_username'] = 'postgres'
        config_dict['harbor_db_password'] = db_configs.get("password") or ''
        config_dict['harbor_db_sslmode'] = 'disable'
        config_dict['harbor_db_max_idle_conns'] = db_configs.get("max_idle_conns") or default_db_max_idle_conns
        config_dict['harbor_db_max_open_conns'] = db_configs.get("max_open_conns") or default_db_max_open_conns

        if with_notary:
            # notary signer
            config_dict['notary_signer_db_host'] = 'postgresql'
            config_dict['notary_signer_db_port'] = 5432
            config_dict['notary_signer_db_name'] = 'notarysigner'
            config_dict['notary_signer_db_username'] = 'signer'
            config_dict['notary_signer_db_password'] = 'password'
            config_dict['notary_signer_db_sslmode'] = 'disable'
            # notary server
            config_dict['notary_server_db_host'] = 'postgresql'
            config_dict['notary_server_db_port'] = 5432
            config_dict['notary_server_db_name'] = 'notaryserver'
            config_dict['notary_server_db_username'] = 'server'
            config_dict['notary_server_db_password'] = 'password'
            config_dict['notary_server_db_sslmode'] = 'disable'

    # Data path volume
    config_dict['data_volume'] = configs['data_volume']

    # Initial Admin Password
    config_dict['harbor_admin_password'] = configs["harbor_admin_password"]

    # Registry storage configs
    storage_config = configs.get('storage_service') or {}

    config_dict['registry_custom_ca_bundle_path'] = storage_config.get('ca_bundle') or ''

    # Exactly one storage backend; falls back to the local filesystem driver.
    if storage_config.get('filesystem'):
        config_dict['storage_provider_name'] = 'filesystem'
        config_dict['storage_provider_config'] = storage_config['filesystem']
    elif storage_config.get('azure'):
        config_dict['storage_provider_name'] = 'azure'
        config_dict['storage_provider_config'] = storage_config['azure']
    elif storage_config.get('gcs'):
        config_dict['storage_provider_name'] = 'gcs'
        config_dict['storage_provider_config'] = storage_config['gcs']
    elif storage_config.get('s3'):
        config_dict['storage_provider_name'] = 's3'
        config_dict['storage_provider_config'] = storage_config['s3']
    elif storage_config.get('swift'):
        config_dict['storage_provider_name'] = 'swift'
        config_dict['storage_provider_config'] = storage_config['swift']
    elif storage_config.get('oss'):
        config_dict['storage_provider_name'] = 'oss'
        config_dict['storage_provider_config'] = storage_config['oss']
    else:
        config_dict['storage_provider_name'] = 'filesystem'
        config_dict['storage_provider_config'] = {}

    if storage_config.get('redirect'):
        config_dict['storage_redirect_disabled'] = storage_config['redirect']['disabled']

    # Global proxy configs
    proxy_config = configs.get('proxy') or {}
    proxy_components = proxy_config.get('components') or []
    no_proxy_config = proxy_config.get('no_proxy')
    # start from the internal component names that must never be proxied
    all_no_proxy = INTERNAL_NO_PROXY_DN
    if no_proxy_config:
        all_no_proxy |= set(no_proxy_config.split(','))

    for proxy_component in proxy_components:
        config_dict[proxy_component + '_http_proxy'] = proxy_config.get('http_proxy') or ''
        config_dict[proxy_component + '_https_proxy'] = proxy_config.get('https_proxy') or ''
        config_dict[proxy_component + '_no_proxy'] = ','.join(all_no_proxy)

    # Trivy configs, optional
    trivy_configs = configs.get("trivy") or {}
    config_dict['trivy_github_token'] = trivy_configs.get("github_token") or ''
    config_dict['trivy_skip_update'] = trivy_configs.get("skip_update") or False
    config_dict['trivy_ignore_unfixed'] = trivy_configs.get("ignore_unfixed") or False
    config_dict['trivy_insecure'] = trivy_configs.get("insecure") or False

    # Chart configs
    chart_configs = configs.get("chart") or {}
    config_dict['chart_absolute_url'] = chart_configs.get('absolute_url') == 'enabled'

    # jobservice config
    js_config = configs.get('jobservice') or {}
    config_dict['max_job_workers'] = js_config["max_job_workers"]
    config_dict['jobservice_secret'] = generate_random_string(16)

    # notification config
    notification_config = configs.get('notification') or {}
    config_dict['notification_webhook_job_max_retry'] = notification_config["webhook_job_max_retry"]

    # Log configs
    allowed_levels = ['debug', 'info', 'warning', 'error', 'fatal']
    log_configs = configs.get('log') or {}

    log_level = log_configs['level']
    # compare case-insensitively: the stored value is lower-cased anyway,
    # so e.g. 'INFO' is as valid as 'info'
    if log_level.lower() not in allowed_levels:
        raise Exception('log level must be one of debug, info, warning, error, fatal')
    config_dict['log_level'] = log_level.lower()

    # parse local log related configs
    local_logs = log_configs.get('local') or {}
    if local_logs:
        config_dict['log_location'] = local_logs.get('location') or '/var/log/harbor'
        config_dict['log_rotate_count'] = local_logs.get('rotate_count') or 50
        config_dict['log_rotate_size'] = local_logs.get('rotate_size') or '200M'

    # parse external log endpoint related configs
    if log_configs.get('external_endpoint'):
        config_dict['log_external'] = True
        config_dict['log_ep_protocol'] = log_configs['external_endpoint']['protocol']
        config_dict['log_ep_host'] = log_configs['external_endpoint']['host']
        config_dict['log_ep_port'] = log_configs['external_endpoint']['port']
    else:
        config_dict['log_external'] = False

    # external DB, optional, if external_db enabled, it will cover the database config
    external_db_configs = configs.get('external_database') or {}
    if external_db_configs:
        config_dict['external_database'] = True
        # harbor db
        config_dict['harbor_db_host'] = external_db_configs['harbor']['host']
        config_dict['harbor_db_port'] = external_db_configs['harbor']['port']
        config_dict['harbor_db_name'] = external_db_configs['harbor']['db_name']
        config_dict['harbor_db_username'] = external_db_configs['harbor']['username']
        config_dict['harbor_db_password'] = external_db_configs['harbor']['password']
        config_dict['harbor_db_sslmode'] = external_db_configs['harbor']['ssl_mode']
        config_dict['harbor_db_max_idle_conns'] = external_db_configs['harbor'].get("max_idle_conns") or default_db_max_idle_conns
        config_dict['harbor_db_max_open_conns'] = external_db_configs['harbor'].get("max_open_conns") or default_db_max_open_conns

        if with_notary:
            # notary signer
            config_dict['notary_signer_db_host'] = external_db_configs['notary_signer']['host']
            config_dict['notary_signer_db_port'] = external_db_configs['notary_signer']['port']
            config_dict['notary_signer_db_name'] = external_db_configs['notary_signer']['db_name']
            config_dict['notary_signer_db_username'] = external_db_configs['notary_signer']['username']
            config_dict['notary_signer_db_password'] = external_db_configs['notary_signer']['password']
            config_dict['notary_signer_db_sslmode'] = external_db_configs['notary_signer']['ssl_mode']
            # notary server
            config_dict['notary_server_db_host'] = external_db_configs['notary_server']['host']
            config_dict['notary_server_db_port'] = external_db_configs['notary_server']['port']
            config_dict['notary_server_db_name'] = external_db_configs['notary_server']['db_name']
            config_dict['notary_server_db_username'] = external_db_configs['notary_server']['username']
            config_dict['notary_server_db_password'] = external_db_configs['notary_server']['password']
            config_dict['notary_server_db_sslmode'] = external_db_configs['notary_server']['ssl_mode']
    else:
        config_dict['external_database'] = False

    # update redis configs
    config_dict.update(get_redis_configs(configs.get("external_redis", None), with_trivy))

    # auto generated secret string for core
    config_dict['core_secret'] = generate_random_string(16)

    # UAA configs
    config_dict['uaa'] = configs.get('uaa') or {}

    # per-deployment credentials the core uses against the registry
    config_dict['registry_username'] = REGISTRY_USER_NAME
    config_dict['registry_password'] = generate_random_string(32)

    internal_tls_config = configs.get('internal_tls')
    # TLS related configs
    if internal_tls_config and internal_tls_config.get('enabled'):
        config_dict['internal_tls'] = InternalTLS(
            internal_tls_config['enabled'],
            False,
            internal_tls_config['dir'],
            configs['data_volume'],
            with_notary=with_notary,
            with_trivy=with_trivy,
            with_chartmuseum=with_chartmuseum,
            external_database=config_dict['external_database'])
    else:
        config_dict['internal_tls'] = InternalTLS()

    # metric configs
    metric_config = configs.get('metric')
    if metric_config:
        config_dict['metric'] = Metric(metric_config['enabled'], metric_config['port'], metric_config['path'])
    else:
        config_dict['metric'] = Metric()

    # With internal TLS on, the component-to-component endpoints move to
    # the https ports.
    if config_dict['internal_tls'].enabled:
        config_dict['portal_url'] = 'https://portal:8443'
        config_dict['registry_url'] = 'https://registry:5443'
        config_dict['registry_controller_url'] = 'https://registryctl:8443'
        config_dict['core_url'] = 'https://core:8443'
        config_dict['core_local_url'] = 'https://core:8443'
        config_dict['token_service_url'] = 'https://core:8443/service/token'
        config_dict['jobservice_url'] = 'https://jobservice:8443'
        config_dict['trivy_adapter_url'] = 'https://trivy-adapter:8443'
        # config_dict['notary_url'] = 'http://notary-server:4443'
        config_dict['chart_repository_url'] = 'https://chartmuseum:9443'

    return config_dict
|
2019-11-13 03:35:21 +01:00
|
|
|
|
|
|
|
|
|
|
|
def get_redis_url(db, redis=None):
    """Returns redis url with format `redis://[arbitrary_username:password@]ipaddress:port/database_index?idle_timeout_seconds=30`

    >>> get_redis_url(1)
    'redis://redis:6379/1'
    >>> get_redis_url(1, {'host': 'localhost:6379', 'password': 'password'})
    'redis://anonymous:password@localhost:6379/1'
    >>> get_redis_url(1, {'host':'host1:26379,host2:26379', 'sentinel_master_set':'mymaster', 'password':'password1'})
    'redis+sentinel://anonymous:password1@host1:26379,host2:26379/mymaster/1'
    >>> get_redis_url(1, {'host':'host1:26379,host2:26379', 'sentinel_master_set':'mymaster', 'password':'password1','idle_timeout_seconds':30})
    'redis+sentinel://anonymous:password1@host1:26379,host2:26379/mymaster/1?idle_timeout_seconds=30'
    """
    kwargs = {
        'host': 'redis:6379',
        'password': '',
    }
    kwargs.update(redis or {})
    # a configured sentinel master set selects sentinel mode
    kwargs['scheme'] = 'redis+sentinel' if kwargs.get('sentinel_master_set') else 'redis'
    # db 0 is redis' default, so it is left out of the url
    kwargs['db_part'] = '/%s' % db if db else ''
    kwargs['sentinel_part'] = '/%s' % kwargs['sentinel_master_set'] if kwargs.get('sentinel_master_set') else ''
    # redis ignores the username, but the documented format (and the Harbor
    # components parsing these urls) expects the 'anonymous' placeholder;
    # the old ':%s@' form dropped it, contradicting the doctests above.
    kwargs['password_part'] = 'anonymous:%s@' % kwargs['password'] if kwargs.get('password') else ''

    return "{scheme}://{password_part}{host}{sentinel_part}{db_part}".format(**kwargs) + get_redis_url_param(kwargs)
|
|
|
|
|
|
|
|
|
|
|
|
def get_redis_url_param(redis=None):
    """Build the query-string suffix for a redis url.

    Returns '?idle_timeout_seconds=<value>' when that setting is present
    in *redis*, otherwise the empty string.
    """
    if not redis or 'idle_timeout_seconds' not in redis:
        return ""
    return "?" + urlencode({'idle_timeout_seconds': redis['idle_timeout_seconds']})
|
2019-11-13 03:35:21 +01:00
|
|
|
|
|
|
|
|
2020-10-21 11:19:02 +02:00
|
|
|
def get_redis_configs(external_redis=None, with_trivy=True):
    """Returns configs for redis

    >>> get_redis_configs()['external_redis']
    False
    >>> get_redis_configs()['redis_url_reg']
    'redis://redis:6379/1?idle_timeout_seconds=30'
    >>> get_redis_configs()['redis_url_js']
    'redis://redis:6379/2?idle_timeout_seconds=30'
    >>> get_redis_configs()['trivy_redis_url']
    'redis://redis:6379/5?idle_timeout_seconds=30'

    >>> get_redis_configs({'host': 'localhost', 'password': ''})['redis_password']
    ''
    >>> get_redis_configs({'host': 'localhost', 'password': None})['redis_password']
    ''
    >>> get_redis_configs({'host': 'localhost', 'password': None})['redis_url_reg']
    'redis://localhost:6379/1?idle_timeout_seconds=30'

    >>> get_redis_configs({'host': 'localhost', 'password': 'pass'})['external_redis']
    True
    >>> get_redis_configs({'host': 'localhost', 'password': 'pass'})['redis_password']
    'pass'
    >>> get_redis_configs({'host': 'localhost', 'password': 'pass'})['redis_url_reg']
    'redis://anonymous:pass@localhost:6379/1?idle_timeout_seconds=30'
    >>> get_redis_configs({'host': 'localhost', 'password': 'pass'})['redis_url_js']
    'redis://anonymous:pass@localhost:6379/2?idle_timeout_seconds=30'
    >>> get_redis_configs({'host': 'localhost', 'password': 'pass'})['trivy_redis_url']
    'redis://anonymous:pass@localhost:6379/5?idle_timeout_seconds=30'

    >>> 'trivy_redis_url' not in get_redis_configs(with_trivy=False)
    True
    """
    external_redis = external_redis or {}

    configs = dict(external_redis=bool(external_redis))

    # internal redis config as the default
    redis = {
        'host': 'redis:6379',
        'password': '',
        'registry_db_index': 1,
        'jobservice_db_index': 2,
        'chartmuseum_db_index': 3,
        'trivy_db_index': 5,
        'idle_timeout_seconds': 30,
    }

    # overwriting existing keys by external_redis; falsy values (None, '')
    # keep the internal defaults
    redis.update({key: value for (key, value) in external_redis.items() if value})

    # expose the effective password on its own key — the doctests above
    # document it, but the old code never set it (KeyError)
    configs['redis_password'] = redis['password']

    configs['redis_url_core'] = get_redis_url(0, redis)
    configs['redis_url_chart'] = get_redis_url(redis['chartmuseum_db_index'], redis)
    configs['redis_url_js'] = get_redis_url(redis['jobservice_db_index'], redis)
    configs['redis_url_reg'] = get_redis_url(redis['registry_db_index'], redis)

    if with_trivy:
        configs['trivy_redis_url'] = get_redis_url(redis['trivy_db_index'], redis)

    return configs
|