Merge branch 'main' into uffizzi-k8s

Orlix 2023-11-22 17:03:38 +02:00 committed by GitHub
commit 06199e415e
50 changed files with 1745 additions and 137 deletions

View File

@@ -44,7 +44,7 @@ jobs:
- name: Set up Go 1.21
uses: actions/setup-go@v1
with:
-go-version: 1.21.3
+go-version: 1.21.4
id: go
- uses: actions/checkout@v3
with:
@@ -105,7 +105,7 @@ jobs:
- name: Set up Go 1.21
uses: actions/setup-go@v1
with:
-go-version: 1.21.3
+go-version: 1.21.4
id: go
- uses: actions/checkout@v3
with:
@@ -160,7 +160,7 @@ jobs:
- name: Set up Go 1.21
uses: actions/setup-go@v1
with:
-go-version: 1.21.3
+go-version: 1.21.4
id: go
- uses: actions/checkout@v3
with:
@@ -215,7 +215,7 @@ jobs:
- name: Set up Go 1.21
uses: actions/setup-go@v1
with:
-go-version: 1.21.3
+go-version: 1.21.4
id: go
- uses: actions/checkout@v3
with:
@@ -268,7 +268,7 @@ jobs:
- name: Set up Go 1.21
uses: actions/setup-go@v1
with:
-go-version: 1.21.3
+go-version: 1.21.4
id: go
- uses: actions/checkout@v3
with:

View File

@@ -26,7 +26,7 @@ jobs:
- name: Set up Go 1.21
uses: actions/setup-go@v1
with:
-go-version: 1.21.3
+go-version: 1.21.4
id: go
- name: Setup Docker
uses: docker-practice/actions-setup-docker@master

View File

@@ -28,7 +28,7 @@ jobs:
- name: Set up Go 1.21
uses: actions/setup-go@v1
with:
-go-version: 1.21.3
+go-version: 1.21.4
id: go
- uses: actions/checkout@v3
with:

View File

@@ -164,6 +164,8 @@ Harbor backend is written in [Go](http://golang.org/). If you don't have a Harbo
| 2.7 | 1.19.4 |
| 2.8 | 1.20.6 |
| 2.9 | 1.21.3 |
+| 2.10 | 1.21.4 |
Ensure your GOPATH and PATH have been configured in accordance with the Go environment instructions.
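The version bump in this table is what drives the 1.21.3 to 1.21.4 changes across the workflows, Makefile, and Dockerfiles elsewhere in this merge. As a quick, hedged illustration of checking a local environment against it (the expected version string comes from this diff; the check itself is a throwaway sketch, not part of Harbor):

package main

import (
    "fmt"
    "go/build"
    "runtime"
)

func main() {
    // runtime.Version reports the toolchain this binary was built with, e.g. "go1.21.4".
    fmt.Println("toolchain:", runtime.Version(), "(Harbor 2.10 builds with go1.21.4 per the table above)")
    // build.Default.GOPATH resolves GOPATH the same way the go tool does,
    // which is the setting the sentence above asks contributors to verify.
    fmt.Println("GOPATH:", build.Default.GOPATH)
}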

View File

@@ -140,7 +140,7 @@ GOINSTALL=$(GOCMD) install
GOTEST=$(GOCMD) test
GODEP=$(GOTEST) -i
GOFMT=gofmt -w
-GOBUILDIMAGE=golang:1.21.3
+GOBUILDIMAGE=golang:1.21.4
GOBUILDPATHINCONTAINER=/harbor
# go build

View File

@@ -153,7 +153,7 @@ log:
# port: 5140
#This attribute is for migrator to detect the version of the .cfg file, DO NOT MODIFY!
-_version: 2.9.0
+_version: 2.10.0
# Uncomment external_database if using external database.
# external_database:

View File

@@ -1 +1,3 @@
DROP TABLE IF EXISTS harbor_resource_label;
+CREATE INDEX IF NOT EXISTS idx_artifact_accessory_subject_artifact_id ON artifact_accessory (subject_artifact_id);

View File

@@ -10,7 +10,7 @@ from migrations import accept_versions
@click.command()
@click.option('-i', '--input', 'input_', required=True, help="The path of original config file")
@click.option('-o', '--output', default='', help="the path of output config file")
-@click.option('-t', '--target', default='2.9.0', help="target version of input path")
+@click.option('-t', '--target', default='2.10.0', help="target version of input path")
def migrate(input_, output, target):
"""
migrate command will migrate config file style to specific version

View File

@@ -2,4 +2,4 @@ import os
MIGRATION_BASE_DIR = os.path.dirname(__file__)
-accept_versions = {'1.9.0', '1.10.0', '2.0.0', '2.1.0', '2.2.0', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0', '2.9.0'}
+accept_versions = {'1.9.0', '1.10.0', '2.0.0', '2.1.0', '2.2.0', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0', '2.9.0','2.10.0'}

View File

@@ -0,0 +1,21 @@
import os
from jinja2 import Environment, FileSystemLoader, StrictUndefined, select_autoescape
from utils.migration import read_conf
revision = '2.10.0'
down_revisions = ['2.9.0']
def migrate(input_cfg, output_cfg):
    current_dir = os.path.dirname(__file__)
    tpl = Environment(
        loader=FileSystemLoader(current_dir),
        undefined=StrictUndefined,
        trim_blocks=True,
        lstrip_blocks=True,
        autoescape = select_autoescape()
    ).get_template('harbor.yml.jinja')
    config_dict = read_conf(input_cfg)
    with open(output_cfg, 'w') as f:
        f.write(tpl.render(**config_dict))

View File

@@ -0,0 +1,666 @@
# Configuration file of Harbor
# The IP address or hostname to access admin UI and registry service.
# DO NOT use localhost or 127.0.0.1, because Harbor needs to be accessed by external clients.
hostname: {{ hostname }}
# http related config
{% if http is defined %}
http:
# port for http, default is 80. If https enabled, this port will redirect to https port
port: {{ http.port }}
{% else %}
# http:
# # port for http, default is 80. If https enabled, this port will redirect to https port
# port: 80
{% endif %}
{% if https is defined %}
# https related config
https:
# https port for harbor, default is 443
port: {{ https.port }}
# The path of cert and key files for nginx
certificate: {{ https.certificate }}
private_key: {{ https.private_key }}
{% else %}
# https related config
# https:
# # https port for harbor, default is 443
# port: 443
# # The path of cert and key files for nginx
# certificate: /your/certificate/path
# private_key: /your/private/key/path
{% endif %}
{% if internal_tls is defined %}
# Uncomment following will enable tls communication between all harbor components
internal_tls:
# set enabled to true means internal tls is enabled
enabled: {{ internal_tls.enabled | lower }}
# put your cert and key files on dir
dir: {{ internal_tls.dir }}
# enable strong ssl ciphers (default: false)
{% if internal_tls.strong_ssl_ciphers is defined %}
strong_ssl_ciphers: {{ internal_tls.strong_ssl_ciphers | lower }}
{% else %}
strong_ssl_ciphers: false
{% endif %}
{% else %}
# internal_tls:
# # set enabled to true means internal tls is enabled
# enabled: true
# # put your cert and key files on dir
# dir: /etc/harbor/tls/internal
# # enable strong ssl ciphers (default: false)
# strong_ssl_ciphers: false
{% endif %}
# Uncomment external_url if you want to enable external proxy
# And when it enabled the hostname will no longer used
{% if external_url is defined %}
external_url: {{ external_url }}
{% else %}
# external_url: https://reg.mydomain.com:8433
{% endif %}
# The initial password of Harbor admin
# It only works in first time to install harbor
# Remember Change the admin password from UI after launching Harbor.
{% if harbor_admin_password is defined %}
harbor_admin_password: {{ harbor_admin_password }}
{% else %}
harbor_admin_password: Harbor12345
{% endif %}
# Harbor DB configuration
database:
{% if database is defined %}
# The password for the root user of Harbor DB. Change this before any production use.
password: {{ database.password}}
# The maximum number of connections in the idle connection pool. If it <=0, no idle connections are retained.
max_idle_conns: {{ database.max_idle_conns }}
# The maximum number of open connections to the database. If it <= 0, then there is no limit on the number of open connections.
# Note: the default number of connections is 1024 for postgres of harbor.
max_open_conns: {{ database.max_open_conns }}
{% else %}
# The password for the root user of Harbor DB. Change this before any production use.
password: root123
# The maximum number of connections in the idle connection pool. If it <=0, no idle connections are retained.
max_idle_conns: 100
# The maximum number of open connections to the database. If it <= 0, then there is no limit on the number of open connections.
# Note: the default number of connections is 1024 for postgres of harbor.
max_open_conns: 900
{% endif %}
{% if data_volume is defined %}
# The default data volume
data_volume: {{ data_volume }}
{% else %}
# The default data volume
data_volume: /data
{% endif %}
# Harbor Storage settings by default is using /data dir on local filesystem
# Uncomment storage_service setting If you want to using external storage
{% if storage_service is defined %}
storage_service:
{% for key, value in storage_service.items() %}
{% if key == 'ca_bundle' %}
# # ca_bundle is the path to the custom root ca certificate, which will be injected into the truststore
# # of registry's and chart repository's containers. This is usually needed when the user hosts a internal storage with self signed certificate.
ca_bundle: {{ value if value is not none else '' }}
{% elif key == 'redirect' %}
# # set disable to true when you want to disable registry redirect
redirect:
{% if storage_service.redirect.disabled is defined %}
disable: {{ storage_service.redirect.disabled | lower}}
{% else %}
disable: {{ storage_service.redirect.disable | lower}}
{% endif %}
{% else %}
# # storage backend, default is filesystem, options include filesystem, azure, gcs, s3, swift and oss
# # for more info about this configuration please refer https://docs.docker.com/registry/configuration/
{{ key }}:
{% for k, v in value.items() %}
{{ k }}: {{ v if v is not none else '' }}
{% endfor %}
{% endif %}
{% endfor %}
{% else %}
# Harbor Storage settings by default is using /data dir on local filesystem
# Uncomment storage_service setting If you want to using external storage
# storage_service:
# # ca_bundle is the path to the custom root ca certificate, which will be injected into the truststore
# # of registry's and chart repository's containers. This is usually needed when the user hosts a internal storage with self signed certificate.
# ca_bundle:
# # storage backend, default is filesystem, options include filesystem, azure, gcs, s3, swift and oss
# # for more info about this configuration please refer https://docs.docker.com/registry/configuration/
# filesystem:
# maxthreads: 100
# # set disable to true when you want to disable registry redirect
# redirect:
# disable: false
{% endif %}
# Trivy configuration
#
# Trivy DB contains vulnerability information from NVD, Red Hat, and many other upstream vulnerability databases.
# It is downloaded by Trivy from the GitHub release page https://github.com/aquasecurity/trivy-db/releases and cached
# in the local file system. In addition, the database contains the update timestamp so Trivy can detect whether it
# should download a newer version from the Internet or use the cached one. Currently, the database is updated every
# 12 hours and published as a new release to GitHub.
{% if trivy is defined %}
trivy:
# ignoreUnfixed The flag to display only fixed vulnerabilities
{% if trivy.ignore_unfixed is defined %}
ignore_unfixed: {{ trivy.ignore_unfixed | lower }}
{% else %}
ignore_unfixed: false
{% endif %}
# timeout The duration to wait for scan completion
{% if trivy.timeout is defined %}
timeout: {{ trivy.timeout }}
{% else %}
timeout: 5m0s
{% endif %}
# skipUpdate The flag to enable or disable Trivy DB downloads from GitHub
#
# You might want to enable this flag in test or CI/CD environments to avoid GitHub rate limiting issues.
# If the flag is enabled you have to download the `trivy-offline.tar.gz` archive manually, extract `trivy.db` and
# `metadata.json` files and mount them in the `/home/scanner/.cache/trivy/db` path.
{% if trivy.skip_update is defined %}
skip_update: {{ trivy.skip_update | lower }}
{% else %}
skip_update: false
{% endif %}
#
{% if trivy.offline_scan is defined %}
offline_scan: {{ trivy.offline_scan | lower }}
{% else %}
offline_scan: false
{% endif %}
#
# Comma-separated list of what security issues to detect. Possible values are `vuln`, `config` and `secret`. Defaults to `vuln`.
{% if trivy.security_check is defined %}
security_check: {{ trivy.security_check }}
{% else %}
security_check: vuln
{% endif %}
#
# insecure The flag to skip verifying registry certificate
{% if trivy.insecure is defined %}
insecure: {{ trivy.insecure | lower }}
{% else %}
insecure: false
{% endif %}
# github_token The GitHub access token to download Trivy DB
#
# Anonymous downloads from GitHub are subject to the limit of 60 requests per hour. Normally such rate limit is enough
# for production operations. If, for any reason, it's not enough, you could increase the rate limit to 5000
# requests per hour by specifying the GitHub access token. For more details on GitHub rate limiting please consult
# https://developer.github.com/v3/#rate-limiting
#
# You can create a GitHub token by following the instructions in
# https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line
#
{% if trivy.github_token is defined %}
github_token: {{ trivy.github_token }}
{% else %}
# github_token: xxx
{% endif %}
{% else %}
# trivy:
# # ignoreUnfixed The flag to display only fixed vulnerabilities
# ignore_unfixed: false
# # skipUpdate The flag to enable or disable Trivy DB downloads from GitHub
# #
# # You might want to enable this flag in test or CI/CD environments to avoid GitHub rate limiting issues.
# # If the flag is enabled you have to download the `trivy-offline.tar.gz` archive manually, extract `trivy.db` and
# # `metadata.json` files and mount them in the `/home/scanner/.cache/trivy/db` path.
# skip_update: false
# #
# #The offline_scan option prevents Trivy from sending API requests to identify dependencies.
# # Scanning JAR files and pom.xml may require Internet access for better detection, but this option tries to avoid it.
# # For example, the offline mode will not try to resolve transitive dependencies in pom.xml when the dependency doesn't
# # exist in the local repositories. It means a number of detected vulnerabilities might be fewer in offline mode.
# # It would work if all the dependencies are in local.
# # This option doesnt affect DB download. You need to specify "skip-update" as well as "offline-scan" in an air-gapped environment.
# offline_scan: false
# #
# # insecure The flag to skip verifying registry certificate
# insecure: false
# # github_token The GitHub access token to download Trivy DB
# #
# # Anonymous downloads from GitHub are subject to the limit of 60 requests per hour. Normally such rate limit is enough
# # for production operations. If, for any reason, it's not enough, you could increase the rate limit to 5000
# # requests per hour by specifying the GitHub access token. For more details on GitHub rate limiting please consult
# # https://developer.github.com/v3/#rate-limiting
# #
# # You can create a GitHub token by following the instructions in
# # https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line
# #
# # github_token: xxx
{% endif %}
jobservice:
# Maximum number of job workers in job service
{% if jobservice is defined %}
max_job_workers: {{ jobservice.max_job_workers }}
# The jobLoggers backend name, only support "STD_OUTPUT", "FILE" and/or "DB"
{% if jobservice.job_loggers is defined %}
job_loggers:
{% for job_logger in jobservice.job_loggers %}
- {{job_logger}}
{% endfor %}
{% else %}
job_loggers:
- STD_OUTPUT
- FILE
# - DB
{% endif %}
# The jobLogger sweeper duration (ignored if `jobLogger` is `stdout`)
{% if jobservice.logger_sweeper_duration is defined %}
logger_sweeper_duration: {{ jobservice.logger_sweeper_duration }}
{% else %}
logger_sweeper_duration: 1
{% endif %}
{% else %}
max_job_workers: 10
# The jobLoggers backend name, only support "STD_OUTPUT", "FILE" and/or "DB"
job_loggers:
- STD_OUTPUT
- FILE
# - DB
# The jobLogger sweeper duration (ignored if `jobLogger` is `stdout`)
logger_sweeper_duration: 1
{% endif %}
notification:
# Maximum retry count for webhook job
{% if notification is defined %}
webhook_job_max_retry: {{ notification.webhook_job_max_retry}}
# HTTP client timeout for webhook job
{% if notification.webhook_job_http_client_timeout is defined %}
webhook_job_http_client_timeout: {{ notification.webhook_job_http_client_timeout }}
{% else %}
webhook_job_http_client_timeout: 3 #seconds
{% endif %}
{% else %}
webhook_job_max_retry: 3
# HTTP client timeout for webhook job
webhook_job_http_client_timeout: 3 #seconds
{% endif %}
# Log configurations
log:
# options are debug, info, warning, error, fatal
{% if log is defined %}
level: {{ log.level }}
# configs for logs in local storage
local:
# Log files are rotated log_rotate_count times before being removed. If count is 0, old versions are removed rather than rotated.
rotate_count: {{ log.local.rotate_count }}
# Log files are rotated only if they grow bigger than log_rotate_size bytes. If size is followed by k, the size is assumed to be in kilobytes.
# If the M is used, the size is in megabytes, and if G is used, the size is in gigabytes. So size 100, size 100k, size 100M and size 100G
# are all valid.
rotate_size: {{ log.local.rotate_size }}
# The directory on your host that store log
location: {{ log.local.location }}
{% if log.external_endpoint is defined %}
external_endpoint:
# protocol used to transmit log to external endpoint, options is tcp or udp
protocol: {{ log.external_endpoint.protocol }}
# The host of external endpoint
host: {{ log.external_endpoint.host }}
# Port of external endpoint
port: {{ log.external_endpoint.port }}
{% else %}
# Uncomment following lines to enable external syslog endpoint.
# external_endpoint:
# # protocol used to transmit log to external endpoint, options is tcp or udp
# protocol: tcp
# # The host of external endpoint
# host: localhost
# # Port of external endpoint
# port: 5140
{% endif %}
{% else %}
level: info
# configs for logs in local storage
local:
# Log files are rotated log_rotate_count times before being removed. If count is 0, old versions are removed rather than rotated.
rotate_count: 50
# Log files are rotated only if they grow bigger than log_rotate_size bytes. If size is followed by k, the size is assumed to be in kilobytes.
# If the M is used, the size is in megabytes, and if G is used, the size is in gigabytes. So size 100, size 100k, size 100M and size 100G
# are all valid.
rotate_size: 200M
# The directory on your host that store log
location: /var/log/harbor
# Uncomment following lines to enable external syslog endpoint.
# external_endpoint:
# # protocol used to transmit log to external endpoint, options is tcp or udp
# protocol: tcp
# # The host of external endpoint
# host: localhost
# # Port of external endpoint
# port: 5140
{% endif %}
#This attribute is for migrator to detect the version of the .cfg file, DO NOT MODIFY!
_version: 2.9.0
{% if external_database is defined %}
# Uncomment external_database if using external database.
external_database:
harbor:
host: {{ external_database.harbor.host }}
port: {{ external_database.harbor.port }}
db_name: {{ external_database.harbor.db_name }}
username: {{ external_database.harbor.username }}
password: {{ external_database.harbor.password }}
ssl_mode: {{ external_database.harbor.ssl_mode }}
max_idle_conns: {{ external_database.harbor.max_idle_conns}}
max_open_conns: {{ external_database.harbor.max_open_conns}}
{% else %}
# Uncomment external_database if using external database.
# external_database:
# harbor:
# host: harbor_db_host
# port: harbor_db_port
# db_name: harbor_db_name
# username: harbor_db_username
# password: harbor_db_password
# ssl_mode: disable
# max_idle_conns: 2
# max_open_conns: 0
{% endif %}
{% if redis is defined %}
redis:
# # db_index 0 is for core, it's unchangeable
{% if redis.registry_db_index is defined %}
registry_db_index: {{ redis.registry_db_index }}
{% else %}
# # registry_db_index: 1
{% endif %}
{% if redis.jobservice_db_index is defined %}
jobservice_db_index: {{ redis.jobservice_db_index }}
{% else %}
# # jobservice_db_index: 2
{% endif %}
{% if redis.trivy_db_index is defined %}
trivy_db_index: {{ redis.trivy_db_index }}
{% else %}
# # trivy_db_index: 5
{% endif %}
{% if redis.harbor_db_index is defined %}
harbor_db_index: {{ redis.harbor_db_index }}
{% else %}
# # it's optional, the db for harbor business misc, by default is 0, uncomment it if you want to change it.
# # harbor_db_index: 6
{% endif %}
{% if redis.cache_layer_db_index is defined %}
cache_layer_db_index: {{ redis.cache_layer_db_index }}
{% else %}
# # it's optional, the db for harbor cache layer, by default is 0, uncomment it if you want to change it.
# # cache_layer_db_index: 7
{% endif %}
{% else %}
# Uncomment redis if need to customize redis db
# redis:
# # db_index 0 is for core, it's unchangeable
# # registry_db_index: 1
# # jobservice_db_index: 2
# # trivy_db_index: 5
# # it's optional, the db for harbor business misc, by default is 0, uncomment it if you want to change it.
# # harbor_db_index: 6
# # it's optional, the db for harbor cache layer, by default is 0, uncomment it if you want to change it.
# # cache_layer_db_index: 7
{% endif %}
{% if external_redis is defined %}
external_redis:
# support redis, redis+sentinel
# host for redis: <host_redis>:<port_redis>
# host for redis+sentinel:
# <host_sentinel1>:<port_sentinel1>,<host_sentinel2>:<port_sentinel2>,<host_sentinel3>:<port_sentinel3>
host: {{ external_redis.host }}
password: {{ external_redis.password }}
# Redis AUTH command was extended in Redis 6, it is possible to use it in the two-arguments AUTH <username> <password> form.
{% if external_redis.username is defined %}
username: {{ external_redis.username }}
{% else %}
# username:
{% endif %}
# sentinel_master_set must be set to support redis+sentinel
#sentinel_master_set:
# db_index 0 is for core, it's unchangeable
registry_db_index: {{ external_redis.registry_db_index }}
jobservice_db_index: {{ external_redis.jobservice_db_index }}
trivy_db_index: 5
idle_timeout_seconds: 30
{% if external_redis.harbor_db_index is defined %}
harbor_db_index: {{ redis.harbor_db_index }}
{% else %}
# # it's optional, the db for harbor business misc, by default is 0, uncomment it if you want to change it.
# # harbor_db_index: 6
{% endif %}
{% if external_redis.cache_layer_db_index is defined %}
cache_layer_db_index: {{ redis.cache_layer_db_index }}
{% else %}
# # it's optional, the db for harbor cache layer, by default is 0, uncomment it if you want to change it.
# # cache_layer_db_index: 7
{% endif %}
{% else %}
# Umcomments external_redis if using external Redis server
# external_redis:
# # support redis, redis+sentinel
# # host for redis: <host_redis>:<port_redis>
# # host for redis+sentinel:
# # <host_sentinel1>:<port_sentinel1>,<host_sentinel2>:<port_sentinel2>,<host_sentinel3>:<port_sentinel3>
# host: redis:6379
# password:
# # Redis AUTH command was extended in Redis 6, it is possible to use it in the two-arguments AUTH <username> <password> form.
# # username:
# # sentinel_master_set must be set to support redis+sentinel
# #sentinel_master_set:
# # db_index 0 is for core, it's unchangeable
# registry_db_index: 1
# jobservice_db_index: 2
# trivy_db_index: 5
# idle_timeout_seconds: 30
# # it's optional, the db for harbor business misc, by default is 0, uncomment it if you want to change it.
# # harbor_db_index: 6
# # it's optional, the db for harbor cache layer, by default is 0, uncomment it if you want to change it.
# # cache_layer_db_index: 7
{% endif %}
{% if uaa is defined %}
# Uncomment uaa for trusting the certificate of uaa instance that is hosted via self-signed cert.
uaa:
ca_file: {{ uaa.ca_file }}
{% else %}
# Uncomment uaa for trusting the certificate of uaa instance that is hosted via self-signed cert.
# uaa:
# ca_file: /path/to/ca
{% endif %}
# Global proxy
# Config http proxy for components, e.g. http://my.proxy.com:3128
# Components doesn't need to connect to each others via http proxy.
# Remove component from `components` array if want disable proxy
# for it. If you want use proxy for replication, MUST enable proxy
# for core and jobservice, and set `http_proxy` and `https_proxy`.
# Add domain to the `no_proxy` field, when you want disable proxy
# for some special registry.
{% if proxy is defined %}
proxy:
http_proxy: {{ proxy.http_proxy or ''}}
https_proxy: {{ proxy.https_proxy or ''}}
no_proxy: {{ proxy.no_proxy or ''}}
{% if proxy.components is defined %}
components:
{% for component in proxy.components %}
{% if component != 'clair' %}
- {{component}}
{% endif %}
{% endfor %}
{% endif %}
{% else %}
proxy:
http_proxy:
https_proxy:
no_proxy:
components:
- core
- jobservice
- trivy
{% endif %}
{% if metric is defined %}
metric:
enabled: {{ metric.enabled }}
port: {{ metric.port }}
path: {{ metric.path }}
{% else %}
# metric:
# enabled: false
# port: 9090
# path: /metric
{% endif %}
# Trace related config
# only can enable one trace provider(jaeger or otel) at the same time,
# and when using jaeger as provider, can only enable it with agent mode or collector mode.
# if using jaeger collector mode, uncomment endpoint and uncomment username, password if needed
# if using jaeger agetn mode uncomment agent_host and agent_port
{% if trace is defined %}
trace:
enabled: {{ trace.enabled | lower}}
sample_rate: {{ trace.sample_rate }}
# # namespace used to diferenciate different harbor services
{% if trace.namespace is defined %}
namespace: {{ trace.namespace }}
{% else %}
# namespace:
{% endif %}
# # attributes is a key value dict contains user defined attributes used to initialize trace provider
{% if trace.attributes is defined%}
attributes:
{% for name, value in trace.attributes.items() %}
{{name}}: {{value}}
{% endfor %}
{% else %}
# attributes:
# application: harbor
{% endif %}
{% if trace.jaeger is defined%}
jaeger:
endpoint: {{trace.jaeger.endpoint or '' }}
username: {{trace.jaeger.username or ''}}
password: {{trace.jaeger.password or ''}}
agent_host: {{trace.jaeger.agent_host or ''}}
agent_port: {{trace.jaeger.agent_port or ''}}
{% else %}
# jaeger:
# endpoint:
# username:
# password:
# agent_host:
# agent_port:
{% endif %}
{% if trace. otel is defined %}
otel:
endpoint: {{trace.otel.endpoint or '' }}
url_path: {{trace.otel.url_path or '' }}
compression: {{trace.otel.compression | lower }}
insecure: {{trace.otel.insecure | lower }}
timeout: {{trace.otel.timeout or '' }}
{% else %}
# otel:
# endpoint: hostname:4318
# url_path: /v1/traces
# compression: false
# insecure: true
# # timeout is in seconds
# timeout: 10
{% endif%}
{% else %}
# trace:
# enabled: true
# # set sample_rate to 1 if you wanna sampling 100% of trace data; set 0.5 if you wanna sampling 50% of trace data, and so forth
# sample_rate: 1
# # # namespace used to differenciate different harbor services
# # namespace:
# # # attributes is a key value dict contains user defined attributes used to initialize trace provider
# # attributes:
# # application: harbor
# # jaeger:
# # endpoint: http://hostname:14268/api/traces
# # username:
# # password:
# # agent_host: hostname
# # agent_port: 6832
# # otel:
# # endpoint: hostname:4318
# # url_path: /v1/traces
# # compression: false
# # insecure: true
# # # timeout is in seconds
# # timeout: 10
{% endif %}
# enable purge _upload directories
{% if upload_purging is defined %}
upload_purging:
enabled: {{ upload_purging.enabled | lower}}
age: {{ upload_purging.age }}
interval: {{ upload_purging.interval }}
dryrun: {{ upload_purging.dryrun | lower}}
{% else %}
upload_purging:
enabled: true
# remove files in _upload directories which exist for a period of time, default is one week.
age: 168h
# the interval of the purge operations
interval: 24h
dryrun: false
{% endif %}
# Cache related config
{% if cache is defined %}
cache:
enabled: {{ cache.enabled | lower}}
expire_hours: {{ cache.expire_hours }}
{% else %}
cache:
enabled: false
expire_hours: 24
{% endif %}
# Harbor core configurations
{% if core is defined %}
core:
# The provider for updating project quota(usage), there are 2 options, redis or db,
# by default is implemented by db but you can switch the updation via redis which
# can improve the performance of high concurrent pushing to the same project,
# and reduce the database connections spike and occupies.
# By redis will bring up some delay for quota usage updation for display, so only
# suggest switch provider to redis if you were ran into the db connections spike aroud
# the scenario of high concurrent pushing to same project, no improvment for other scenes.
quota_update_provider: {{ core.quota_update_provider }}
{% else %}
# core:
# # The provider for updating project quota(usage), there are 2 options, redis or db,
# # by default is implemented by db but you can switch the updation via redis which
# # can improve the performance of high concurrent pushing to the same project,
# # and reduce the database connections spike and occupies.
# # By redis will bring up some delay for quota usage updation for display, so only
# # suggest switch provider to redis if you were ran into the db connections spike aroud
# # the scenario of high concurrent pushing to same project, no improvment for other scenes.
# quota_update_provider: redis # Or db
{% endif %}

View File

@@ -1,4 +1,4 @@
-FROM golang:1.21.3
+FROM golang:1.21.4
ENV DISTRIBUTION_DIR /go/src/github.com/docker/distribution
ENV BUILDTAGS include_oss include_gcs

View File

@@ -1,4 +1,4 @@
-FROM golang:1.21.3
+FROM golang:1.21.4
ADD . /go/src/github.com/aquasecurity/harbor-scanner-trivy/
WORKDIR /go/src/github.com/aquasecurity/harbor-scanner-trivy/

View File

@@ -19,7 +19,7 @@ TEMP=$(mktemp -d ${TMPDIR-/tmp}/trivy-adapter.XXXXXX)
git clone https://github.com/aquasecurity/harbor-scanner-trivy.git $TEMP
cd $TEMP; git checkout $VERSION; cd -
-echo "Building Trivy adapter binary based on golang:1.21.3..."
+echo "Building Trivy adapter binary based on golang:1.21.4..."
cp Dockerfile.binary $TEMP
docker build -f $TEMP/Dockerfile.binary -t trivy-adapter-golang $TEMP

View File

@@ -184,6 +184,7 @@ const (
TraceOtelTimeout = "trace_otel_timeout"
GDPRDeleteUser = "gdpr_delete_user"
+GDPRAuditLogs = "gdpr_audit_logs"
// These variables are temporary solution for issue: https://github.com/goharbor/harbor/issues/16039
// When user disable the pull count/time/audit log, it will decrease the database access, especially in large concurrency pull scenarios.
@@ -230,4 +231,14 @@ const (
QuotaUpdateProvider = "quota_update_provider"
// IllegalCharsInUsername is the illegal chars in username
IllegalCharsInUsername = `,"~#%$`
+// Beego web config
+// BeegoMaxMemoryBytes is the max memory(bytes) of the beego web config
+BeegoMaxMemoryBytes = "beego_max_memory_bytes"
+// DefaultBeegoMaxMemoryBytes sets default max memory to 128GB
+DefaultBeegoMaxMemoryBytes = 1 << 37
+// BeegoMaxUploadSizeBytes is the max upload size(bytes) of the beego web config
+BeegoMaxUploadSizeBytes = "beego_max_upload_size_bytes"
+// DefaultBeegoMaxUploadSizeBytes sets default max upload size to 128GB
+DefaultBeegoMaxUploadSizeBytes = 1 << 37
)
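The two new Beego limits above are expressed as a bit shift; as a quick arithmetic check (a standalone sketch, not Harbor code), 1 << 37 bytes is exactly the 128GB mentioned in the comments:

package main

import "fmt"

func main() {
    // Mirrors DefaultBeegoMaxMemoryBytes / DefaultBeegoMaxUploadSizeBytes from the hunk above.
    const defaultBytes = int64(1) << 37
    // Shifting right by 30 converts bytes to GiB: 137438953472 bytes = 128 GiB.
    fmt.Printf("%d bytes = %d GiB\n", defaultBytes, defaultBytes>>30)
}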

View File

@@ -85,11 +85,11 @@ var (
"System": {
{Resource: ResourceAuditLog, Action: ActionList},
-{Resource: ResourcePreatPolicy, Action: ActionRead},
-{Resource: ResourcePreatPolicy, Action: ActionCreate},
-{Resource: ResourcePreatPolicy, Action: ActionDelete},
-{Resource: ResourcePreatPolicy, Action: ActionList},
-{Resource: ResourcePreatPolicy, Action: ActionUpdate},
+{Resource: ResourcePreatInstance, Action: ActionRead},
+{Resource: ResourcePreatInstance, Action: ActionCreate},
+{Resource: ResourcePreatInstance, Action: ActionDelete},
+{Resource: ResourcePreatInstance, Action: ActionList},
+{Resource: ResourcePreatInstance, Action: ActionUpdate},
{Resource: ResourceProject, Action: ActionList},
{Resource: ResourceProject, Action: ActionCreate},
@@ -102,9 +102,7 @@ var (
{Resource: ResourceReplication, Action: ActionRead},
{Resource: ResourceReplication, Action: ActionCreate},
-{Resource: ResourceReplication, Action: ActionDelete},
{Resource: ResourceReplication, Action: ActionList},
-{Resource: ResourceReplication, Action: ActionUpdate},
{Resource: ResourceReplicationAdapter, Action: ActionList},
@@ -123,14 +121,12 @@ var (
{Resource: ResourceGarbageCollection, Action: ActionRead},
{Resource: ResourceGarbageCollection, Action: ActionCreate},
-{Resource: ResourceGarbageCollection, Action: ActionDelete},
{Resource: ResourceGarbageCollection, Action: ActionList},
{Resource: ResourceGarbageCollection, Action: ActionUpdate},
{Resource: ResourceGarbageCollection, Action: ActionStop},
{Resource: ResourcePurgeAuditLog, Action: ActionRead},
{Resource: ResourcePurgeAuditLog, Action: ActionCreate},
-{Resource: ResourcePurgeAuditLog, Action: ActionDelete},
{Resource: ResourcePurgeAuditLog, Action: ActionList},
{Resource: ResourcePurgeAuditLog, Action: ActionUpdate},
{Resource: ResourcePurgeAuditLog, Action: ActionStop},
@@ -138,12 +134,6 @@ var (
{Resource: ResourceJobServiceMonitor, Action: ActionList},
{Resource: ResourceJobServiceMonitor, Action: ActionStop},
-{Resource: ResourceTagRetention, Action: ActionRead},
-{Resource: ResourceTagRetention, Action: ActionCreate},
-{Resource: ResourceTagRetention, Action: ActionDelete},
-{Resource: ResourceTagRetention, Action: ActionList},
-{Resource: ResourceTagRetention, Action: ActionUpdate},
{Resource: ResourceScanner, Action: ActionRead},
{Resource: ResourceScanner, Action: ActionCreate},
{Resource: ResourceScanner, Action: ActionDelete},
@@ -153,12 +143,8 @@ var (
{Resource: ResourceLabel, Action: ActionRead},
{Resource: ResourceLabel, Action: ActionCreate},
{Resource: ResourceLabel, Action: ActionDelete},
-{Resource: ResourceLabel, Action: ActionList},
{Resource: ResourceLabel, Action: ActionUpdate},
-{Resource: ResourceExportCVE, Action: ActionRead},
-{Resource: ResourceExportCVE, Action: ActionCreate},
{Resource: ResourceSecurityHub, Action: ActionRead},
{Resource: ResourceSecurityHub, Action: ActionList},
@@ -178,9 +164,11 @@ var (
{Resource: ResourceMetadata, Action: ActionUpdate},
{Resource: ResourceRepository, Action: ActionRead},
-{Resource: ResourceRepository, Action: ActionCreate},
-{Resource: ResourceRepository, Action: ActionList},
{Resource: ResourceRepository, Action: ActionUpdate},
+{Resource: ResourceRepository, Action: ActionDelete},
+{Resource: ResourceRepository, Action: ActionList},
+{Resource: ResourceRepository, Action: ActionPull},
+{Resource: ResourceRepository, Action: ActionPush},
{Resource: ResourceArtifact, Action: ActionRead},
{Resource: ResourceArtifact, Action: ActionCreate},
@@ -197,7 +185,7 @@ var (
{Resource: ResourceAccessory, Action: ActionList},
-{Resource: ResourceArtifactAddition, Action: ActionCreate},
+{Resource: ResourceArtifactAddition, Action: ActionRead},
{Resource: ResourceArtifactLabel, Action: ActionCreate},
{Resource: ResourceArtifactLabel, Action: ActionDelete},
@@ -222,7 +210,17 @@ var (
{Resource: ResourceNotificationPolicy, Action: ActionList},
{Resource: ResourceNotificationPolicy, Action: ActionUpdate},
-{Resource: ResourceRegistry, Action: ActionPush},
+{Resource: ResourceTagRetention, Action: ActionRead},
+{Resource: ResourceTagRetention, Action: ActionCreate},
+{Resource: ResourceTagRetention, Action: ActionDelete},
+{Resource: ResourceTagRetention, Action: ActionList},
+{Resource: ResourceTagRetention, Action: ActionUpdate},
+{Resource: ResourceLabel, Action: ActionRead},
+{Resource: ResourceLabel, Action: ActionCreate},
+{Resource: ResourceLabel, Action: ActionDelete},
+{Resource: ResourceLabel, Action: ActionList},
+{Resource: ResourceLabel, Action: ActionUpdate},
},
}
)

View File

@@ -21,12 +21,15 @@ import (
commonmodels "github.com/goharbor/harbor/src/common/models"
"github.com/goharbor/harbor/src/common/security"
"github.com/goharbor/harbor/src/common/security/local"
+"github.com/goharbor/harbor/src/jobservice/job"
+"github.com/goharbor/harbor/src/jobservice/job/impl/gdpr"
"github.com/goharbor/harbor/src/lib"
"github.com/goharbor/harbor/src/lib/config"
"github.com/goharbor/harbor/src/lib/errors"
"github.com/goharbor/harbor/src/lib/q"
"github.com/goharbor/harbor/src/pkg/member"
"github.com/goharbor/harbor/src/pkg/oidc"
+"github.com/goharbor/harbor/src/pkg/task"
"github.com/goharbor/harbor/src/pkg/user"
"github.com/goharbor/harbor/src/pkg/user/models"
)
@@ -76,6 +79,8 @@ func NewController() Controller {
mgr: user.New(),
oidcMetaMgr: oidc.NewMetaMgr(),
memberMgr: member.Mgr,
+taskMgr: task.NewManager(),
+exeMgr: task.NewExecutionManager(),
}
}
@@ -88,6 +93,8 @@ type controller struct {
mgr user.Manager
oidcMetaMgr oidc.MetaManager
memberMgr member.Manager
+taskMgr task.Manager
+exeMgr task.ExecutionManager
}
func (c *controller) UpdateOIDCMeta(ctx context.Context, ou *commonmodels.OIDCUser, cols ...string) error {
@@ -183,10 +190,36 @@ func (c *controller) Delete(ctx context.Context, id int) error {
if err != nil {
return errors.UnknownError(err).WithMessage("failed to load GDPR setting: %v", err)
}
-if gdprSetting.DeleteUser {
-return c.mgr.DeleteGDPR(ctx, id)
-}
-return c.mgr.Delete(ctx, id)
+if gdprSetting.AuditLogs {
+userDb, err := c.mgr.Get(ctx, id)
+if err != nil {
+return errors.Wrap(err, "unable to get user information")
+}
+params := map[string]interface{}{
+gdpr.UserNameParam: userDb.Username,
+}
+execID, err := c.exeMgr.Create(ctx, job.AuditLogsGDPRCompliantVendorType, -1, task.ExecutionTriggerEvent, params)
+if err != nil {
+return err
+}
+_, err = c.taskMgr.Create(ctx, execID, &task.Job{
+Name: job.AuditLogsGDPRCompliantVendorType,
+Metadata: &job.Metadata{
+JobKind: job.KindGeneric,
+},
+Parameters: params,
+})
+if err != nil {
+return err
+}
+}
+if gdprSetting.DeleteUser {
+err = c.mgr.DeleteGDPR(ctx, id)
+} else {
+err = c.mgr.Delete(ctx, id)
+}
+return err
}
func (c *controller) List(ctx context.Context, query *q.Query, options ...models.Option) ([]*commonmodels.User, error) {

View File

@@ -127,8 +127,6 @@ func main() {
web.BConfig.WebConfig.Session.SessionOn = true
web.BConfig.WebConfig.Session.SessionName = config.SessionCookieName
-web.BConfig.MaxMemory = 1 << 35 // (32GB)
-web.BConfig.MaxUploadSize = 1 << 35 // (32GB)
// the core db used for beego session
redisCoreURL := os.Getenv("_REDIS_URL_CORE")
if len(redisCoreURL) > 0 {
@@ -163,6 +161,12 @@ func main() {
log.Info("initializing configurations...")
config.Init()
log.Info("configurations initialization completed")
+// default beego max memory and max upload size is 128GB, consider from some AI related image would be large,
+// also support customize it from the environment variables if the default value cannot satisfy some scenarios.
+web.BConfig.MaxMemory = config.GetBeegoMaxMemoryBytes()
+web.BConfig.MaxUploadSize = config.GetBeegoMaxUploadSizeBytes()
metricCfg := config.Metric()
if metricCfg.Enabled {
metric.RegisterCollectors()
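The new config getters above replace the hard-coded 1 << 35 (32GB) limits with values that default to 128GB and, per the comment, can be overridden from the environment. The general pattern they stand for can be sketched as below; the variable name and helper are placeholders for illustration, not Harbor's actual config package implementation:

package main

import (
    "fmt"
    "os"
    "strconv"
)

// maxMemoryBytes sketches the override-with-default idea: honor an environment
// variable when it holds a positive integer, otherwise fall back to the 128GB
// default. BEEGO_MAX_MEMORY_BYTES is a placeholder name for this example only.
func maxMemoryBytes() int64 {
    const defaultBytes = int64(1) << 37 // 128GB, matching DefaultBeegoMaxMemoryBytes
    if raw := os.Getenv("BEEGO_MAX_MEMORY_BYTES"); raw != "" {
        if n, err := strconv.ParseInt(raw, 10, 64); err == nil && n > 0 {
            return n
        }
    }
    return defaultBytes
}

func main() {
    fmt.Println("effective max memory bytes:", maxMemoryBytes())
}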

View File

@@ -97,7 +97,6 @@ require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/denverdino/aliyungo v0.0.0-20191227032621-df38c6fa730c // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
-github.com/distribution/reference v0.5.0 // indirect
github.com/dnaeon/go-vcr v1.2.0 // indirect
github.com/docker/go-metrics v0.0.1 // indirect
github.com/felixge/httpsnoop v1.0.3 // indirect

View File

@@ -37,6 +37,7 @@ dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7
github.com/Azure/azure-sdk-for-go v37.2.0+incompatible h1:LTdcd2GK+cv+e7yhWCN8S7yf3eblBypKFZsPfKjCQ7E=
github.com/Azure/azure-sdk-for-go v37.2.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0=
+github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs=
github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI=
@@ -76,6 +77,7 @@ github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0
github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g=
github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
+github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
@@ -160,25 +162,27 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZm
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/dhui/dktest v0.3.16 h1:i6gq2YQEtcrjKbeJpBkWjE8MmLZPYllcjOFbTZuPDnw=
github.com/dhui/dktest v0.3.16/go.mod h1:gYaA3LRmM8Z4vJl2MA0THIigJoZrwOansEOsp+kqxp0=
github.com/distribution/distribution v2.8.2+incompatible h1:k9+4DKdOG+quPFZXT/mUsiQrGu9vYCp+dXpuPkuqhk8=
github.com/distribution/distribution v2.8.2+incompatible/go.mod h1:EgLm2NgWtdKgzF9NpMzUKgzmR7AMmb0VQi2B+ZzDRjc=
github.com/distribution/distribution v2.8.3+incompatible h1:RlpEXBLq/WPXYvBYMDAmBX/SnhD67qwtvW/DzKc8pAo=
github.com/distribution/distribution v2.8.3+incompatible/go.mod h1:EgLm2NgWtdKgzF9NpMzUKgzmR7AMmb0VQi2B+ZzDRjc=
github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK2OFGvA0=
github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI=
github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ=
github.com/docker/docker v20.10.24+incompatible h1:Ugvxm7a8+Gz6vqQYQQ2W7GYq5EUPaAiuPgIfVyI3dYE=
github.com/docker/docker v20.10.24+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
github.com/docker/go-metrics v0.0.1 h1:AgB/0SvBxihN0X8OR4SjsblXkbMvalQ8cjmtKQ2rQV8=
github.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw=
github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7 h1:UhxFibDNY/bfvqU5CAUmr9zpesgbU6SWc8/B4mflAE4=
github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE=
github.com/elazarl/go-bindata-assetfs v1.0.1 h1:m0kkaHRKEu7tUIUFVwhGGGYClXvyl4RE03qmvRTNfbw=
github.com/elazarl/go-bindata-assetfs v1.0.1/go.mod h1:v+YaWX3bdea5J/mo8dSETolEo7R71Vk1u8bnjau5yw4=
github.com/emicklei/go-restful/v3 v3.10.1 h1:rc42Y5YTp7Am7CS630D7JmhRjq4UlEUuEKfrDac4bSQ=
github.com/emicklei/go-restful/v3 v3.10.1/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
@@ -191,6 +195,7 @@ github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSw
github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk=
github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE=
+github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
@@ -291,6 +296,7 @@ github.com/go-redis/redis/v8 v8.11.4 h1:kHoYkfZP6+pe04aFTnhDH6GDROa5yJdHJVNxV3F4
github.com/go-redis/redis/v8 v8.11.4/go.mod h1:2Z2wHZXdQpCDXEGzqMockDpNyYvi2l4Pxt6RJr792+w=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
+github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
@@ -368,6 +374,7 @@ github.com/gomodule/redigo v1.8.8/go.mod h1:7ArFNvsTjH8GMMzB4uy1snslv2BwmginuMs0
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/gnostic v0.5.7-v3refs h1:FhTMOKj2VhjpouxvWJAV1TL304uMlb9zcDqkl6cEI54=
+github.com/google/gnostic v0.5.7-v3refs/go.mod h1:73MKFl6jIHelAJNaBGFzt3SPtZULs9dYrGFt8OiIsHQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
@@ -380,6 +387,7 @@ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
+github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@@ -533,6 +541,7 @@ github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
+github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
@@ -544,6 +553,7 @@ github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw=
+github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls=
github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
@@ -565,6 +575,7 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y=
github.com/mattn/go-sqlite3 v1.14.16/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo=
github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
@ -581,6 +592,7 @@ github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RR
github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs= github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs=
github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0= github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@ -591,7 +603,9 @@ github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjY
github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8= github.com/modocache/gover v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8=
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4= github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4=
@ -615,6 +629,7 @@ github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7J
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY=
github.com/onsi/gomega v1.23.0 h1:/oxKu9c2HVap+F3PfKort2Hw5DEU+HGlW8n+tguWsys= github.com/onsi/gomega v1.23.0 h1:/oxKu9c2HVap+F3PfKort2Hw5DEU+HGlW8n+tguWsys=
github.com/onsi/gomega v1.23.0/go.mod h1:Z/NWtiqwBrwUt4/2loMmHL63EDLnYHmVbuBpDr2vQAg=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.0-rc2.0.20221005185240-3a7f492d3f1b h1:YWuSjZCQAPM8UUBLkYUk1e+rZcvWHJmFb6i6rM44Xs8= github.com/opencontainers/image-spec v1.1.0-rc2.0.20221005185240-3a7f492d3f1b h1:YWuSjZCQAPM8UUBLkYUk1e+rZcvWHJmFb6i6rM44Xs8=
@ -672,6 +687,7 @@ github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR
github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBOAvL+k= github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBOAvL+k=
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=
@ -684,6 +700,7 @@ github.com/shiena/ansicolor v0.0.0-20200904210342-c7312218db18/go.mod h1:nkxAfR/
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8=
github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
@ -733,6 +750,7 @@ github.com/tencentcloud/tencentcloud-sdk-go v1.0.62/go.mod h1:asUz5BPXxgoPGaRgZa
github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4= github.com/tidwall/pretty v1.0.0 h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/vmihailenco/msgpack/v5 v5.0.0-rc.2 h1:ognci8XPlosGhIHK1OLYSpSpnlhSFeBklfe18zIEwcU= github.com/vmihailenco/msgpack/v5 v5.0.0-rc.2 h1:ognci8XPlosGhIHK1OLYSpSpnlhSFeBklfe18zIEwcU=
github.com/vmihailenco/msgpack/v5 v5.0.0-rc.2/go.mod h1:HVxBVPUK/+fZMonk4bi1islLa8V3cfnBug0+4dykPzo= github.com/vmihailenco/msgpack/v5 v5.0.0-rc.2/go.mod h1:HVxBVPUK/+fZMonk4bi1islLa8V3cfnBug0+4dykPzo=
github.com/vmihailenco/tagparser v0.1.2 h1:gnjoVuB/kljJ5wICEEOpx98oXMWPLj22G67Vbd1qPqc= github.com/vmihailenco/tagparser v0.1.2 h1:gnjoVuB/kljJ5wICEEOpx98oXMWPLj22G67Vbd1qPqc=
@ -872,6 +890,7 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.10.0 h1:lFO9qtOdlre5W1jxS3r/4szv2/6iXxScdzjoBMXNhYk= golang.org/x/mod v0.10.0 h1:lFO9qtOdlre5W1jxS3r/4szv2/6iXxScdzjoBMXNhYk=
golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -1087,6 +1106,7 @@ golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4f
golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.9.1 h1:8WMNJAz3zrtPmnYC7ISf5dEn3MT0gY7jBJfw27yrrLo= golang.org/x/tools v0.9.1 h1:8WMNJAz3zrtPmnYC7ISf5dEn3MT0gY7jBJfw27yrrLo=
golang.org/x/tools v0.9.1/go.mod h1:owI94Op576fPu3cIGQeHs3joujW/2Oc6MtlxbF5dfNc=
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@ -1217,6 +1237,7 @@ k8s.io/client-go v0.26.2/go.mod h1:u5EjOuSyBa09yqqyY7m3abZeovO/7D/WehVVlZ2qcqU=
k8s.io/klog/v2 v2.90.1 h1:m4bYOKall2MmOiRaR1J+We67Do7vm9KiQVlT96lnHUw= k8s.io/klog/v2 v2.90.1 h1:m4bYOKall2MmOiRaR1J+We67Do7vm9KiQVlT96lnHUw=
k8s.io/klog/v2 v2.90.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= k8s.io/klog/v2 v2.90.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0=
k8s.io/kube-openapi v0.0.0-20221012153701-172d655c2280 h1:+70TFaan3hfJzs+7VK2o+OGxg8HsuBr/5f6tVAjDu6E= k8s.io/kube-openapi v0.0.0-20221012153701-172d655c2280 h1:+70TFaan3hfJzs+7VK2o+OGxg8HsuBr/5f6tVAjDu6E=
k8s.io/kube-openapi v0.0.0-20221012153701-172d655c2280/go.mod h1:+Axhij7bCpeqhklhUTe3xmOn6bWxolyZEeyaFpjGtl4=
k8s.io/utils v0.0.0-20230220204549-a5ecb0141aa5 h1:kmDqav+P+/5e1i9tFfHq1qcF3sOrDp+YEkVDAHu7Jwk= k8s.io/utils v0.0.0-20230220204549-a5ecb0141aa5 h1:kmDqav+P+/5e1i9tFfHq1qcF3sOrDp+YEkVDAHu7Jwk=
k8s.io/utils v0.0.0-20230220204549-a5ecb0141aa5/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= k8s.io/utils v0.0.0-20230220204549-a5ecb0141aa5/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=

View File

@ -0,0 +1,85 @@
// Copyright Project Harbor Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package gdpr
import (
"fmt"
"github.com/goharbor/harbor/src/jobservice/job"
"github.com/goharbor/harbor/src/lib/errors"
"github.com/goharbor/harbor/src/pkg/audit"
"github.com/goharbor/harbor/src/pkg/user"
)
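// UserNameParam is the name of the job parameter that carries the username to be masked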
const UserNameParam = "username"
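// AuditLogsDataMasking is the job that masks a deleted user's audit log entries by replacing the username with its checksum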
type AuditLogsDataMasking struct {
manager audit.Manager
userManager user.Manager
}
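// MaxFails limits how many times the job may fail before the job service gives up on it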
func (a AuditLogsDataMasking) MaxFails() uint {
return 3
}
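// MaxCurrency caps how many instances of this job may run concurrently; 1 keeps the masking serialized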
func (a AuditLogsDataMasking) MaxCurrency() uint {
return 1
}
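// ShouldRetry tells the job service to retry the job on failure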
func (a AuditLogsDataMasking) ShouldRetry() bool {
return true
}
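// Validate checks that the required username parameter is present and well-formed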
func (a AuditLogsDataMasking) Validate(params job.Parameters) error {
if params == nil {
// Params are required
return errors.New("missing job parameters")
}
_, err := a.parseParams(params)
return err
}
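// init wires in the default audit and user managers when none were injected (tests supply mocks)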
func (a *AuditLogsDataMasking) init() {
if a.manager == nil {
a.manager = audit.New()
}
if a.userManager == nil {
a.userManager = user.New()
}
}
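// Run replaces the username in the user's audit log entries with its checksum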
func (a AuditLogsDataMasking) Run(ctx job.Context, params job.Parameters) error {
logger := ctx.GetLogger()
logger.Info("GDPR audit logs data masking job started")
a.init()
username, err := a.parseParams(params)
if err != nil {
return err
}
logger.Infof("Masking log entries for a user: %s", username)
return a.manager.UpdateUsername(ctx.SystemContext(), username, a.userManager.GenerateCheckSum(username))
}
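// parseParams extracts the username job parameter and verifies it is a string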
func (a AuditLogsDataMasking) parseParams(params job.Parameters) (string, error) {
value, exist := params[UserNameParam]
if !exist {
return "", fmt.Errorf("param %s not found", UserNameParam)
}
str, ok := value.(string)
if !ok {
return "", fmt.Errorf("the value of %s isn't string", UserNameParam)
}
return str, nil
}

View File

@ -0,0 +1,67 @@
// Copyright Project Harbor Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package gdpr
import (
"context"
"testing"
"github.com/stretchr/testify/assert"
"github.com/goharbor/harbor/src/jobservice/job"
mockjobservice "github.com/goharbor/harbor/src/testing/jobservice"
"github.com/goharbor/harbor/src/testing/pkg/audit"
"github.com/goharbor/harbor/src/testing/pkg/user"
)
func TestAuditLogsCleanupJobShouldRetry(t *testing.T) {
rep := &AuditLogsDataMasking{}
assert.True(t, rep.ShouldRetry())
}
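// TestAuditLogsCleanupJobValidateParams exercises parameter validation and a full Run against mocked audit and user managers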
func TestAuditLogsCleanupJobValidateParams(t *testing.T) {
const validUsername = "user"
var (
manager = &audit.Manager{}
userManager = &user.Manager{}
)
rep := &AuditLogsDataMasking{
manager: manager,
userManager: userManager,
}
err := rep.Validate(nil)
// parameters are required
assert.Error(t, err)
err = rep.Validate(job.Parameters{})
// no required username parameter
assert.Error(t, err)
validParams := job.Parameters{
"username": "user",
}
err = rep.Validate(validParams)
// parameters are valid
assert.Nil(t, err)
ctx := &mockjobservice.MockJobContext{}
logger := &mockjobservice.MockJobLogger{}
ctx.On("GetLogger").Return(logger)
userManager.On("GenerateCheckSum", validUsername).Return("hash")
manager.On("UpdateUsername", context.TODO(), validUsername, "hash").Return(nil)
err = rep.Run(ctx, validParams)
assert.Nil(t, err)
}

View File

@ -44,6 +44,8 @@ const (
ExecSweepVendorType = "EXECUTION_SWEEP" ExecSweepVendorType = "EXECUTION_SWEEP"
// ScanAllVendorType: the name of the scan all job // ScanAllVendorType: the name of the scan all job
ScanAllVendorType = "SCAN_ALL" ScanAllVendorType = "SCAN_ALL"
// AuditLogsGDPRCompliantVendorType: the name of the job that makes the audit log table GDPR-compliant
AuditLogsGDPRCompliantVendorType = "AUDIT_LOGS_GDPR_COMPLIANT"
) )
var ( var (

View File

@ -24,6 +24,8 @@ import (
"syscall" "syscall"
"time" "time"
"github.com/goharbor/harbor/src/jobservice/job/impl/gdpr"
"github.com/gomodule/redigo/redis" "github.com/gomodule/redigo/redis"
"github.com/goharbor/harbor/src/jobservice/api" "github.com/goharbor/harbor/src/jobservice/api"
@ -332,6 +334,7 @@ func (bs *Bootstrap) loadAndRunRedisWorkerPool(
"IMAGE_SCAN_ALL": (*legacy.ScanAllScheduler)(nil), "IMAGE_SCAN_ALL": (*legacy.ScanAllScheduler)(nil),
job.SystemArtifactCleanupVendorType: (*systemartifact.Cleanup)(nil), job.SystemArtifactCleanupVendorType: (*systemartifact.Cleanup)(nil),
job.ExecSweepVendorType: (*task.SweepJob)(nil), job.ExecSweepVendorType: (*task.SweepJob)(nil),
job.AuditLogsGDPRCompliantVendorType: (*gdpr.AuditLogsDataMasking)(nil),
}); err != nil { }); err != nil {
// exit // exit
return nil, err return nil, err

View File

@ -14,7 +14,11 @@
package metadata package metadata
import "github.com/goharbor/harbor/src/common" import (
"fmt"
"github.com/goharbor/harbor/src/common"
)
// Item - Configure item include default value, type, env name // Item - Configure item include default value, type, env name
type Item struct { type Item struct {
@ -181,6 +185,7 @@ var (
{Name: common.CacheExpireHours, Scope: SystemScope, Group: BasicGroup, EnvKey: "CACHE_EXPIRE_HOURS", DefaultValue: "24", ItemType: &IntType{}, Editable: false, Description: `The expire hours for cache`}, {Name: common.CacheExpireHours, Scope: SystemScope, Group: BasicGroup, EnvKey: "CACHE_EXPIRE_HOURS", DefaultValue: "24", ItemType: &IntType{}, Editable: false, Description: `The expire hours for cache`},
{Name: common.GDPRDeleteUser, Scope: SystemScope, Group: GDPRGroup, EnvKey: "GDPR_DELETE_USER", DefaultValue: "false", ItemType: &BoolType{}, Editable: false, Description: `The flag indicates if a user should be deleted compliant with GDPR.`}, {Name: common.GDPRDeleteUser, Scope: SystemScope, Group: GDPRGroup, EnvKey: "GDPR_DELETE_USER", DefaultValue: "false", ItemType: &BoolType{}, Editable: false, Description: `The flag indicates if a user should be deleted compliant with GDPR.`},
{Name: common.GDPRAuditLogs, Scope: SystemScope, Group: GDPRGroup, EnvKey: "GDPR_AUDIT_LOGS", DefaultValue: "false", ItemType: &BoolType{}, Editable: false, Description: `The flag indicates whether the audit logs of a deleted user should be made GDPR-compliant.`},
{Name: common.AuditLogForwardEndpoint, Scope: UserScope, Group: BasicGroup, EnvKey: "AUDIT_LOG_FORWARD_ENDPOINT", DefaultValue: "", ItemType: &StringType{}, Editable: false, Description: `The endpoint to forward the audit log.`}, {Name: common.AuditLogForwardEndpoint, Scope: UserScope, Group: BasicGroup, EnvKey: "AUDIT_LOG_FORWARD_ENDPOINT", DefaultValue: "", ItemType: &StringType{}, Editable: false, Description: `The endpoint to forward the audit log.`},
{Name: common.SkipAuditLogDatabase, Scope: UserScope, Group: BasicGroup, EnvKey: "SKIP_LOG_AUDIT_DATABASE", DefaultValue: "false", ItemType: &BoolType{}, Editable: false, Description: `The option to skip audit log in database`}, {Name: common.SkipAuditLogDatabase, Scope: UserScope, Group: BasicGroup, EnvKey: "SKIP_LOG_AUDIT_DATABASE", DefaultValue: "false", ItemType: &BoolType{}, Editable: false, Description: `The option to skip audit log in database`},
@ -192,5 +197,8 @@ var (
{Name: common.BannerMessage, Scope: UserScope, Group: BasicGroup, EnvKey: "BANNER_MESSAGE", DefaultValue: "", ItemType: &StringType{}, Editable: true, Description: `The customized banner message for the UI`}, {Name: common.BannerMessage, Scope: UserScope, Group: BasicGroup, EnvKey: "BANNER_MESSAGE", DefaultValue: "", ItemType: &StringType{}, Editable: true, Description: `The customized banner message for the UI`},
{Name: common.QuotaUpdateProvider, Scope: SystemScope, Group: BasicGroup, EnvKey: "QUOTA_UPDATE_PROVIDER", DefaultValue: "db", ItemType: &StringType{}, Editable: false, Description: `The provider for updating quota, 'db' or 'redis' is supported`}, {Name: common.QuotaUpdateProvider, Scope: SystemScope, Group: BasicGroup, EnvKey: "QUOTA_UPDATE_PROVIDER", DefaultValue: "db", ItemType: &StringType{}, Editable: false, Description: `The provider for updating quota, 'db' or 'redis' is supported`},
{Name: common.BeegoMaxMemoryBytes, Scope: SystemScope, Group: BasicGroup, EnvKey: "BEEGO_MAX_MEMORY_BYTES", DefaultValue: fmt.Sprintf("%d", common.DefaultBeegoMaxMemoryBytes), ItemType: &Int64Type{}, Editable: false, Description: `The bytes for limiting the beego max memory, default is 128GB`},
{Name: common.BeegoMaxUploadSizeBytes, Scope: SystemScope, Group: BasicGroup, EnvKey: "BEEGO_MAX_UPLOAD_SIZE_BYTES", DefaultValue: fmt.Sprintf("%d", common.DefaultBeegoMaxUploadSizeBytes), ItemType: &Int64Type{}, Editable: false, Description: `The bytes for limiting the beego max upload size, default is 128GB`},
} }
) )

View File

@ -98,4 +98,5 @@ type GroupConf struct {
type GDPRSetting struct { type GDPRSetting struct {
DeleteUser bool `json:"user_delete,omitempty"` DeleteUser bool `json:"user_delete,omitempty"`
AuditLogs bool `json:"audit_logs"`
} }

View File

@ -132,6 +132,16 @@ func GetQuotaUpdateProvider() string {
return DefaultMgr().Get(backgroundCtx, common.QuotaUpdateProvider).GetString() return DefaultMgr().Get(backgroundCtx, common.QuotaUpdateProvider).GetString()
} }
// GetBeegoMaxMemoryBytes returns the max memory bytes of beego config
func GetBeegoMaxMemoryBytes() int64 {
return DefaultMgr().Get(backgroundCtx, common.BeegoMaxMemoryBytes).GetInt64()
}
// GetBeegoMaxUploadSizeBytes returns the max upload size bytes of beego config
func GetBeegoMaxUploadSizeBytes() int64 {
return DefaultMgr().Get(backgroundCtx, common.BeegoMaxUploadSizeBytes).GetInt64()
}
// WithTrivy returns a bool value to indicate if Harbor's deployed with Trivy. // WithTrivy returns a bool value to indicate if Harbor's deployed with Trivy.
func WithTrivy() bool { func WithTrivy() bool {
return DefaultMgr().Get(backgroundCtx, common.WithTrivy).GetBool() return DefaultMgr().Get(backgroundCtx, common.WithTrivy).GetBool()

View File

@ -186,6 +186,7 @@ func GDPRSetting(ctx context.Context) (*cfgModels.GDPRSetting, error) {
} }
return &cfgModels.GDPRSetting{ return &cfgModels.GDPRSetting{
DeleteUser: DefaultMgr().Get(ctx, common.GDPRDeleteUser).GetBool(), DeleteUser: DefaultMgr().Get(ctx, common.GDPRDeleteUser).GetBool(),
AuditLogs: DefaultMgr().Get(ctx, common.GDPRAuditLogs).GetBool(),
}, nil }, nil
} }

View File

@ -73,6 +73,10 @@ const (
tagged = `=id AND EXISTS ( tagged = `=id AND EXISTS (
SELECT 1 FROM tag WHERE tag.artifact_id = T0.id SELECT 1 FROM tag WHERE tag.artifact_id = T0.id
)` )`
// accessory filter: exclude artifacts that are accessories of another artifact
notacc = `=id AND NOT EXISTS (
SELECT 1 FROM artifact_accessory aa WHERE aa.artifact_id = T0.id
)`
) )
// New returns an instance of the default DAO // New returns an instance of the default DAO
@ -416,6 +420,7 @@ func setAccessoryQuery(qs beegoorm.QuerySeter, query *q.Query) (beegoorm.QuerySe
if query == nil { if query == nil {
return qs, nil return qs, nil
} }
qs = qs.FilterRaw("id", "not in (select artifact_id from artifact_accessory)")
qs = qs.FilterRaw("id", notacc)
return qs, nil return qs, nil
} }

View File

@ -42,6 +42,8 @@ type DAO interface {
Delete(ctx context.Context, id int64) (err error) Delete(ctx context.Context, id int64) (err error)
// Purge the audit log // Purge the audit log
Purge(ctx context.Context, retentionHour int, includeOperations []string, dryRun bool) (int64, error) Purge(ctx context.Context, retentionHour int, includeOperations []string, dryRun bool) (int64, error)
// UpdateUsername replaces the username in all matching audit log records
UpdateUsername(ctx context.Context, username string, usernameReplace string) error
} }
// New returns an instance of the default DAO // New returns an instance of the default DAO
@ -57,6 +59,15 @@ var allowedMaps = map[string]interface{}{
type dao struct{} type dao struct{}
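// UpdateUsername replaces the username in every audit_log row whose username matches the given value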
func (d *dao) UpdateUsername(ctx context.Context, username string, usernameReplace string) error {
o, err := orm.FromContext(ctx)
if err != nil {
return err
}
_, err = o.Raw("UPDATE audit_log SET username = ? WHERE username = ?", usernameReplace, username).Exec()
return err
}
// Purge delete expired audit log // Purge delete expired audit log
func (*dao) Purge(ctx context.Context, retentionHour int, includeOperations []string, dryRun bool) (int64, error) { func (*dao) Purge(ctx context.Context, retentionHour int, includeOperations []string, dryRun bool) (int64, error) {
ormer, err := orm.FromContext(ctx) ormer, err := orm.FromContext(ctx)

View File

@ -40,6 +40,8 @@ type Manager interface {
Delete(ctx context.Context, id int64) (err error) Delete(ctx context.Context, id int64) (err error)
// Purge delete the audit log with retention hours // Purge delete the audit log with retention hours
Purge(ctx context.Context, retentionHour int, includeOperations []string, dryRun bool) (int64, error) Purge(ctx context.Context, retentionHour int, includeOperations []string, dryRun bool) (int64, error)
// UpdateUsername replaces the username in all matching audit log records with the given replacement value
UpdateUsername(ctx context.Context, username string, replaceWith string) error
} }
// New returns a default implementation of Manager // New returns a default implementation of Manager
@ -53,6 +55,10 @@ type manager struct {
dao dao.DAO dao dao.DAO
} }
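// UpdateUsername delegates the username replacement to the audit log DAO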
func (m *manager) UpdateUsername(ctx context.Context, username string, replaceWith string) error {
return m.dao.UpdateUsername(ctx, username, replaceWith)
}
// Count ... // Count ...
func (m *manager) Count(ctx context.Context, query *q.Query) (int64, error) { func (m *manager) Count(ctx context.Context, query *q.Query) (int64, error) {
return m.dao.Count(ctx, query) return m.dao.Count(ctx, query)

View File

@ -63,6 +63,8 @@ type Manager interface {
// put the id in the pointer of user model, if it does exist, return the user's profile. // put the id in the pointer of user model, if it does exist, return the user's profile.
// This is used for ldap and uaa authentication, such the user can have an ID in Harbor. // This is used for ldap and uaa authentication, such the user can have an ID in Harbor.
Onboard(ctx context.Context, user *commonmodels.User) error Onboard(ctx context.Context, user *commonmodels.User) error
// GenerateCheckSum generates a hex-encoded crc32 checksum of the given string
GenerateCheckSum(in string) string
} }
// New returns a default implementation of Manager // New returns a default implementation of Manager
@ -111,9 +113,9 @@ func (m *manager) DeleteGDPR(ctx context.Context, id int) error {
if err != nil { if err != nil {
return err return err
} }
u.Username = fmt.Sprintf("%s#%d", checkSum(u.Username), u.UserID) u.Username = fmt.Sprintf("%s#%d", m.GenerateCheckSum(u.Username), u.UserID)
u.Email = fmt.Sprintf("%s#%d", checkSum(u.Email), u.UserID) u.Email = fmt.Sprintf("%s#%d", m.GenerateCheckSum(u.Email), u.UserID)
u.Realname = fmt.Sprintf("%s#%d", checkSum(u.Realname), u.UserID) u.Realname = fmt.Sprintf("%s#%d", m.GenerateCheckSum(u.Realname), u.UserID)
u.Deleted = true u.Deleted = true
return m.dao.Update(ctx, u, "username", "email", "realname", "deleted") return m.dao.Update(ctx, u, "username", "email", "realname", "deleted")
} }
@ -231,13 +233,14 @@ func excludeDefaultAdmin(query *q.Query) (qu *q.Query) {
return query return query
} }
// GenerateCheckSum generates a hex-encoded crc32 checksum for the given string
func (m *manager) GenerateCheckSum(str string) string {
return fmt.Sprintf("%08x", crc32.Checksum([]byte(str), crc32.IEEETable))
}
func injectPasswd(u *commonmodels.User, password string) { func injectPasswd(u *commonmodels.User, password string) {
salt := utils.GenerateRandomString() salt := utils.GenerateRandomString()
u.Password = utils.Encrypt(password, salt, utils.SHA256) u.Password = utils.Encrypt(password, salt, utils.SHA256)
u.Salt = salt u.Salt = salt
u.PasswordVersion = utils.SHA256 u.PasswordVersion = utils.SHA256
} }
func checkSum(str string) string {
return fmt.Sprintf("%08x", crc32.Checksum([]byte(str), crc32.IEEETable))
}

View File

@ -65,9 +65,9 @@ func (m *mgrTestSuite) TestUserDeleteGDPR() {
m.dao.On("Update", mock.Anything, testifymock.MatchedBy( m.dao.On("Update", mock.Anything, testifymock.MatchedBy(
func(u *models.User) bool { func(u *models.User) bool {
return u.UserID == 123 && return u.UserID == 123 &&
u.Email == fmt.Sprintf("%s#%d", checkSum("existing@mytest.com"), existingUser.UserID) && u.Email == fmt.Sprintf("%s#%d", m.mgr.GenerateCheckSum("existing@mytest.com"), existingUser.UserID) &&
u.Username == fmt.Sprintf("%s#%d", checkSum("existing"), existingUser.UserID) && u.Username == fmt.Sprintf("%s#%d", m.mgr.GenerateCheckSum("existing"), existingUser.UserID) &&
u.Realname == fmt.Sprintf("%s#%d", checkSum("RealName"), existingUser.UserID) && u.Realname == fmt.Sprintf("%s#%d", m.mgr.GenerateCheckSum("RealName"), existingUser.UserID) &&
u.Deleted == true u.Deleted == true
}), }),
"username", "username",

View File

@ -88,6 +88,8 @@ export class RobotPermissionsPanelComponent
this.candidateActions.push(item?.action); this.candidateActions.push(item?.action);
} }
}); });
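// keep the candidate resources and actions in a stable, alphabetical order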
this.candidateActions.sort();
this.candidateResources.sort();
} }
isCandidate(resource: string, action: string): boolean { isCandidate(resource: string, action: string): boolean {

View File

@ -32,6 +32,7 @@ import (
"github.com/goharbor/harbor/src/lib/config" "github.com/goharbor/harbor/src/lib/config"
"github.com/goharbor/harbor/src/lib/errors" "github.com/goharbor/harbor/src/lib/errors"
"github.com/goharbor/harbor/src/lib/log" "github.com/goharbor/harbor/src/lib/log"
"github.com/goharbor/harbor/src/pkg/permission/types"
pkg "github.com/goharbor/harbor/src/pkg/robot/model" pkg "github.com/goharbor/harbor/src/pkg/robot/model"
"github.com/goharbor/harbor/src/server/v2.0/handler/model" "github.com/goharbor/harbor/src/server/v2.0/handler/model"
"github.com/goharbor/harbor/src/server/v2.0/models" "github.com/goharbor/harbor/src/server/v2.0/models"
@ -296,6 +297,28 @@ func (rAPI *robotAPI) validate(d int64, level string, permissions []*models.Robo
if level == robot.LEVELPROJECT && len(permissions) > 1 { if level == robot.LEVELPROJECT && len(permissions) > 1 {
return errors.New(nil).WithMessage("bad request permission").WithCode(errors.BadRequestCode) return errors.New(nil).WithMessage("bad request permission").WithCode(errors.BadRequestCode)
} }
// validate that every requested access is within the policy scope allowed for its level
for _, perm := range permissions {
if perm.Kind == robot.LEVELSYSTEM {
policies := rbac.PoliciesMap["System"]
for _, acc := range perm.Access {
if !containsAccess(policies, acc) {
return errors.New(nil).WithMessage("bad request permission: %s:%s", acc.Resource, acc.Action).WithCode(errors.BadRequestCode)
}
}
} else if perm.Kind == robot.LEVELPROJECT {
policies := rbac.PoliciesMap["Project"]
for _, acc := range perm.Access {
if !containsAccess(policies, acc) {
return errors.New(nil).WithMessage("bad request permission: %s:%s", acc.Resource, acc.Action).WithCode(errors.BadRequestCode)
}
}
} else {
return errors.New(nil).WithMessage("bad request permission level: %s", perm.Kind).WithCode(errors.BadRequestCode)
}
}
return nil return nil
} }
@ -364,3 +387,12 @@ func validateName(name string) error {
} }
return nil return nil
} }
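// containsAccess reports whether the resource/action pair of the given access exists in the policy list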
func containsAccess(policies []*types.Policy, item *models.Access) bool {
for _, po := range policies {
if po.Resource.String() == item.Resource && po.Action.String() == item.Action {
return true
}
}
return false
}

View File

@ -1,6 +1,8 @@
package handler package handler
import ( import (
"github.com/goharbor/harbor/src/common/rbac"
"github.com/goharbor/harbor/src/server/v2.0/models"
"math" "math"
"testing" "testing"
) )
@ -129,3 +131,79 @@ func TestValidateName(t *testing.T) {
}) })
} }
} }
func TestContainsAccess(t *testing.T) {
system := rbac.PoliciesMap["System"]
systests := []struct {
name string
acc *models.Access
expected bool
}{
{"System ResourceRegistry push",
&models.Access{
Resource: rbac.ResourceRegistry.String(),
Action: rbac.ActionPush.String(),
},
false,
},
{"System ResourceProject delete",
&models.Access{
Resource: rbac.ResourceProject.String(),
Action: rbac.ActionDelete.String(),
},
false,
},
{"System ResourceReplicationPolicy delete",
&models.Access{
Resource: rbac.ResourceReplicationPolicy.String(),
Action: rbac.ActionDelete.String(),
},
true,
},
}
for _, tt := range systests {
t.Run(tt.name, func(t *testing.T) {
ok := containsAccess(system, tt.acc)
if ok != tt.expected {
t.Errorf("name: %s, containsAccess() = %#v, want %#v", tt.name, tt.acc, tt.expected)
}
})
}
project := rbac.PoliciesMap["Project"]
protests := []struct {
name string
acc *models.Access
expected bool
}{
{"Project ResourceLog delete",
&models.Access{
Resource: rbac.ResourceLog.String(),
Action: rbac.ActionDelete.String(),
},
false,
},
{"Project ResourceMetadata read",
&models.Access{
Resource: rbac.ResourceMetadata.String(),
Action: rbac.ActionRead.String(),
},
true,
},
{"Project ResourceRobot create",
&models.Access{
Resource: rbac.ResourceRobot.String(),
Action: rbac.ActionCreate.String(),
},
false,
},
}
for _, tt := range protests {
t.Run(tt.name, func(t *testing.T) {
ok := containsAccess(project, tt.acc)
if ok != tt.expected {
t.Errorf("name: %s, containsAccess() = %#v, want %#v", tt.name, tt.acc, tt.expected)
}
})
}
}

View File

@ -155,6 +155,20 @@ func (_m *DAO) Purge(ctx context.Context, retentionHour int, includeOperations [
return r0, r1 return r0, r1
} }
// UpdateUsername provides a mock function with given fields: ctx, username, usernameReplace
func (_m *DAO) UpdateUsername(ctx context.Context, username string, usernameReplace string) error {
ret := _m.Called(ctx, username, usernameReplace)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok {
r0 = rf(ctx, username, usernameReplace)
} else {
r0 = ret.Error(0)
}
return r0
}
// NewDAO creates a new instance of DAO. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // NewDAO creates a new instance of DAO. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value. // The first argument is typically a *testing.T value.
func NewDAO(t interface { func NewDAO(t interface {

View File

@ -154,6 +154,20 @@ func (_m *Manager) Purge(ctx context.Context, retentionHour int, includeOperatio
return r0, r1 return r0, r1
} }
// UpdateUsername provides a mock function with given fields: ctx, username, replaceWith
func (_m *Manager) UpdateUsername(ctx context.Context, username string, replaceWith string) error {
ret := _m.Called(ctx, username, replaceWith)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok {
r0 = rf(ctx, username, replaceWith)
} else {
r0 = ret.Error(0)
}
return r0
}
// NewManager creates a new instance of Manager. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // NewManager creates a new instance of Manager. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value. // The first argument is typically a *testing.T value.
func NewManager(t interface { func NewManager(t interface {

View File

@ -102,6 +102,20 @@ func (_m *Manager) DeleteGDPR(ctx context.Context, id int) error {
return r0 return r0
} }
// GenerateCheckSum provides a mock function with given fields: in
func (_m *Manager) GenerateCheckSum(in string) string {
ret := _m.Called(in)
var r0 string
if rf, ok := ret.Get(0).(func(string) string); ok {
r0 = rf(in)
} else {
r0 = ret.Get(0).(string)
}
return r0
}
// Get provides a mock function with given fields: ctx, id // Get provides a mock function with given fields: ctx, id
func (_m *Manager) Get(ctx context.Context, id int) (*commonmodels.User, error) { func (_m *Manager) Get(ctx context.Context, id int) (*commonmodels.User, error) {
ret := _m.Called(ctx, id) ret := _m.Called(ctx, id)

View File

@ -21,8 +21,8 @@ class Robot(base.Base, object):
base._assert_status_code(200, status_code) base._assert_status_code(200, status_code)
return body return body
def create_access_list(self, right_map = [True] * 10): def create_access_list(self, right_map = [True] * 7):
_assert_status_code(10, len(right_map), r"Please input full access list for system robot account. Expected {}, while actual input count is {}.") _assert_status_code(7, len(right_map), r"Please input full access list for system robot account. Expected {}, while actual input count is {}.")
action_pull = "pull" action_pull = "pull"
action_push = "push" action_push = "push"
action_read = "read" action_read = "read"
@ -33,9 +33,6 @@ class Robot(base.Base, object):
("repository", action_pull), ("repository", action_pull),
("repository", action_push), ("repository", action_push),
("artifact", action_del), ("artifact", action_del),
("helm-chart", action_read),
("helm-chart-version", action_create),
("helm-chart-version", action_del),
("tag", action_create), ("tag", action_create),
("tag", action_del), ("tag", action_del),
("artifact-label", action_create), ("artifact-label", action_create),
@ -50,8 +47,7 @@ class Robot(base.Base, object):
return access_list return access_list
def create_project_robot(self, project_name, duration, robot_name = None, robot_desc = None, def create_project_robot(self, project_name, duration, robot_name = None, robot_desc = None,
has_pull_right = True, has_push_right = True, has_chart_read_right = True, has_pull_right = True, has_push_right = True, expect_status_code = 201, expect_response_body = None,
has_chart_create_right = True, expect_status_code = 201, expect_response_body = None,
**kwargs): **kwargs):
if robot_name is None: if robot_name is None:
robot_name = base._random_name("robot") robot_name = base._random_name("robot")
@ -62,20 +58,12 @@ class Robot(base.Base, object):
access_list = [] access_list = []
action_pull = "pull" action_pull = "pull"
action_push = "push" action_push = "push"
action_read = "read"
action_create = "create"
if has_pull_right is True: if has_pull_right is True:
robotAccountAccess = v2_swagger_client.Access(resource = "repository", action = action_pull) robotAccountAccess = v2_swagger_client.Access(resource = "repository", action = action_pull)
access_list.append(robotAccountAccess) access_list.append(robotAccountAccess)
if has_push_right is True: if has_push_right is True:
robotAccountAccess = v2_swagger_client.Access(resource = "repository", action = action_push) robotAccountAccess = v2_swagger_client.Access(resource = "repository", action = action_push)
access_list.append(robotAccountAccess) access_list.append(robotAccountAccess)
if has_chart_read_right is True:
robotAccountAccess = v2_swagger_client.Access(resource = "helm-chart", action = action_read)
access_list.append(robotAccountAccess)
if has_chart_create_right is True:
robotAccountAccess = v2_swagger_client.Access(resource = "helm-chart-version", action = action_create)
access_list.append(robotAccountAccess)
robotaccountPermissions = v2_swagger_client.RobotPermission(kind = "project", namespace = project_name, access = access_list) robotaccountPermissions = v2_swagger_client.RobotPermission(kind = "project", namespace = project_name, access = access_list)
permission_list = [] permission_list = []

View File

@ -33,7 +33,7 @@ class TestJobServiceDashboard(unittest.TestCase, object):
self.registry = Registry() self.registry = Registry()
self.scan_all = ScanAll() self.scan_all = ScanAll()
self.schedule = Schedule() self.schedule = Schedule()
self.job_types = [ "GARBAGE_COLLECTION", "PURGE_AUDIT_LOG", "P2P_PREHEAT", "IMAGE_SCAN", "REPLICATION", "RETENTION", "SCAN_DATA_EXPORT", "SCHEDULER", "SLACK", "SYSTEM_ARTIFACT_CLEANUP", "WEBHOOK", "EXECUTION_SWEEP"] self.job_types = [ "GARBAGE_COLLECTION", "PURGE_AUDIT_LOG", "P2P_PREHEAT", "IMAGE_SCAN", "REPLICATION", "RETENTION", "SCAN_DATA_EXPORT", "SCHEDULER", "SLACK", "SYSTEM_ARTIFACT_CLEANUP", "WEBHOOK", "EXECUTION_SWEEP", "AUDIT_LOGS_GDPR_COMPLIANT"]
self.cron_type = "Custom" self.cron_type = "Custom"
self.cron = "0 0 0 * * 0" self.cron = "0 0 0 * * 0"

View File

@ -15,6 +15,7 @@ resource = os.environ.get("RESOURCE")
ID_PLACEHOLDER = "(id)" ID_PLACEHOLDER = "(id)"
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
class Permission: class Permission:
@ -38,6 +39,7 @@ class Permission:
elif self.res_id_field and self.payload_id_field and self.id_from_header == True: elif self.res_id_field and self.payload_id_field and self.id_from_header == True:
self.payload[self.payload_id_field] = int(response.headers["Location"].split("/")[-1]) self.payload[self.payload_id_field] = int(response.headers["Location"].split("/")[-1])
resource_permissions = {} resource_permissions = {}
# audit logs permissions start # audit logs permissions start
list_audit_logs = Permission("{}/audit-logs".format(harbor_base_url), "GET", 200) list_audit_logs = Permission("{}/audit-logs".format(harbor_base_url), "GET", 200)
@ -167,9 +169,9 @@ resource_permissions["replication-policy"] = replication_and_policy
# replication permissions start # replication permissions start
replication_policy_id = None replication_policy_id = None
replication_policy_name = "replication-policy-{}".format(random.randint(1000, 9999)) replication_policy_name = "replication-policy-{}".format(random.randint(1000, 9999))
result = urlsplit(harbor_base_url)
endpoint_URL = "{}://{}".format(result.scheme, result.netloc)
if resource == "replication": if resource == "replication":
result = urlsplit(harbor_base_url)
endpoint_URL = "{}://{}".format(result.scheme, result.netloc)
replication_registry_payload = { replication_registry_payload = {
"credential": { "credential": {
"access_key": admin_user_name, "access_key": admin_user_name,
@ -225,6 +227,94 @@ replication = [ create_replication_execution, list_replication_execution, read_r
resource_permissions["replication"] = replication resource_permissions["replication"] = replication
# replication permissions end # replication permissions end
# scan all permissions start
scan_all_weekly_schedule_payload = {
"schedule": {
"type": "Weekly",
"cron": "0 0 0 * * 0"
}
}
scan_all_reset_schedule_payload = {
"schedule": {
"type": "None",
"cron": ""
}
}
create_scan_all_schedule = Permission("{}/system/scanAll/schedule".format(harbor_base_url), "POST", 201, scan_all_weekly_schedule_payload)
update_scan_all_schedule = Permission("{}/system/scanAll/schedule".format(harbor_base_url), "PUT", 200, scan_all_reset_schedule_payload)
stop_scan_all = Permission("{}/system/scanAll/stop".format(harbor_base_url), "POST", 202)
scan_all_metrics = Permission("{}/scans/all/metrics".format(harbor_base_url), "GET", 200)
scan_all_schedule_metrics = Permission("{}/scans/schedule/metrics".format(harbor_base_url), "GET", 200)
scan_all = [ create_scan_all_schedule, update_scan_all_schedule, stop_scan_all, scan_all_metrics, scan_all_schedule_metrics ]
resource_permissions["scan-all"] = scan_all
# scan all permissions end
# system volumes permissions start
read_system_volumes = Permission("{}/systeminfo/volumes".format(harbor_base_url), "GET", 200)
system_volumes = [ read_system_volumes ]
resource_permissions["system-volumes"] = system_volumes
# system volumes permissions end
# jobservice monitor permissions start
list_jobservice_pool = Permission("{}/jobservice/pools".format(harbor_base_url), "GET", 200)
list_jobservice_pool_worker = Permission("{}/jobservice/pools/{}/workers".format(harbor_base_url, "88888888"), "GET", 200)
stop_jobservice_job = Permission("{}/jobservice/jobs/{}".format(harbor_base_url, "88888888"), "PUT", 200)
get_jobservice_job_log = Permission("{}/jobservice/jobs/{}/log".format(harbor_base_url, "88888888"), "GET", 500)
list_jobservice_queue = Permission("{}/jobservice/queues".format(harbor_base_url), "GET", 200)
stop_jobservice = Permission("{}/jobservice/queues/{}".format(harbor_base_url, "88888888"), "PUT", 200, payload={ "action": "stop" })
jobservice_monitor = [ list_jobservice_pool, list_jobservice_pool_worker, stop_jobservice_job, get_jobservice_job_log, list_jobservice_queue, stop_jobservice ]
resource_permissions["jobservice-monitor"] = jobservice_monitor
# jobservice monitor permissions end
# scanner permissions start
scanner_payload = {
"name": "scanner-{}".format(random.randint(1000, 9999)),
"url": "https://{}".format(random.randint(1000, 9999)),
"description": None,
"auth": "",
"skip_certVerify": False,
"use_internal_addr": False
}
list_scanner = Permission("{}/scanners".format(harbor_base_url), "GET", 200)
create_scanner = Permission("{}/scanners".format(harbor_base_url), "POST", 500, payload=scanner_payload)
ping_scanner = Permission("{}/scanners/ping".format(harbor_base_url), "POST", 500, payload=scanner_payload)
read_scanner = Permission("{}/scanners/{}".format(harbor_base_url, "88888888"), "GET", 404)
update_scanner = Permission("{}/scanners/{}".format(harbor_base_url, "88888888"), "PUT", 404, payload=scanner_payload)
delete_scanner = Permission("{}/scanners/{}".format(harbor_base_url, "88888888"), "DELETE", 404)
set_default_scanner = Permission("{}/scanners/{}".format(harbor_base_url, "88888888"), "PATCH", 404, payload={ "is_default": True })
get_scanner_metadata = Permission("{}/scanners/{}/metadata".format(harbor_base_url, "88888888"), "GET", 404)
scanner = [ list_scanner, create_scanner, ping_scanner, read_scanner, update_scanner, delete_scanner, set_default_scanner, get_scanner_metadata ]
resource_permissions["scanner"] = scanner
# scanner permissions end
# system label permissions start
label_payload = {
"name": "label-{}".format(random.randint(1000, 9999)),
"description": "",
"color": "",
"scope": "g",
"project_id": 0
}
create_label = Permission("{}/labels".format(harbor_base_url), "POST", 201, label_payload, "id", id_from_header=True)
read_label = Permission("{}/labels/{}".format(harbor_base_url, ID_PLACEHOLDER), "GET", 200, payload=label_payload, payload_id_field="id")
update_label = Permission("{}/labels/{}".format(harbor_base_url, ID_PLACEHOLDER), "PUT", 200, payload=label_payload, payload_id_field="id")
delete_label = Permission("{}/labels/{}".format(harbor_base_url, ID_PLACEHOLDER), "DELETE", 200, payload=label_payload, payload_id_field="id")
label = [ create_label, read_label, update_label, delete_label ]
resource_permissions["label"] = label
# system label permissions end
# security hub permissions start
read_summary = Permission("{}/security/summary".format(harbor_base_url), "GET", 200)
list_vul = Permission("{}/security/vul".format(harbor_base_url), "GET", 200)
security_hub = [ read_summary, list_vul ]
resource_permissions["security-hub"] = security_hub
# security hub permissions end
# catalog permissions start
read_catalog = Permission("{}/v2/_catalog".format(endpoint_URL), "GET", 200)
catalog = [ read_catalog ]
resource_permissions["catalog"] = catalog
# catalog permissions end
def main(): def main():

View File

@ -0,0 +1,365 @@
import copy
import json
import time
import requests
import urllib3
import os
admin_name = os.environ.get("ADMIN_NAME")
admin_password = os.environ.get("ADMIN_PASSWORD")
user_name = os.environ.get("USER_NAME")
password = os.environ.get("PASSWORD")
harbor_base_url = os.environ.get("HARBOR_BASE_URL")
resource = os.environ.get("RESOURCE")
project_id = os.environ.get("PROJECT_ID")
project_name = os.environ.get("PROJECT_NAME")
# the source artifact should belong to the provided project, e.g. "nginx"
source_artifact_name = os.environ.get("SOURCE_ARTIFACT_NAME")
# the source artifact tag should belong to the provided project, e.g. "latest"
source_artifact_tag = os.environ.get("SOURCE_ARTIFACT_TAG")
id_or_name = None
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
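# Permission describes one API request issued as the configured test user and the status code it is expected to return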
class Permission:
def __init__(self, url, method, expect_status_code, payload=None, need_id_or_name=False, res_id_field=None,
payload_id_field=None):
self.url = url
self.method = method
self.expect_status_code = expect_status_code
self.payload = payload
self.res_id_field = res_id_field
self.need_id_or_name = need_id_or_name
self.payload_id_field = payload_id_field if payload_id_field else res_id_field
def call(self):
global id_or_name
url = self.url
if self.need_id_or_name:
url = self.url.format(id_or_name)
response = requests.request(self.method, url, data=json.dumps(self.payload), verify=False,
auth=(user_name, password), headers={
"Content-Type": "application/json"
})
print("response: {}".format(response.text))
assert response.status_code == self.expect_status_code, ("Failed to call the {} {}, expected status code is {}, "
"but got {}, error msg is {}").format(
self.method, self.url, self.expect_status_code, response.status_code, response.text)
if self.res_id_field and self.payload_id_field and len(json.loads(response.text)) > 0:
id_or_name = json.loads(response.text)[0][self.res_id_field]
# Project permissions:
# 1. Resource: label, actions: ['read', 'create', 'update', 'delete', 'list']
label_payload = {
"color": "#FFFFFF",
"description": "Just for testing",
"name": "label-name-{}".format(int(round(time.time() * 1000))),
"project_id": int(project_id),
"scope": "p",
"id": None
}
create_label = Permission("{}/labels".format(harbor_base_url), "POST", 201, label_payload)
list_label = Permission("{}/labels?scope=p&project_id={}".format(harbor_base_url, project_id), "GET", 200,
label_payload, False, "id", "id")
read_label = Permission("{}/labels/{}".format(harbor_base_url, "{}"), "GET", 200, label_payload, True)
label_payload_for_update = copy.deepcopy(label_payload)
label_payload_for_update["description"] = "For update"
update_label = Permission("{}/labels/{}".format(harbor_base_url, "{}"), "PUT", 200, label_payload_for_update, True)
delete_label = Permission("{}/labels/{}".format(harbor_base_url, "{}"), "DELETE", 200, label_payload, True)
# 2. Resource: project, actions: ['read', 'update', 'delete']
project_payload_for_update = {"project_name": "test", "metadata": {"public": "false"}, "storage_limit": -1}
read_project = Permission("{}/projects/{}".format(harbor_base_url, project_id), "GET", 200, project_payload_for_update,
False)
update_project = Permission("{}/projects/{}".format(harbor_base_url, project_id), "PUT", 200,
project_payload_for_update, False)
delete_project = Permission("{}/projects/{}".format(harbor_base_url, project_id), "DELETE", 200,
project_payload_for_update, False)
deletable_project = Permission("{}/projects/{}/_deletable".format(harbor_base_url, project_id), "GET", 200,
project_payload_for_update, False)
# 3. Resource: metadata actions: ['read', 'list', 'create', 'update', 'delete'],
metadata_payload = {
"auto_scan": "true"
}
create_metadata = Permission("{}/projects/{}/metadatas".format(harbor_base_url, project_id), "POST", 200,
metadata_payload, False)
list_metadata = Permission("{}/projects/{}/metadatas".format(harbor_base_url, project_id), "GET", 200, metadata_payload,
False, )
read_metadata = Permission("{}/projects/{}/metadatas/auto_scan".format(harbor_base_url, project_id), "GET", 200,
metadata_payload, False)
metadata_payload_for_update = {
"auto_scan": "false"
}
update_metadata = Permission("{}/projects/{}/metadatas/auto_scan".format(harbor_base_url, project_id), "PUT", 200,
metadata_payload_for_update, False)
delete_metadata = Permission("{}/projects/{}/metadatas/auto_scan".format(harbor_base_url, project_id), "DELETE", 200,
metadata_payload, False)
# 4. Resource: repository actions: ['read', 'list', 'update', 'delete', 'pull', 'push']
# note: pull and push are for docker cli, no API needs them
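# Several checks below deliberately target repositories/artifacts that do not exist:
# a 404 response still proves the permission check passed, whereas a missing
# permission would be rejected with 403 before the resource is looked up.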
list_repo = Permission("{}/projects/{}/repositories".format(harbor_base_url, project_name), "GET", 200)
read_repo = Permission("{}/projects/{}/repositories/does_not_exist".format(harbor_base_url, project_name), "GET", 404)
repo_payload_for_update = {
}
update_repo = Permission("{}/projects/{}/repositories/does_not_exist".format(harbor_base_url, project_name), "PUT", 404,
repo_payload_for_update)
delete_repo = Permission("{}/projects/{}/repositories/does_not_exist".format(harbor_base_url, project_name), "DELETE",
404)
# 5. Resource artifact actions: ['read', 'list', 'create', 'delete'],
list_artifact = Permission("{}/projects/{}/repositories/does_not_exist/artifacts".format(harbor_base_url, project_name),
"GET", 200)
read_artifact = Permission(
"{}/projects/{}/repositories/does_not_exist/artifacts/reference_does_not_exist".format(harbor_base_url,
project_name), "GET", 404)
copy_artifact = Permission(
"{}/projects/{}/repositories/target_repo/artifacts?from={}/{}:{}".format(harbor_base_url, project_name,
project_name, source_artifact_name,
source_artifact_tag), "POST", 201)
delete_artifact = Permission(
"{}/projects/{}/repositories/target_repo/artifacts/{}".format(harbor_base_url, project_name, source_artifact_tag),
"DELETE", 200)
# 6. Resource scan actions: ['read', 'create', 'stop']
create_scan = Permission(
"{}/projects/{}/repositories/{}/artifacts/{}/scan".format(harbor_base_url, project_name, source_artifact_name,
source_artifact_tag), "POST", 202)
stop_scan = Permission(
"{}/projects/{}/repositories/{}/artifacts/{}/scan/stop".format(harbor_base_url, project_name, source_artifact_name,
source_artifact_tag), "POST", 202)
read_scan = Permission(
"{}/projects/{}/repositories/{}/artifacts/{}/scan/0/log".format(harbor_base_url, project_name, source_artifact_name,
                          source_artifact_tag), "GET", 404)
# 7. Resource tag actions: ['list', 'create', 'delete']
tag_payload = {
"name": "test-{}".format(int(round(time.time() * 1000)))
}
create_tag = Permission(
"{}/projects/{}/repositories/{}/artifacts/{}/tags".format(harbor_base_url, project_name, source_artifact_name,
source_artifact_tag), "POST", 201, tag_payload)
list_tag = Permission(
"{}/projects/{}/repositories/{}/artifacts/{}/tags".format(harbor_base_url, project_name, source_artifact_name,
source_artifact_tag), "GET", 200)
delete_tag = Permission(
"{}/projects/{}/repositories/{}/artifacts/{}/tags/tag_name_does_not_exist".format(harbor_base_url, project_name,
source_artifact_name,
source_artifact_tag), "DELETE",
404)
# 8. Resource accessory actions: ['list']
list_accessory = Permission(
"{}/projects/{}/repositories/{}/artifacts/{}/accessories".format(harbor_base_url, project_name,
source_artifact_name, source_artifact_tag), "GET",
200)
# 9. Resource artifact-addition actions: ['read']
read_artifact_addition_vul = Permission(
"{}/projects/{}/repositories/{}/artifacts/{}/additions/vulnerabilities".format(harbor_base_url, project_name,
source_artifact_name,
source_artifact_tag), "GET", 200)
read_artifact_addition_dependencies = Permission(
"{}/projects/{}/repositories/{}/artifacts/{}/additions/dependencies".format(harbor_base_url, project_name,
source_artifact_name,
source_artifact_tag), "GET", 400)
# 10. Resource artifact-label actions: ['create', 'delete'],
artifact_label_payload = copy.deepcopy(label_payload)
artifact_label_payload["description"] = "Add label to an artifact"
add_label_to_artifact = Permission(
"{}/projects/{}/repositories/{}/artifacts/{}/labels".format(harbor_base_url, project_name, source_artifact_name,
source_artifact_tag), "POST", 404,
artifact_label_payload)
delete_artifact_label = Permission(
"{}/projects/{}/repositories/{}/artifacts/{}/labels/0".format(harbor_base_url, project_name, source_artifact_name,
source_artifact_tag), "DELETE", 404,
artifact_label_payload)
# 11. Resource scanner actions: ['create', 'read']
update_project_scanner = Permission("{}/projects/{}/scanner".format(harbor_base_url, project_id), "PUT", 200,
{"uuid": "faked_uuid"})
read_project_scanner = Permission("{}/projects/{}/scanner".format(harbor_base_url, project_id), "GET", 200)
read_project_scanner_candidates = Permission("{}/projects/{}/scanner/candidates".format(harbor_base_url, project_id),
"GET", 200)
# 12. Resource preheat-policy actions: ['read', 'list', 'create', 'update', 'delete']
create_preheat_policy = Permission("{}/projects/{}/preheat/policies".format(harbor_base_url, project_name), "POST", 500,
{})
list_preheat_policy = Permission("{}/projects/{}/preheat/policies".format(harbor_base_url, project_name), "GET", 200)
read_preheat_policy = Permission(
"{}/projects/{}/preheat/policies/policy_name_does_not_exist".format(harbor_base_url, project_name), "GET", 404)
update_preheat_policy = Permission(
"{}/projects/{}/preheat/policies/policy_name_does_not_exist".format(harbor_base_url, project_name), "PUT", 500)
delete_preheat_policy = Permission(
"{}/projects/{}/preheat/policies/policy_name_does_not_exist".format(harbor_base_url, project_name), "DELETE", 404)
# 13. Resource immutable-tag actions: ['list', 'create', 'update', 'delete']
immutable_tag_rule_payload = {
"disabled": False,
"scope_selectors": {
"repository": [{"kind": "doublestar", "decoration": "repoMatches",
"pattern": "{}".format(int(round(time.time() * 1000)))}]},
"tag_selectors": [
{"kind": "doublestar", "decoration": "matches", "pattern": "{}".format(int(round(time.time() * 1000)))}],
}
create_immutable_tag_rule = Permission("{}/projects/{}/immutabletagrules".format(harbor_base_url, project_id), "POST",
201,
immutable_tag_rule_payload)
list_immutable_tag_rule = Permission("{}/projects/{}/immutabletagrules".format(harbor_base_url, project_id), "GET", 200)
update_immutable_tag_rule = Permission("{}/projects/{}/immutabletagrules/0".format(harbor_base_url, project_id), "PUT",
404)
delete_immutable_tag_rule = Permission("{}/projects/{}/immutabletagrules/0".format(harbor_base_url, project_id),
"DELETE", 404)
# 14. Resource tag-retention actions: ['read', 'list', 'create', 'update', 'delete']
tag_retention_rule_payload = {
"algorithm": "or",
"rules": [
{
"disabled": False,
"action": "retain",
"scope_selectors": {
"repository": [
{
"kind": "doublestar",
"decoration": "repoMatches",
"pattern": "**"
}
]
},
"tag_selectors": [
{
"kind": "doublestar",
"decoration": "matches",
"pattern": "**",
"extras": "{\"untagged\":true}"
}
],
"params": {},
"template": "always"
}
],
"trigger": {
"kind": "Schedule",
"references": {},
"settings": {
"cron": ""
}
},
"scope": {
"level": "project",
"ref": int(project_id)
}
}
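# A project can hold at most one tag retention policy: if the project metadata already
# contains a `retention_id`, creating another policy is expected to fail with 400,
# otherwise the creation should succeed with 201.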
response = requests.request("GET", "{}/projects/{}/metadatas/retention_id".format(harbor_base_url, project_id),
data=None, verify=False,
auth=(admin_name, admin_password), headers={"Content-Type": "application/json"})
create_status_code = 400 if "retention_id" in (json.loads(response.text)) else 201
create_tag_retention_rule = Permission("{}/retentions".format(harbor_base_url), "POST",
create_status_code,
tag_retention_rule_payload)
# get retention_id
response1 = requests.request("GET", "{}/projects/{}/metadatas/retention_id".format(harbor_base_url, project_id),
data=None, verify=False,
auth=(admin_name, admin_password), headers={"Content-Type": "application/json"})
retention_id = json.loads(response1.text)["retention_id"]
update_retention_payload = copy.deepcopy(tag_retention_rule_payload)
update_retention_payload["rules"][0]["disabled"] = True
read_tag_retention = Permission("{}/retentions/{}".format(harbor_base_url, retention_id), "GET", 200)
update_tag_retention = Permission("{}/retentions/{}".format(harbor_base_url, retention_id), "PUT", 200,
update_retention_payload)
delete_tag_retention = Permission("{}/retentions/{}".format(harbor_base_url, retention_id), "DELETE", 200)
execute_tag_retention = Permission("{}/retentions/{}/executions".format(harbor_base_url, retention_id), "POST", 201)
list_tag_retention_execution = Permission("{}/retentions/{}/executions".format(harbor_base_url, retention_id), "GET",
200)
stop_tag_retention = Permission("{}/retentions/{}/executions/0".format(harbor_base_url, retention_id), "PATCH", 404,
{"action": "stop"})
list_tag_retention_tasks = Permission("{}/retentions/{}/executions/0/tasks".format(harbor_base_url, retention_id),
"GET", 404)
read_tag_retention_tasks = Permission("{}/retentions/{}/executions/0/tasks/0".format(harbor_base_url, retention_id),
"GET", 404)
# 15. Resource log actions: ['list']
list_log = Permission("{}/projects/{}/logs".format(harbor_base_url, project_name), "GET", 200)
# 16. Resource notification-policy actions: ['read', 'list', 'create', 'update', 'delete']
webhook_payload = {
"name": "webhook-{}".format(int(round(time.time() * 1000))),
"description": "Just for test",
"project_id": int(project_id),
"targets": [
{
"type": "http",
"address": "http://test.com",
"skip_cert_verify": True,
"payload_format": "CloudEvents"
}
],
"event_types": [
"PUSH_ARTIFACT"
],
"enabled": True
}
create_webhook = Permission("{}/projects/{}/webhook/policies".format(harbor_base_url, project_id), "POST",
201,
webhook_payload)
list_webhook = Permission("{}/projects/{}/webhook/policies".format(harbor_base_url, project_id), "GET",
200)
read_webhook = Permission("{}/projects/{}/webhook/policies/0".format(harbor_base_url, project_id), "GET",
404)
update_webhook = Permission("{}/projects/{}/webhook/policies/0".format(harbor_base_url, project_id), "PUT",
404, {})
delete_webhook = Permission("{}/projects/{}/webhook/policies/0".format(harbor_base_url, project_id), "DELETE",
404)
list_webhook_executions = Permission("{}/projects/{}/webhook/policies/0/executions".format(harbor_base_url, project_id),
"GET", 404)
list_webhook_executions_tasks = Permission(
"{}/projects/{}/webhook/policies/0/executions/0/tasks".format(harbor_base_url, project_id), "GET", 404)
read_webhook_executions_tasks = Permission(
"{}/projects/{}/webhook/policies/0/executions/0/tasks/0/log".format(harbor_base_url, project_id), "GET", 404)
list_webhook_events = Permission("{}/projects/{}/webhook/events".format(harbor_base_url, project_id), "GET", 200)
resource_permissions = {
"label": [create_label, list_label, read_label, update_label, delete_label],
"project": [read_project, update_project, deletable_project, delete_project],
"metadata": [create_metadata, list_metadata, read_metadata, update_metadata, delete_metadata],
"repository": [list_repo, read_repo, update_repo, delete_repo],
"artifact": [list_artifact, read_artifact, copy_artifact, delete_artifact],
"scan": [create_scan, stop_scan, read_scan],
"tag": [create_tag, list_tag, delete_tag],
"accessory": [list_accessory],
"artifact-addition": [read_artifact_addition_vul, read_artifact_addition_dependencies],
"artifact-label": [add_label_to_artifact, delete_artifact_label],
"scanner": [update_project_scanner, read_project_scanner, read_project_scanner_candidates],
"preheat-policy": [create_preheat_policy, list_preheat_policy, read_preheat_policy, update_preheat_policy,
delete_preheat_policy],
"immutable-tag": [create_immutable_tag_rule, list_immutable_tag_rule, update_immutable_tag_rule,
delete_immutable_tag_rule],
"tag-retention": [create_tag_retention_rule, read_tag_retention, update_tag_retention, execute_tag_retention,
list_tag_retention_execution, stop_tag_retention, list_tag_retention_tasks,
read_tag_retention_tasks, delete_tag_retention],
"log": [list_log],
"notification-policy": [create_webhook, list_webhook, read_webhook, update_webhook, delete_webhook,
list_webhook_executions, list_webhook_executions_tasks, read_webhook_executions_tasks,
list_webhook_events]
}
def main():
for permission in resource_permissions[resource]:
print("=================================================")
print("call: {} {}".format(permission.method, permission.url))
print("payload: {}".format(json.dumps(permission.payload)))
print("=================================================\n")
permission.call()
if __name__ == "__main__":
main()

View File

@ -194,10 +194,10 @@ class TestRobotAccount(unittest.TestCase):
# In this privilege check list, make sure that each row and each column must # In this privilege check list, make sure that each row and each column must
# contain both True and False values. # contain both True and False values.
check_list = [ check_list = [
[True, True, True, True, True, True, False, True, False, True], [True, True, True, False, True, False, True],
[False, False, False, False, True, True, False, True, True, False], [False, False, False, False, True, True, False],
[True, False, True, False, True, False, True, False, True, True], [True, False, True, True, False, True, True],
[False, False, False, True, False, True, False, True, True, False] [False, False, False, False, True, True, False]
] ]
access_list_list = [] access_list_list = []
for i in range(len(check_list)): for i in range(len(check_list)):
@ -240,12 +240,12 @@ class TestRobotAccount(unittest.TestCase):
repo_name, tag = push_self_build_image_to_project(project_access["project_name"], harbor_server, ADMIN_CLIENT["username"], ADMIN_CLIENT["password"], "test_create_tag", "latest_1") repo_name, tag = push_self_build_image_to_project(project_access["project_name"], harbor_server, ADMIN_CLIENT["username"], ADMIN_CLIENT["password"], "test_create_tag", "latest_1")
self.artifact.create_tag(project_access["project_name"], repo_name.split('/')[1], tag, "for_delete", **ADMIN_CLIENT) self.artifact.create_tag(project_access["project_name"], repo_name.split('/')[1], tag, "for_delete", **ADMIN_CLIENT)
if project_access["check_list"][6]: #---tag:create--- if project_access["check_list"][3]: #---tag:create---
self.artifact.create_tag(project_access["project_name"], repo_name.split('/')[1], tag, "1.0", **SYSTEM_RA_CLIENT) self.artifact.create_tag(project_access["project_name"], repo_name.split('/')[1], tag, "1.0", **SYSTEM_RA_CLIENT)
else: else:
self.artifact.create_tag(project_access["project_name"], repo_name.split('/')[1], tag, "1.0", expect_status_code = 403, **SYSTEM_RA_CLIENT) self.artifact.create_tag(project_access["project_name"], repo_name.split('/')[1], tag, "1.0", expect_status_code = 403, **SYSTEM_RA_CLIENT)
if project_access["check_list"][7]: #---tag:delete--- if project_access["check_list"][4]: #---tag:delete---
self.artifact.delete_tag(project_access["project_name"], repo_name.split('/')[1], tag, "for_delete", **SYSTEM_RA_CLIENT) self.artifact.delete_tag(project_access["project_name"], repo_name.split('/')[1], tag, "for_delete", **SYSTEM_RA_CLIENT)
else: else:
self.artifact.delete_tag(project_access["project_name"], repo_name.split('/')[1], tag, "for_delete", expect_status_code = 403, **SYSTEM_RA_CLIENT) self.artifact.delete_tag(project_access["project_name"], repo_name.split('/')[1], tag, "for_delete", expect_status_code = 403, **SYSTEM_RA_CLIENT)
@ -253,12 +253,12 @@ class TestRobotAccount(unittest.TestCase):
repo_name, tag = push_self_build_image_to_project(project_access["project_name"], harbor_server, ADMIN_CLIENT["username"], ADMIN_CLIENT["password"], "test_create_artifact_label", "latest_1") repo_name, tag = push_self_build_image_to_project(project_access["project_name"], harbor_server, ADMIN_CLIENT["username"], ADMIN_CLIENT["password"], "test_create_artifact_label", "latest_1")
#Add project level label to artifact #Add project level label to artifact
label_id, _ = self.label.create_label(project_id = project_access["project_id"], scope = "p", **ADMIN_CLIENT) label_id, _ = self.label.create_label(project_id = project_access["project_id"], scope = "p", **ADMIN_CLIENT)
if project_access["check_list"][8]: #---artifact-label:create--- if project_access["check_list"][5]: #---artifact-label:create---
self.artifact.add_label_to_reference(project_access["project_name"], repo_name.split('/')[1], tag, int(label_id), **SYSTEM_RA_CLIENT) self.artifact.add_label_to_reference(project_access["project_name"], repo_name.split('/')[1], tag, int(label_id), **SYSTEM_RA_CLIENT)
else: else:
self.artifact.add_label_to_reference(project_access["project_name"], repo_name.split('/')[1], tag, int(label_id), expect_status_code = 403, **SYSTEM_RA_CLIENT) self.artifact.add_label_to_reference(project_access["project_name"], repo_name.split('/')[1], tag, int(label_id), expect_status_code = 403, **SYSTEM_RA_CLIENT)
if project_access["check_list"][9]: #---scan:create--- if project_access["check_list"][6]: #---scan:create---
self.scan.scan_artifact(project_access["project_name"], repo_name.split('/')[1], tag, **SYSTEM_RA_CLIENT) self.scan.scan_artifact(project_access["project_name"], repo_name.split('/')[1], tag, **SYSTEM_RA_CLIENT)
else: else:
self.scan.scan_artifact(project_access["project_name"], repo_name.split('/')[1], tag, expect_status_code = 403, **SYSTEM_RA_CLIENT) self.scan.scan_artifact(project_access["project_name"], repo_name.split('/')[1], tag, expect_status_code = 403, **SYSTEM_RA_CLIENT)
@ -325,7 +325,7 @@ class TestRobotAccount(unittest.TestCase):
self.verify_repository_unpushable(project_access_list, SYSTEM_RA_CLIENT) self.verify_repository_unpushable(project_access_list, SYSTEM_RA_CLIENT)
#20. Add a system robot account with all projects covered; #20. Add a system robot account with all projects covered;
all_true_access_list= self.robot.create_access_list( [True] * 10 ) all_true_access_list= self.robot.create_access_list( [True] * 7 )
robot_account_Permissions_list = [] robot_account_Permissions_list = []
robot_account_Permissions = v2_swagger_client.RobotPermission(kind = "project", namespace = "*", access = all_true_access_list) robot_account_Permissions = v2_swagger_client.RobotPermission(kind = "project", namespace = "*", access = all_true_access_list)
robot_account_Permissions_list.append(robot_account_Permissions) robot_account_Permissions_list.append(robot_account_Permissions)

View File

@ -3,5 +3,5 @@ set -x
set -e set -e
sudo make package_online GOBUILDTAGS="include_oss include_gcs" VERSIONTAG=dev-gitaction PKGVERSIONTAG=dev-gitaction UIVERSIONTAG=dev-gitaction GOBUILDIMAGE=golang:1.21.3 COMPILETAG=compile_golangimage TRIVYFLAG=true HTTPPROXY= PULL_BASE_FROM_DOCKERHUB=false sudo make package_online GOBUILDTAGS="include_oss include_gcs" VERSIONTAG=dev-gitaction PKGVERSIONTAG=dev-gitaction UIVERSIONTAG=dev-gitaction GOBUILDIMAGE=golang:1.21.4 COMPILETAG=compile_golangimage TRIVYFLAG=true HTTPPROXY= PULL_BASE_FROM_DOCKERHUB=false
sudo make package_offline GOBUILDTAGS="include_oss include_gcs" VERSIONTAG=dev-gitaction PKGVERSIONTAG=dev-gitaction UIVERSIONTAG=dev-gitaction GOBUILDIMAGE=golang:1.21.3 COMPILETAG=compile_golangimage TRIVYFLAG=true HTTPPROXY= PULL_BASE_FROM_DOCKERHUB=false sudo make package_offline GOBUILDTAGS="include_oss include_gcs" VERSIONTAG=dev-gitaction PKGVERSIONTAG=dev-gitaction UIVERSIONTAG=dev-gitaction GOBUILDIMAGE=golang:1.21.4 COMPILETAG=compile_golangimage TRIVYFLAG=true HTTPPROXY= PULL_BASE_FROM_DOCKERHUB=false

View File

@ -375,13 +375,29 @@ Back Project Home
[Arguments] ${project_name} [Arguments] ${project_name}
Retry Link Click //a[contains(.,'${project_name}')] Retry Link Click //a[contains(.,'${project_name}')]
Should Not Be Signed By Cosign Should Be Signed
[Arguments] ${tag}
Retry Wait Element Visible //clr-dg-row[contains(.,'${tag}')]//clr-icon[contains(@class,'signed')]
Should Not Be Signed
[Arguments] ${tag} [Arguments] ${tag}
Retry Wait Element Visible //clr-dg-row[contains(.,'${tag}')]//clr-icon[contains(@class,'color-red')] Retry Wait Element Visible //clr-dg-row[contains(.,'${tag}')]//clr-icon[contains(@class,'color-red')]
Should Be Signed By Cosign Should Be Signed By Cosign
[Arguments] ${tag} [Arguments] ${tag}=${null} ${digest}=${null}
Retry Wait Element Visible //clr-dg-row[contains(.,'${tag}')]//clr-icon[contains(@class,'signed')] IF '${tag}' != '${null}'
Retry Wait Element Visible //clr-dg-row[./clr-expandable-animation/div/div/div/clr-dg-cell/div/clr-tooltip/div/div/span[contains(.,'${tag}')] and .//clr-dg-row[.//img[@title='signature.cosign']]]
ELSE
Retry Wait Element Visible //clr-dg-row[./clr-expandable-animation/div/div/div/clr-dg-cell/div/a[contains(.,'${digest}')] and .//clr-dg-row[.//img[@title='signature.cosign']]]
END
Should Be Signed By Notation
[Arguments] ${tag}=${null} ${digest}=${null}
IF '${tag}' != '${null}'
Retry Wait Element Visible //clr-dg-row[./clr-expandable-animation/div/div/div/clr-dg-cell/div/clr-tooltip/div/div/span[contains(.,'${tag}')] and .//clr-dg-row[.//img[@title='signature.notation']]]
ELSE
Retry Wait Element Visible //clr-dg-row[./clr-expandable-animation/div/div/div/clr-dg-cell/div/a[contains(.,'${digest}')] and .//clr-dg-row[.//img[@title='signature.notation']]]
END
Delete Accessory Delete Accessory
[Arguments] ${tag} [Arguments] ${tag}

View File

@ -0,0 +1,26 @@
# Copyright Project Harbor Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
*** Settings ***
Documentation    This resource provides helper functions for notation operations
Library OperatingSystem
Library Process
*** Keywords ***
Notation Generate Cert
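    # Generates a local test RSA key and self-signed certificate named wabbit-networks.io
    # and registers it as the default signing key for notation (test use only).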
Run And Return Rc And Output notation cert generate-test --default wabbit-networks.io
Notation Sign
[Arguments] ${artifact}
Wait Unitl Command Success notation sign -d --allow-referrers-api ${artifact}

View File

@ -82,6 +82,7 @@ Resource SeleniumUtil.robot
Resource Nightly-Util.robot Resource Nightly-Util.robot
Resource APITest-Util.robot Resource APITest-Util.robot
Resource Cosign_Util.robot Resource Cosign_Util.robot
Resource Notation_Util.robot
Resource Imgpkg-Util.robot Resource Imgpkg-Util.robot
Resource Webhook-Util.robot Resource Webhook-Util.robot
Resource TestCaseBody.robot Resource TestCaseBody.robot

View File

@ -474,19 +474,28 @@ Test Case - Copy A Image And Accessory
Create An New Project And Go Into Project ${source_project} Create An New Project And Go Into Project ${source_project}
Push Image With Tag ${ip} ${user} ${pwd} ${source_project} ${image} ${tag} Push Image With Tag ${ip} ${user} ${pwd} ${source_project} ${image} ${tag}
Cosign Generate Key Pair
Docker Login ${ip} ${user} ${pwd} Docker Login ${ip} ${user} ${pwd}
Cosign Generate Key Pair
Cosign Sign ${ip}/${source_project}/${image}:${tag} Cosign Sign ${ip}/${source_project}/${image}:${tag}
Docker Logout ${ip} Notation Generate Cert
Notation Sign ${ip}/${source_project}/${image}:${tag}
Go Into Repo ${source_project} ${image} Go Into Repo ${source_project} ${image}
Should Be Signed ${tag}
Retry Button Click ${artifact_list_accessory_btn}
Should Be Signed By Cosign ${tag} Should Be Signed By Cosign ${tag}
Should Be Signed By Notation ${tag}
Copy Image ${tag} ${target_project} ${image} Copy Image ${tag} ${target_project} ${image}
Retry Double Keywords When Error Go Into Project ${target_project} Retry Wait Until Page Contains ${image} Retry Double Keywords When Error Go Into Project ${target_project} Retry Wait Until Page Contains ${image}
Go Into Repo ${target_project} ${image} Go Into Repo ${target_project} ${image}
Retry Wait Until Page Contains Element //clr-dg-row[contains(.,${tag})] Retry Wait Until Page Contains Element //clr-dg-row[contains(.,${tag})]
Should Be Signed ${tag}
Retry Button Click ${artifact_list_accessory_btn}
Should Be Signed By Cosign ${tag} Should Be Signed By Cosign ${tag}
Should Be Signed By Notation ${tag}
Docker Logout ${ip}
Close Browser Close Browser
Test Case - Create An New Project With Quotas Set Test Case - Create An New Project With Quotas Set
@ -772,14 +781,14 @@ Test Case - Cosign And Cosign Deployment Security Policy
Push Image With Tag ${ip} ${user} ${pwd} project${d} ${image} ${tag} Push Image With Tag ${ip} ${user} ${pwd} project${d} ${image} ${tag}
Go Into Project project${d} Go Into Project project${d}
Go Into Repo project${d} ${image} Go Into Repo project${d} ${image}
Should Not Be Signed By Cosign ${tag} Should Not Be Signed ${tag}
Cannot Pull Image ${ip} ${user} ${pwd} project${d} ${image}:${tag} err_msg=The image is not signed by cosign. Cannot Pull Image ${ip} ${user} ${pwd} project${d} ${image}:${tag} err_msg=The image is not signed by cosign.
Cosign Generate Key Pair Cosign Generate Key Pair
Cosign Verify ${ip}/project${d}/${image}:${tag} ${false} Cosign Verify ${ip}/project${d}/${image}:${tag} ${false}
Cosign Sign ${ip}/project${d}/${image}:${tag} Cosign Sign ${ip}/project${d}/${image}:${tag}
Cosign Verify ${ip}/project${d}/${image}:${tag} ${true} Cosign Verify ${ip}/project${d}/${image}:${tag} ${true}
Retry Double Keywords When Error Retry Element Click ${artifact_list_refresh_btn} Should Be Signed By Cosign ${tag} Retry Double Keywords When Error Retry Element Click ${artifact_list_refresh_btn} Should Be Signed ${tag}
Pull image ${ip} ${user} ${pwd} project${d} ${image}:${tag} Pull image ${ip} ${user} ${pwd} project${d} ${image}:${tag}
Retry Double Keywords When Error Delete Accessory ${tag} Should be Accessory deleted ${tag} Retry Double Keywords When Error Delete Accessory ${tag} Should be Accessory deleted ${tag}

View File

@ -389,16 +389,16 @@ Test Case - Robot Account Do Replication
Logout Harbor Logout Harbor
Sign In Harbor https://${ip1} ${HARBOR_ADMIN} ${HARBOR_PASSWORD} Sign In Harbor https://${ip1} ${HARBOR_ADMIN} ${HARBOR_PASSWORD}
Image Should Be Replicated To Project project_dest${d} ${image1} Image Should Be Replicated To Project project_dest${d} ${image1}
Should Be Signed By Cosign ${tag1} Should Be Signed ${tag1}
Image Should Be Replicated To Project project_dest${d} ${image2} Image Should Be Replicated To Project project_dest${d} ${image2}
Should Be Signed By Cosign ${tag2} Should Be Signed ${tag2}
Back Project Home project_dest${d} Back Project Home project_dest${d}
Go Into Repo project_dest${d} ${index} Go Into Repo project_dest${d} ${index}
Should Be Signed By Cosign ${index_tag} Should Be Signed ${index_tag}
Go Into Repo project_dest${d} ${index} Go Into Repo project_dest${d} ${index}
Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed By Cosign ${image1_short_sha256} Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed ${image1_short_sha256}
Go Into Repo project_dest${d} ${index} Go Into Repo project_dest${d} ${index}
Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Not Be Signed By Cosign ${image2_short_sha256} Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Not Be Signed ${image2_short_sha256}
# pull mode # pull mode
Logout Harbor Logout Harbor
Sign In Harbor ${HARBOR_URL} ${HARBOR_ADMIN} ${HARBOR_PASSWORD} Sign In Harbor ${HARBOR_URL} ${HARBOR_ADMIN} ${HARBOR_PASSWORD}
@ -409,16 +409,16 @@ Test Case - Robot Account Do Replication
Check Latest Replication Job Status Succeeded Check Latest Replication Job Status Succeeded
Check Latest Replication Enabled Copy By Chunk Check Latest Replication Enabled Copy By Chunk
Image Should Be Replicated To Project project_dest${d} ${image1} Image Should Be Replicated To Project project_dest${d} ${image1}
Should Be Signed By Cosign ${tag1} Should Be Signed ${tag1}
Image Should Be Replicated To Project project_dest${d} ${image2} Image Should Be Replicated To Project project_dest${d} ${image2}
Should Be Signed By Cosign ${tag2} Should Be Signed ${tag2}
Back Project Home project_dest${d} Back Project Home project_dest${d}
Go Into Repo project_dest${d} ${index} Go Into Repo project_dest${d} ${index}
Should Be Signed By Cosign ${index_tag} Should Be Signed ${index_tag}
Go Into Repo project_dest${d} ${index} Go Into Repo project_dest${d} ${index}
Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed By Cosign ${image1_short_sha256} Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed ${image1_short_sha256}
Go Into Repo project_dest${d} ${index} Go Into Repo project_dest${d} ${index}
Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Not Be Signed By Cosign ${image2_short_sha256} Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Not Be Signed ${image2_short_sha256}
Close Browser Close Browser
Test Case - Replication Triggered By Events Test Case - Replication Triggered By Events
@ -468,28 +468,28 @@ Test Case - Replication Triggered By Events
Logout Harbor Logout Harbor
Sign In Harbor ${HARBOR_URL} ${HARBOR_ADMIN} ${HARBOR_PASSWORD} Sign In Harbor ${HARBOR_URL} ${HARBOR_ADMIN} ${HARBOR_PASSWORD}
Go Into Repo project${d} ${image1} Go Into Repo project${d} ${image1}
Should Be Signed By Cosign ${tag1} Should Be Signed ${tag1}
Go Into Repo project${d} ${index} Go Into Repo project${d} ${index}
Should Be Signed By Cosign ${index_tag} Should Be Signed ${index_tag}
Go Into Repo project${d} ${index} Go Into Repo project${d} ${index}
Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed By Cosign ${image1_short_sha256} Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed ${image1_short_sha256}
Go Into Repo project${d} ${image2} Go Into Repo project${d} ${image2}
Should Not Be Signed By Cosign ${tag2} Should Not Be Signed ${tag2}
Go Into Repo project${d} ${index} Go Into Repo project${d} ${index}
Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Not Be Signed By Cosign ${image2_short_sha256} Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Not Be Signed ${image2_short_sha256}
Logout Harbor Logout Harbor
Sign In Harbor https://${ip1} ${HARBOR_ADMIN} ${HARBOR_PASSWORD} Sign In Harbor https://${ip1} ${HARBOR_ADMIN} ${HARBOR_PASSWORD}
Go Into Repo project_dest${d} ${image1} Go Into Repo project_dest${d} ${image1}
Should Be Signed By Cosign ${tag1} Should Be Signed ${tag1}
Go Into Repo project_dest${d} ${index} Go Into Repo project_dest${d} ${index}
Should Be Signed By Cosign ${index_tag} Should Be Signed ${index_tag}
Go Into Repo project_dest${d} ${index} Go Into Repo project_dest${d} ${index}
Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed By Cosign ${image1_short_sha256} Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed ${image1_short_sha256}
Go Into Repo project_dest${d} ${image2} Go Into Repo project_dest${d} ${image2}
Should Not Be Signed By Cosign ${tag2} Should Not Be Signed ${tag2}
Go Into Repo project_dest${d} ${index} Go Into Repo project_dest${d} ${index}
Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Not Be Signed By Cosign ${image2_short_sha256} Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Not Be Signed ${image2_short_sha256}
Logout Harbor Logout Harbor
# delete # delete
Sign In Harbor ${HARBOR_URL} ${HARBOR_ADMIN} ${HARBOR_PASSWORD} Sign In Harbor ${HARBOR_URL} ${HARBOR_ADMIN} ${HARBOR_PASSWORD}
@ -498,13 +498,13 @@ Test Case - Replication Triggered By Events
Repo Not Exist project${d} ${image2} Repo Not Exist project${d} ${image2}
Go Into Repo project${d} ${image1} Go Into Repo project${d} ${image1}
Retry Double Keywords When Error Delete Accessory ${tag1} Should be Accessory deleted ${tag1} Retry Double Keywords When Error Delete Accessory ${tag1} Should be Accessory deleted ${tag1}
Should Not Be Signed By Cosign ${tag1} Should Not Be Signed ${tag1}
Go Into Repo project${d} ${index} Go Into Repo project${d} ${index}
Retry Double Keywords When Error Delete Accessory ${index_tag} Should be Accessory deleted ${index_tag} Retry Double Keywords When Error Delete Accessory ${index_tag} Should be Accessory deleted ${index_tag}
Should Not Be Signed By Cosign ${index_tag} Should Not Be Signed ${index_tag}
Click Index Achieve ${index_tag} Click Index Achieve ${index_tag}
Retry Double Keywords When Error Delete Accessory ${image1_short_sha256} Should be Accessory deleted ${image1_short_sha256} Retry Double Keywords When Error Delete Accessory ${image1_short_sha256} Should be Accessory deleted ${image1_short_sha256}
Should Not Be Signed By Cosign ${image1_short_sha256} Should Not Be Signed ${image1_short_sha256}
Logout Harbor Logout Harbor
Sign In Harbor https://${ip1} ${HARBOR_ADMIN} ${HARBOR_PASSWORD} Sign In Harbor https://${ip1} ${HARBOR_ADMIN} ${HARBOR_PASSWORD}
@ -512,12 +512,12 @@ Test Case - Replication Triggered By Events
Wait Until Page Contains We couldn't find any artifacts! Wait Until Page Contains We couldn't find any artifacts!
Go Into Repo project_dest${d} ${image1} Go Into Repo project_dest${d} ${image1}
Should be Accessory deleted ${tag1} Should be Accessory deleted ${tag1}
Should Not Be Signed By Cosign ${tag1} Should Not Be Signed ${tag1}
Go Into Repo project_dest${d} ${index} Go Into Repo project_dest${d} ${index}
Should be Accessory deleted ${index_tag} Should be Accessory deleted ${index_tag}
Should Not Be Signed By Cosign ${index_tag} Should Not Be Signed ${index_tag}
Retry Double Keywords When Error Click Index Achieve ${index_tag} Should be Accessory deleted ${image1_short_sha256} Retry Double Keywords When Error Click Index Achieve ${index_tag} Should be Accessory deleted ${image1_short_sha256}
Should Not Be Signed By Cosign ${image1_short_sha256} Should Not Be Signed ${image1_short_sha256}
Close Browser Close Browser
Test Case - Enable Replication Of Cosign Deployment Security Policy Test Case - Enable Replication Of Cosign Deployment Security Policy
@ -595,15 +595,15 @@ Test Case - Enable Replication Of Cosign Deployment Security Policy
Repo Exist project_pull_dest${d} ${image2} Repo Exist project_pull_dest${d} ${image2}
Repo Exist project_pull_dest${d} ${index} Repo Exist project_pull_dest${d} ${index}
Go Into Repo project_pull_dest${d} ${image1} Go Into Repo project_pull_dest${d} ${image1}
Should Be Signed By Cosign ${tag1} Should Be Signed ${tag1}
Go Into Repo project_pull_dest${d} ${image2} Go Into Repo project_pull_dest${d} ${image2}
Should Be Signed By Cosign ${tag2} Should Be Signed ${tag2}
Go Into Repo project_pull_dest${d} ${index} Go Into Repo project_pull_dest${d} ${index}
Should Be Signed By Cosign ${index_tag} Should Be Signed ${index_tag}
Go Into Repo project_pull_dest${d} ${index} Go Into Repo project_pull_dest${d} ${index}
Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed By Cosign ${image1_short_sha256} Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed ${image1_short_sha256}
Go Into Repo project_pull_dest${d} ${index} Go Into Repo project_pull_dest${d} ${index}
Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed By Cosign ${image2_short_sha256} Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed ${image2_short_sha256}
# check project_push_dest # check project_push_dest
Go Into Project project_push_dest${d} Go Into Project project_push_dest${d}
Switch To Project Repo Switch To Project Repo
@ -611,15 +611,15 @@ Test Case - Enable Replication Of Cosign Deployment Security Policy
Repo Exist project_push_dest${d} ${image2} Repo Exist project_push_dest${d} ${image2}
Repo Exist project_push_dest${d} ${index} Repo Exist project_push_dest${d} ${index}
Go Into Repo project_push_dest${d} ${image1} Go Into Repo project_push_dest${d} ${image1}
Should Be Signed By Cosign ${tag1} Should Be Signed ${tag1}
Go Into Repo project_push_dest${d} ${image2} Go Into Repo project_push_dest${d} ${image2}
Should Be Signed By Cosign ${tag2} Should Be Signed ${tag2}
Go Into Repo project_push_dest${d} ${index} Go Into Repo project_push_dest${d} ${index}
Should Be Signed By Cosign ${index_tag} Should Be Signed ${index_tag}
Go Into Repo project_push_dest${d} ${index} Go Into Repo project_push_dest${d} ${index}
Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed By Cosign ${image1_short_sha256} Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed ${image1_short_sha256}
Go Into Repo project_push_dest${d} ${index} Go Into Repo project_push_dest${d} ${index}
Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed By Cosign ${image2_short_sha256} Retry Double Keywords When Error Click Index Achieve ${index_tag} Should Be Signed ${image2_short_sha256}
Close Browser Close Browser
Test Case - Carvel Imgpkg Copy To Harbor Test Case - Carvel Imgpkg Copy To Harbor