Merge pull request #14041 from danfengliu/upgrade-containerd-in-e2e-dockerfile

Upgrade containerd in E2E Dockerfile
danfengliu 2021-01-29 09:42:41 +08:00 committed by GitHub
commit b0e54f5a33
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 138 additions and 68 deletions

View File

@ -38,6 +38,11 @@ def docker_manifest_create(index, manifests):
print( "Docker Manifest Command: ", command)
base.run_command(command)
def docker_images_all_list():
command = ["sudo", "docker","images","-a"]
print( "Docker images Command: ", command)
base.run_command(command)
def docker_manifest_push(index):
command = ["sudo", "docker","manifest","push",index]
print( "Docker Manifest Command: ", command)
@ -116,7 +121,15 @@ class DockerAPI(object):
if str(err_message).lower().find("error".lower()) >= 0:
raise Exception(r"It was not supposed to catch an error when logging in to registry {}, return message is [{}]".format(registry, err_message))
def docker_image_pull(self, image, tag = None, expected_error_message = None):
def docker_image_remove(self, image, tag = "latest"):
docker_images_all_list()
try:
self.DCLIENT.remove_image(image+":"+tag, force=True, noprune=False)
except Exception as err:
print( "Docker image remove catch exception:", str(err))
docker_images_all_list()
def docker_image_pull(self, image, tag = None, expected_error_message = None, is_remove_image = True):
ret = ""
err_message = ""
if tag is not None:
@ -144,6 +157,8 @@ class DockerAPI(object):
else:
if str(err_message).lower().find("error".lower()) >= 0:
raise Exception(r"It was not supposed to catch an error when pulling image {}, return message is [{}]".format(image, err_message))
if is_remove_image:
self.docker_image_remove(image, _tag)
def docker_image_tag(self, image, harbor_registry, tag = None):
_tag = base._random_name("tag")
@ -185,6 +200,7 @@ class DockerAPI(object):
def docker_image_build(self, harbor_registry, tags=None, size=1, expected_error_message = None):
ret = ""
err_message = ""
docker_images_all_list()
try:
baseimage='busybox:latest'
self.DCLIENT.login(username=DOCKER_USER, password=DOCKER_PWD)
@ -208,7 +224,7 @@ class DockerAPI(object):
ret = self.DCLIENT.push(repo)
print("docker_image_push ret:", ret)
print("build image {} with size {}".format(repo, size))
self.DCLIENT.remove_image(repo)
self.DCLIENT.remove_image(repo, force=True, noprune=False)
self.DCLIENT.remove_container(c)
#self.DCLIENT.pull(repo)
#image = self.DCLIENT2.images.get(repo)
@ -229,3 +245,4 @@ class DockerAPI(object):
else:
if str(err_message).lower().find("error".lower()) >= 0:
raise Exception(r"It was not supposed to catch an error when building image {}, return message is [{}]".format(harbor_registry, err_message))
docker_images_all_list()
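For context on this library change: docker_image_pull now defaults to is_remove_image = True, so every verified pull is followed by docker_image_remove, and docker_images_all_list dumps the local image list before and after for debugging. A minimal sketch of the intended calling pattern (illustrative only, reusing the names defined in this file):

    # Illustrative sketch; DockerAPI, DOCKER_USER and DOCKER_PWD come from this test library.
    api = DockerAPI()
    api.docker_login("docker", DOCKER_USER, DOCKER_PWD)
    # Default behavior: pull, verify, then remove the local copy via docker_image_remove().
    api.docker_image_pull("busybox", tag="latest")
    # Keep the local copy when the caller still needs it (e.g. to tag and push it later).
    api.docker_image_pull("busybox", tag="latest", is_remove_image=False)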

View File

@ -19,7 +19,7 @@ def push_image_to_project(project_name, registry, username, password, image, tag
print("Start to push image {}/{}/{}:{}".format(registry, project_name, image, tag) )
_docker_api = DockerAPI()
_docker_api.docker_login("docker", DOCKER_USER, DOCKER_PWD)
_docker_api.docker_image_pull(image, tag = tag)
_docker_api.docker_image_pull(image, tag = tag, is_remove_image = False)
_docker_api.docker_login(registry, username, password, expected_error_message = expected_login_error_message)
time.sleep(2)
if expected_login_error_message != None:
@ -29,12 +29,14 @@ def push_image_to_project(project_name, registry, username, password, image, tag
image = new_image or image
if profix_for_image == None:
new_harbor_registry, new_tag = _docker_api.docker_image_tag(r'{}:{}'.format(original_name, tag), r'{}/{}/{}'.format(registry, project_name, image), tag = tag)
target_image = r'{}/{}/{}'.format(registry, project_name, image)
else:
new_harbor_registry, new_tag = _docker_api.docker_image_tag(r'{}:{}'.format(original_name, tag), r'{}/{}/{}/{}'.format(registry, project_name, profix_for_image, image), tag = tag)
target_image = r'{}/{}/{}/{}'.format(registry, project_name, profix_for_image, image)
new_harbor_registry, new_tag = _docker_api.docker_image_tag(r'{}:{}'.format(original_name, tag), target_image, tag = tag)
time.sleep(2)
_docker_api.docker_image_push(new_harbor_registry, new_tag, expected_error_message = expected_error_message)
_docker_api.docker_image_remove(target_image, tag=tag)
_docker_api.docker_image_remove(original_name, tag=tag)
return r'{}/{}'.format(project_name, image), new_tag
def push_self_build_image_to_project(project_name, registry, username, password, image, tag, size=2, expected_login_error_message = None, expected_error_message = None):
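push_image_to_project now records the fully qualified target image it tags and, after the push, removes both that tag and the original image locally, so repeated test runs start from an empty image cache. A hedged usage sketch (the registry IP, project and image names below are placeholders):

    # Illustrative only; "10.0.0.1", "project1" and "busybox" are placeholder values.
    repo, tag = push_image_to_project("project1", "10.0.0.1", "admin", "Harbor12345",
                                      "busybox", "latest")
    # repo would be "project1/busybox"; the local target and original tags have
    # already been cleaned up via docker_image_remove().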

View File

@ -2,6 +2,7 @@
IP=$1
NOTARY_URL=$5
PASSHRASE='Harbor12345'
IMAGE=$IP/$2/$3:$4
echo $IP
@ -17,6 +18,8 @@ export DOCKER_CONTENT_TRUST_OFFLINE_PASSPHRASE=$PASSHRASE
export DOCKER_CONTENT_TRUST_TAGGING_PASSPHRASE=$PASSHRASE
docker login -u admin -p Harbor12345 $IP
docker tag $3:$4 $IP/$2/$3:$4
docker push $IP/$2/$3:$4
docker tag $3:$4 $IMAGE
docker push $IMAGE
docker rmi -f $IMAGE

View File

@ -11,7 +11,6 @@ from library.user import User
from library.repository import Repository
from library.repository import push_self_build_image_to_project
from library.repository import pull_harbor_image
class TestProjects(unittest.TestCase):
@suppress_urllib3_warning
def setUp(self):

View File

@ -176,9 +176,9 @@ class TestRobotAccount(unittest.TestCase):
15. Verify the system robot account does not have the corresponding right for this new project;
16. Delete this project;
17. List system robot account successfully;
18. Delete the system robot account;
18. Delete the system robot account;
19. Verify the system robot account does not have the corresponding right;
20. Add a system robot account with all project coverd;
20. Add a system robot account with all projects covered;
21. Verify the system robot account does not have the corresponding right;
"""
#1. Define a number of access lists;
@ -336,7 +336,7 @@ class TestRobotAccount(unittest.TestCase):
#19. Verify the system robot account does not have the corresponding right;
self.verify_repository_unpushable(project_access_list, SYSTEM_RA_CLIENT)
#20. Add a system robot account with all project coverd;
#20. Add a system robot account with all projects covered;
all_true_access_list= self.robot.create_access_list( [True] * 10 )
robot_account_Permissions_list = []
robot_account_Permissions = v2_swagger_client.Permission(kind = "project", namespace = "*", access = all_true_access_list)

View File

@ -18,7 +18,7 @@ harbor_logs_bucket="harbor-ci-logs"
#echo "content_language = en" >> $botofile
#echo "default_project_id = $GS_PROJECT_ID" >> $botofile
DIR="$(cd "$(dirname "$0")" && pwd)"
E2E_IMAGE="goharbor/harbor-e2e-engine:2.6"
E2E_IMAGE="goharbor/harbor-e2e-engine:2.6.1"
# GS util
function uploader {

View File

@ -3,7 +3,7 @@ ENV LANG C.UTF-8
# V 2.0
# V 2.0.1: upgrade docker to version 19.03.12
# V 2.5 Add support for e2e py-test (especially containerd)
# V 2.6 upgrade helm2, helm3 and docker 20.10.0
# V 2.6 docker 19.03.12
# V 2.6.1 upgrade helm2, helm3 and docker 20.10.1
RUN apt-get update && apt-get install -y --no-install-recommends wget curl gnupg2
@ -83,10 +83,9 @@ RUN apt-get update && apt install libnss3-tools && \
RUN pip3 install pyasn1 google-apitools==0.5.31 gsutil robotframework==3.2.1 robotframework-sshlibrary robotframework-httplibrary requests dbbot robotframework-seleniumlibrary==4.3.0 robotframework-pabot robotframework-JSONLibrary --upgrade
ENV CONTAINERD_VERSION 1.3.4
RUN wget https://storage.googleapis.com/cri-containerd-release/cri-containerd-${CONTAINERD_VERSION}.linux-amd64.tar.gz && \
tar --no-overwrite-dir -C / -xzf cri-containerd-${CONTAINERD_VERSION}.linux-amd64.tar.gz
ENV CRI_CONTAINERD_VERSION 1.3.4
RUN wget https://storage.googleapis.com/cri-containerd-release/cri-containerd-${CRI_CONTAINERD_VERSION}.linux-amd64.tar.gz && \
tar --no-overwrite-dir -C / -xzf cri-containerd-${CRI_CONTAINERD_VERSION}.linux-amd64.tar.gz
# Install docker, docker compose
ENV DOCKER_VERSION 20.10.1

View File

@ -85,6 +85,8 @@ Execute Result Should Be
${out} Run Keyword And Ignore Error Retry Wait Until Page Contains Element xpath=//clr-dg-cell[contains(., '${result}')]
Exit For Loop If '${out[0]}'=='PASS'
Sleep 6
Retry Element Click ${project_tag_retention_refresh_xpath}
Retry Wait Until Page Contains Element xpath=${project_tag_retention_record_yes_xpath}
END
Should Be Equal As Strings '${out[0]}' 'PASS'

View File

@ -31,6 +31,7 @@ ${project_tag_retention_config_save_xpath} //*[@id='config-save']
${project_tag_retention_schedule_ok_xpath} //*[@id='schedule-ok']
${project_tag_retention_span_daily_xpath} //cron-selection//div//span[contains(.,'0 0 0 * * *')]
${project_tag_retention_dry_run_xpath} //*[@id='dry-run']
${project_tag_retention_refresh_xpath} //clr-dg-action-bar/button[4]
${project_tag_retention_record_yes_xpath} //clr-datagrid[contains(.,'Yes')]
${project_tag_retention_run_now_xpath} //*[@id='run-now']
${project_tag_retention_execute_run_xpath} //*[@id='execute-run']

View File

@ -22,7 +22,12 @@ Resource ../../resources/Util.robot
Create An New Project And Go Into Project
[Arguments] ${projectname} ${public}=false ${count_quota}=${null} ${storage_quota}=${null} ${storage_quota_unit}=${null} ${proxy_cache}=${false} ${registry}=${null}
Navigate To Projects
Retry Button Click xpath=${create_project_button_xpath}
FOR ${n} IN RANGE 1 8
${out} Run Keyword And Ignore Error Retry Button Click xpath=${create_project_button_xpath}
Log All Return value is ${out[0]}
Exit For Loop If '${out[0]}'=='PASS'
Sleep 1
END
Log To Console Project Name: ${projectname}
Retry Text Input xpath=${project_name_xpath} ${projectname}
${element_project_public}= Set Variable xpath=${project_public_xpath}
@ -84,6 +89,7 @@ Switch To Project Tab Overflow
Sleep 1
Navigate To Projects
Reload Page
Retry Element Click xpath=${projects_xpath}
Sleep 2
@ -204,10 +210,10 @@ Do Log Advanced Search
Retry Element Click xpath=//audit-log//clr-dropdown/button
Retry Element Click xpath=//audit-log//clr-dropdown//a[contains(.,'Others')]
Retry Element Click xpath=//audit-log//hbr-filter//clr-icon
Retry Text Input xpath=//audit-log//hbr-filter//input harbor
Retry Text Input xpath=//audit-log//hbr-filter//input harbor-jobservice
Sleep 1
${rc} = Get Element Count //audit-log//clr-dg-row
Should Be Equal As Integers ${rc} 0
Should Be Equal As Integers ${rc} 1
Retry Click Repo Name
[Arguments] ${repo_name_element}

View File

@ -21,7 +21,7 @@ Resource ../../resources/Util.robot
*** Keywords ***
Create A Random Permission Item List
${permission_item_all_list}= Create List Push&Pull Artifact
${permission_item_all_list}= Create List Push Artifact
... Pull Artifact
... Delete Artifact
... Read Helm Chart
@ -42,6 +42,9 @@ Create A Random Permission Item List
Run Keyword If '${r}'=='1' Append To List ${tmp_list} ${permission_item_all_list}[${i}]
END
Run Keyword If ${tmp_list}==@{EMPTY} Append To List ${tmp_list} ${permission_item_all_list}[${0}]
${push_pos}= Get Index From List ${tmp_list} ${permission_item_all_list}[${0}]
${pull_pos}= Get Index From List ${tmp_list} ${permission_item_all_list}[${1}]
Run Keyword If '${push_pos}' >= '${0}' and '${pull_pos}'=='${-1}' Append To List ${tmp_list} ${permission_item_all_list}[${1}]
[Return] ${tmp_list}
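The three added lines guard an invariant in the random permission list: if Push Artifact was selected but Pull Artifact was not, Pull Artifact is appended, presumably because a push cannot succeed without pull rights. The same invariant, sketched in Python for illustration:

    import random

    PERMISSIONS = ["Push Artifact", "Pull Artifact", "Delete Artifact", "Read Helm Chart"]

    def random_permission_list(items=PERMISSIONS):
        # Keep each permission at random, but never return an empty list.
        chosen = [p for p in items if random.randint(0, 1)] or [items[0]]
        # Push implies pull: add "Pull Artifact" whenever "Push Artifact" is present without it.
        if "Push Artifact" in chosen and "Pull Artifact" not in chosen:
            chosen.append("Pull Artifact")
        return chosen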
Create A Random Project Permission List

View File

@ -94,3 +94,16 @@ Set Default Scanner
[Arguments] ${scanner_name}
Retry Element Click //clr-dg-row[contains(.,'${scanner_name}')]//clr-radio-wrapper/label
Retry Double Keywords When Error Retry Element Click ${scanner_set_default} Retry Wait Until Page Contains Element ${scanner_set_default_success_xpath}
Check Listed In CVE Allowlist
[Arguments] ${project_name} ${image} ${tag} ${cve_id} ${is_in}=Yes
Go Into Project ${project_name}
Go Into Repo ${project_name}/${image}
Go Into Artifact ${tag}
Scroll Element Into View //clr-dg-row[contains(.,'${cve_id}')]
${text}= Get Text //clr-dg-row[contains(.,'${cve_id}')]//clr-dg-cell[7]
Capture Page Screenshot
Log All is_in_allow_list:${text}
Should Be Equal As Strings '${text}' '${is_in}'

View File

@ -226,7 +226,6 @@ Helm3 CLI Push Without Sign In Harbor
#Important Note: All CVE IDs in CVE Allowlist cases must unique!
Body Of Verfiy System Level CVE Allowlist
[Arguments] ${image_argument} ${sha256_argument} ${most_cve_list} ${single_cve}
[Tags] run-once
Init Chrome Driver
${d}= Get Current Date result_format=%m%s
${image}= Set Variable ${image_argument}
@ -240,12 +239,14 @@ Body Of Verfiy System Level CVE Allowlist
Push Image ${ip} ${signin_user} ${signin_pwd} project${d} ${image} sha256=${sha256}
Go Into Project project${d}
Set Vulnerabilty Serverity 2
Cannot Pull Image ${ip} ${signin_user} ${signin_pwd} project${d} ${image} tag=${sha256} err_msg=current image without vulnerability scanning cannot be pulled due to configured policy
Cannot Pull Image ${ip} ${signin_user} ${signin_pwd} project${d} ${image} tag=${sha256} err_msg=cannot be pulled due to configured policy
Go Into Project project${d}
Go Into Repo project${d}/${image}
Scan Repo ${sha256} Succeed
Logout Harbor
Sign In Harbor ${HARBOR_URL} ${HARBOR_ADMIN} ${HARBOR_PASSWORD}
Check Listed In CVE Allowlist project${d} ${image} ${sha256} ${single_cve} is_in=No
Switch To Configure
Switch To Configuration System Setting
# Add Items To System CVE Allowlist CVE-2019-19317\nCVE-2019-19646 \nCVE-2019-5188 \nCVE-2019-20387 \nCVE-2019-17498 \nCVE-2019-20372 \nCVE-2019-19244 \nCVE-2019-19603 \nCVE-2019-19880 \nCVE-2019-19923 \nCVE-2019-19925 \nCVE-2019-19926 \nCVE-2019-19959 \nCVE-2019-20218 \nCVE-2019-19232 \nCVE-2019-19234 \nCVE-2019-19645
@ -256,6 +257,8 @@ Body Of Verfiy System Level CVE Allowlist
Pull Image ${ip} ${signin_user} ${signin_pwd} project${d} ${image} tag=${sha256}
Delete Top Item In System CVE Allowlist count=16
Cannot Pull Image ${ip} ${signin_user} ${signin_pwd} project${d} ${image} tag=${sha256} err_msg=cannot be pulled due to configured policy
Check Listed In CVE Allowlist project${d} ${image} ${sha256} ${single_cve}
Close Browser
Body Of Verfiy Project Level CVE Allowlist
@ -347,19 +350,20 @@ Body Of Replication Of Push Images to Registry Triggered By Event
Executions Result Count Should Be Succeeded event_based 2
Body Of Replication Of Pull Images from Registry To Self
[Arguments] ${provider} ${endpoint} ${username} ${pwd} ${project_name} @{target_images}
[Arguments] ${provider} ${endpoint} ${username} ${pwd} ${src_project_name} ${des_project_name} @{target_images}
Init Chrome Driver
${d}= Get Current Date result_format=%m%s
${_des_pro_name}= Set Variable If '${des_project_name}'=='${null}' project${d} ${des_project_name}
#login source
Sign In Harbor ${HARBOR_URL} ${HARBOR_ADMIN} ${HARBOR_PASSWORD}
Create An New Project And Go Into Project project${d}
Run Keyword If '${des_project_name}'=='${null}' Create An New Project And Go Into Project ${_des_pro_name}
Switch To Registries
Create A New Endpoint ${provider} e${d} ${endpoint} ${username} ${pwd} Y
Switch To Replication Manage
Create A Rule With Existing Endpoint rule${d} pull ${project_name} image e${d} project${d}
Create A Rule With Existing Endpoint rule${d} pull ${src_project_name} image e${d} ${_des_pro_name}
Select Rule And Replicate rule${d}
FOR ${item} IN @{target_images}
Log To Console Check image replicated to Project project${d} ${item}
Image Should Be Replicated To Project project${d} ${item} times=2
Log To Console Check image replicated to Project ${_des_pro_name} ${item}
Image Should Be Replicated To Project ${_des_pro_name} ${item} times=2
END
Close Browser

View File

@ -227,14 +227,25 @@ Test Case - User View Logs
${d}= Get Current Date result_format=%m%s
${img}= Set Variable kong
${tag}= Set Variable latest
${replication_image}= Set Variable for_log_view
${replication_tag}= Set Variable base
@{target_images}= Create List ${replication_image}
${user}= Set Variable user002
${pwd}= Set Variable Test1@34
Sign In Harbor ${HARBOR_URL} user002 Test1@34
Sign In Harbor ${HARBOR_URL} ${user} ${pwd}
Create An New Project And Go Into Project project${d}
Logout Harbor
Push image ${ip} user002 Test1@34 project${d} ${img}:${tag}
Pull image ${ip} user002 Test1@34 project${d} ${img}:${tag}
Body Of Replication Of Pull Images from Registry To Self harbor https://cicd.harbor.vmwarecna.net ${null} ${null} nightly/${replication_image} project${d} @{target_images}
Push image ${ip} ${user} ${pwd} project${d} ${img}:${tag}
Pull image ${ip} ${user} ${pwd} project${d} ${replication_image}:${replication_tag}
Init Chrome Driver
Sign In Harbor ${HARBOR_URL} ${user} ${pwd}
Go Into Project project${d}
Delete Repo project${d} ${replication_image}
Delete Repo project${d} ${img}
Sleep 3
@ -696,40 +707,6 @@ Test Case - Read Only Mode
Push image ${ip} ${HARBOR_ADMIN} ${HARBOR_PASSWORD} project${d} busybox:latest
Close Browser
Test Case - Proxy Cache
[Tags] run-once proxy_cache
${d}= Get Current Date result_format=%m%s
${registry}= Set Variable https://cicd.harbor.vmwarecna.net
${user_namespace}= Set Variable nightly
${image}= Set Variable for_proxy
${tag}= Set Variable 1.0
${manifest_index}= Set Variable index081597864867
${manifest_tag}= Set Variable index_tag081597864867
${test_user}= Set Variable user010
${test_pwd}= Set Variable Test1@34
Init Chrome Driver
Sign In Harbor ${HARBOR_URL} ${HARBOR_ADMIN} ${HARBOR_PASSWORD}
Switch To Registries
Create A New Endpoint harbor e1${d} ${registry} ${null} ${null}
Create An New Project And Go Into Project project${d} proxy_cache=${true} registry=e1${d}
Manage Project Member Without Sign In project${d} ${test_user} Add has_image=${false}
Go Into Project project${d} has_image=${false}
Change Member Role ${test_user} Developer
Cannot Push image ${ip} ${HARBOR_ADMIN} ${HARBOR_PASSWORD} project${d} busybox:latest err_msg=can not push artifact to a proxy project
Cannot Push image ${ip} ${test_user} ${test_pwd} project${d} busybox:latest err_msg=can not push artifact to a proxy project
Pull Image ${ip} ${HARBOR_ADMIN} ${HARBOR_PASSWORD} project${d} ${user_namespace}/${image} tag=${tag}
Pull Image ${ip} ${test_user} ${test_pwd} project${d} ${user_namespace}/${manifest_index} tag=${manifest_tag}
Log To Console Start to Sleep 3 minitues......
Sleep 180
Go Into Project project${d}
Go Into Repo project${d}/${user_namespace}/${image}
Log To Console Start to Sleep 10 minitues......
Sleep 500
Go Into Project project${d}
Go Into Repo project${d}/${user_namespace}/${manifest_index}
Go Into Index And Contain Artifacts ${manifest_tag} limit=1
Close Browser
Test Case - Distribution CRUD
${d}= Get Current Date result_format=%m%s
${name}= Set Variable distribution${d}

View File

@ -150,7 +150,7 @@ Test Case - Replication Rule Delete
Test Case - Replication Of Pull Images from DockerHub To Self
@{target_images}= Create List mariadb centos
Body Of Replication Of Pull Images from Registry To Self docker-hub https://hub.docker.com/ ${DOCKER_USER} ${DOCKER_PWD} ${DOCKER_USER}/{cent*,mariadb} @{target_images}
Body Of Replication Of Pull Images from Registry To Self docker-hub https://hub.docker.com/ ${DOCKER_USER} ${DOCKER_PWD} ${DOCKER_USER}/{cent*,mariadb} ${null} @{target_images}
Test Case - Replication Of Push Images from Self To Harbor
Init Chrome Driver
@ -281,7 +281,7 @@ Test Case - Replication Of Push Images to AWS-ECR Triggered By Event
Test Case - Replication Of Pull Images from Gitlab To Self
@{target_images}= Create List photon alpine
Body Of Replication Of Pull Images from Registry To Self gitlab https://registry.gitlab.com ${gitlab_id} ${gitlab_key} dannylunsa/test_replication/{photon,alpine} @{target_images}
Body Of Replication Of Pull Images from Registry To Self gitlab https://registry.gitlab.com ${gitlab_id} ${gitlab_key} dannylunsa/test_replication/{photon,alpine} ${null} @{target_images}
Test Case - Replication Of Push Images to Gitlab Triggered By Event
Body Of Replication Of Push Images to Registry Triggered By Event gitlab https://registry.gitlab.com ${gitlab_id} ${gitlab_key} dannylunsa/test_replication

View File

@ -24,6 +24,49 @@ ${SSH_USER} root
${HARBOR_ADMIN} admin
*** Test Cases ***
# Due to Docker 20's new behavior, let 'Proxy Cache' be the first case to run,
# and at the same time, all images to be pulled by the cases must not already exist locally before pulling.
Test Case - Proxy Cache
[Tags] proxy_cache
${d}= Get Current Date result_format=%m%s
${registry}= Set Variable https://cicd.harbor.vmwarecna.net
${user_namespace}= Set Variable nightly
${image}= Set Variable for_proxy
${tag}= Set Variable 1.0
${manifest_index}= Set Variable alpine
${manifest_tag}= Set Variable 3.12.0
${test_user}= Set Variable user010
${test_pwd}= Set Variable Test1@34
Init Chrome Driver
Sign In Harbor ${HARBOR_URL} ${HARBOR_ADMIN} ${HARBOR_PASSWORD}
Switch To Registries
Create A New Endpoint harbor e1${d} ${registry} ${null} ${null}
Create An New Project And Go Into Project project${d} proxy_cache=${true} registry=e1${d}
Manage Project Member Without Sign In project${d} ${test_user} Add has_image=${false}
Go Into Project project${d} has_image=${false}
Change Member Role ${test_user} Developer
Pull Image ${ip} ${HARBOR_ADMIN} ${HARBOR_PASSWORD} project${d} ${user_namespace}/${image} tag=${tag}
Pull Image ${ip} ${test_user} ${test_pwd} project${d} ${user_namespace}/${manifest_index} tag=${manifest_tag}
Log To Console Start to Sleep 3 minutes......
Sleep 180
Go Into Project project${d}
Go Into Repo project${d}/${user_namespace}/${image}
FOR ${idx} IN RANGE 0 15
Log All Checking manifest ${idx} round......
Sleep 60
Go Into Project project${d}
${repo_out}= Run Keyword And Ignore Error Go Into Repo project${d}/${user_namespace}/${manifest_index}
Continue For Loop If '${repo_out[0]}'=='FAIL'
${artifact_out}= Run Keyword And Ignore Error Go Into Index And Contain Artifacts ${manifest_tag} limit=1
Exit For Loop If '${artifact_out[0]}'=='PASS'
END
Should Be Equal As Strings '${artifact_out[0]}' 'PASS'
Cannot Push image ${ip} ${HARBOR_ADMIN} ${HARBOR_PASSWORD} project${d} busybox:latest err_msg=can not push artifact to a proxy project
Cannot Push image ${ip} ${test_user} ${test_pwd} project${d} busybox:latest err_msg=can not push artifact to a proxy project
Close Browser
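The FOR loop in this test case is a poll-and-retry guard: the proxied manifest index can take several minutes to materialize in the proxy-cache project, so the test keeps re-entering the repo until the artifact is listed or 15 one-minute rounds elapse. The same pattern in plain Python (check_artifact is a stand-in for the "Go Into Repo" / "Go Into Index And Contain Artifacts" keywords):

    import time

    def wait_for_artifact(check_artifact, rounds=15, interval=60):
        """Poll until check_artifact() returns True or the rounds run out."""
        for _ in range(rounds):
            time.sleep(interval)
            if check_artifact():
                return True
        return False

    # e.g. assert wait_for_artifact(lambda: artifact_listed("alpine", "3.12.0")),
    # where artifact_listed is a hypothetical helper wrapping the UI checks above.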
Test Case - GC Schedule Job
[tags] GC_schedule
Init Chrome Driver

View File

@ -111,6 +111,7 @@ Test Case - Project Level Image Serverity Policy
#Important Note: All CVE IDs in CVE Allowlist cases must unique!
Test Case - Verfiy System Level CVE Allowlist
[Tags] sys_cve
Body Of Verfiy System Level CVE Allowlist goharbor/harbor-portal 2cb6a1c24dd6b88f11fd44ccc6560cb7be969f8ac5f752802c99cae6bcd592bb CVE-2019-19317\nCVE-2019-19646 \nCVE-2019-5188 \nCVE-2019-20387 \nCVE-2019-17498 \nCVE-2019-20372 \nCVE-2019-19244 \nCVE-2019-19603 \nCVE-2019-19880 \nCVE-2019-19923 \nCVE-2019-19925 \nCVE-2019-19926 \nCVE-2019-19959 \nCVE-2019-20218 \nCVE-2019-19232 \nCVE-2019-19234 \nCVE-2019-19645 CVE-2019-18276
Test Case - Verfiy Project Level CVE Allowlist