2018-07-25 11:46:09 +02:00
|
|
|
import os
|
2019-10-29 11:18:05 +01:00
|
|
|
import sys
|
2018-07-25 11:46:09 +02:00
|
|
|
import json
|
2020-09-15 12:51:40 +02:00
|
|
|
import time
|
2018-07-25 11:46:09 +02:00
|
|
|
import argparse
|
|
|
|
import requests
|
2020-09-15 12:51:40 +02:00
|
|
|
import urllib
|
2019-10-29 11:18:05 +01:00
|
|
|
from functools import wraps
|
2018-07-25 11:46:09 +02:00
|
|
|
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
|
|
|
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
|
|
|
|
|
|
|
|
# Command-line interface: the script needs the Harbor endpoint, and optionally
# the Harbor version (used to select per-feature API branches via
# feature_map.json) and the path to the e2e library checkout.
parser = argparse.ArgumentParser(description='The script to generate data for harbor v1.4.0')
parser.add_argument('--endpoint', '-e', dest='endpoint', required=True, help='The endpoint to harbor')
parser.add_argument('--version', '-v', dest='version', required=False, help='The version to harbor')
parser.add_argument('--libpath', '-l', dest='libpath', required=False, help='e2e library')
args = parser.parse_args()
|
2018-07-25 11:46:09 +02:00
|
|
|
|
2020-09-15 12:51:40 +02:00
|
|
|
from os import path

# Make the e2e library (passed via --libpath) importable before pulling in
# its helpers; both the root and its "library" subfolder are needed.
sys.path.append(args.libpath)
sys.path.append(args.libpath + "/library")
from library.docker_api import docker_manifest_push_to_harbor
from library.repository import Repository
from library.repository import push_self_build_image_to_project
|
2020-09-15 12:51:40 +02:00
|
|
|
|
2018-07-25 11:46:09 +02:00
|
|
|
# Base API URL; may later be rewritten to the /api/v2.0/ prefix by set_url()
# depending on the target Harbor version.
url = "https://"+args.endpoint+"/api/"
endpoint_url = "https://"+args.endpoint
print(url)
|
2018-07-25 11:46:09 +02:00
|
|
|
|
2019-10-29 11:18:05 +01:00
|
|
|
# feature_map.json maps each decorated function name to the list of
# {"version": ..., "branch": ...} entries it supports; consumed by get_branch().
with open("feature_map.json") as f:
    feature_map = json.load(f)
|
|
|
|
|
|
|
|
def get_branch(func_name, version):
    """Resolve which API "branch" *func_name* should use for *version*.

    Returns the branch recorded in feature_map on an exact version match,
    "No Restriction" when the function has no feature entries at all, and
    "Not Supported" when entries exist but none matches *version*.
    """
    # Fix: use .get() so a function absent from feature_map.json is treated
    # as unrestricted instead of raising KeyError.
    entries = feature_map.get(func_name, [])
    for node in entries:
        if node["version"] == version:
            return node["branch"]
    return "Not Supported" if entries else "No Restriction"
|
|
|
|
|
|
|
|
def get_feature_branch(func):
    """Decorator that routes *func* to the right API flavour per version.

    Looks up the branch for the wrapped function's name and the caller's
    ``version`` kwarg:

    - "No Restriction": call through unchanged.
    - "Not Supported": print a notice and skip the call entirely.
    - anything else: inject the branch as ``kwargs["branch"]`` and call.
    """
    @wraps(func)
    def inner_func(*args, **kwargs):
        version = kwargs.get("version")
        branch = get_branch(inner_func.__name__, version)
        if branch == "Not Supported":
            print("Feature {} is not supported in version {}".format(inner_func.__name__, version))
            return
        if branch != "No Restriction":
            kwargs["branch"] = branch
        func(*args, **kwargs)
    return inner_func
|
|
|
|
|
2018-07-25 11:46:09 +02:00
|
|
|
class HarborAPI:
    @get_feature_branch
    def populate_projects(self, **kwargs):
        """Create every project in data["projects"] with all of its
        members, robot accounts, preheat policies, webhooks, retention and
        immutability rules, and per-project settings.

        kwargs["branch"] selects the API flavour provided by the
        get_feature_branch decorator.
        """
        for project in data["projects"]:
            if kwargs["branch"] in [1, 2]:
                if "registry_name" in project:
                    print("Populate proxy project...")
                    # continue
            elif kwargs["branch"] == 3:
                print("Populate all projects...")
            else:
                # Fix: was the undefined name `branch` (NameError); the
                # selected branch lives in kwargs.
                raise Exception(r"Error: Feature {} has no branch {}.".format(sys._getframe().f_code.co_name, kwargs["branch"]))
            self.create_project(project, version=args.version)
            for member in project["member"]:
                self.add_member(project["name"], member["name"], member["role"], version=args.version)
            for robot_account in project["robot_account"]:
                self.add_project_robot_account(project["name"], robot_account, version=args.version)
            self.add_p2p_preheat_policy(project, version=args.version)
            self.add_webhook(project["name"], project["webhook"], version=args.version)
            if project["tag_retention_rule"] is not None:
                self.add_tag_retention_rule(project["name"], project["tag_retention_rule"], version=args.version)
            self.add_tag_immutability_rule(project["name"], project["tag_immutability_rule"], version=args.version)
            self.update_project_setting_metadata(project["name"],
                                                 project["configuration"]["public"],
                                                 project["configuration"]["enable_content_trust"],
                                                 project["configuration"]["prevent_vul"],
                                                 project["configuration"]["severity"],
                                                 project["configuration"]["auto_scan"])
            self.update_project_setting_allowlist(project["name"],
                                                  project["configuration"]["reuse_sys_cve_allowlist"],
                                                  project["configuration"]["deployment_security"], version=args.version)
            # give the backend time to settle before the next project
            time.sleep(30)
|
|
|
|
|
2021-02-25 04:57:27 +01:00
|
|
|
@get_feature_branch
|
|
|
|
def populate_quotas(self, **kwargs):
|
|
|
|
for quotas in data["quotas"]:
|
|
|
|
self.create_project(quotas, version=args.version)
|
|
|
|
push_self_build_image_to_project(quotas["name"], args.endpoint, 'admin', 'Harbor12345', quotas["name"], "latest", size=quotas["size"])
|
|
|
|
|
2019-10-29 11:18:05 +01:00
|
|
|
@get_feature_branch
|
|
|
|
def create_project(self, project, **kwargs):
|
|
|
|
if kwargs["branch"] == 1:
|
2020-09-22 11:27:05 +02:00
|
|
|
body=dict(body={"project_name": project["name"], "metadata": {"public": "true"}})
|
|
|
|
request(url+"projects", 'post', **body)
|
2019-10-29 11:18:05 +01:00
|
|
|
elif kwargs["branch"] == 2:
|
2020-09-22 11:27:05 +02:00
|
|
|
body=dict(body={"project_name": project["name"], "metadata": {"public": "true"},"count_limit":project["count_limit"],"storage_limit":project["storage_limit"]})
|
|
|
|
request(url+"projects", 'post', **body)
|
2020-09-15 12:51:40 +02:00
|
|
|
elif kwargs["branch"] == 3:
|
2021-02-25 04:57:27 +01:00
|
|
|
if project.get("registry_name") is not None:
|
2020-09-15 12:51:40 +02:00
|
|
|
r = request(url+"registries?name="+project["registry_name"]+"", 'get')
|
|
|
|
registry_id = int(str(r.json()[0]['id']))
|
|
|
|
else:
|
2020-09-22 11:27:05 +02:00
|
|
|
registry_id = None
|
2020-09-15 12:51:40 +02:00
|
|
|
body=dict(body={"project_name": project["name"], "registry_id":registry_id, "metadata": {"public": "true"},"storage_limit":project["storage_limit"]})
|
|
|
|
request(url+"projects", 'post', **body)
|
2020-09-22 11:27:05 +02:00
|
|
|
|
2021-02-25 04:57:27 +01:00
|
|
|
#Project with registry_name is a proxy project, there should be images can be pulled.
|
|
|
|
if project.get("registry_name") is not None:
|
2020-09-15 12:51:40 +02:00
|
|
|
USER_ADMIN=dict(endpoint = "https://"+args.endpoint+"/api/v2.0" , username = "admin", password = "Harbor12345")
|
|
|
|
repo = Repository()
|
|
|
|
for _repo in project["repo"]:
|
|
|
|
pull_image(args.endpoint+"/"+ project["name"]+"/"+_repo["cache_image_namespace"]+"/"+_repo["cache_image"])
|
|
|
|
time.sleep(180)
|
|
|
|
repo_name = urllib.parse.quote(_repo["cache_image_namespace"]+"/"+_repo["cache_image"],'utf-8')
|
|
|
|
repo_data = repo.get_repository(project["name"], repo_name, **USER_ADMIN)
|
|
|
|
return
|
2019-10-29 11:18:05 +01:00
|
|
|
else:
|
|
|
|
raise Exception(r"Error: Feature {} has no branch {}.".format(sys._getframe().f_code.co_name, branch))
|
2019-02-27 07:27:39 +01:00
|
|
|
|
2018-07-25 11:46:09 +02:00
|
|
|
def create_user(self, username):
|
2019-02-27 07:27:39 +01:00
|
|
|
payload = {"username":username, "email":username+"@vmware.com", "password":"Harbor12345", "realname":username, "comment":"string"}
|
2019-01-11 06:40:08 +01:00
|
|
|
body=dict(body=payload)
|
|
|
|
request(url+"users", 'post', **body)
|
2019-02-27 07:27:39 +01:00
|
|
|
|
2019-10-29 11:18:05 +01:00
|
|
|
@get_feature_branch
|
|
|
|
def set_user_admin(self, user, **kwargs):
|
2019-01-11 06:40:08 +01:00
|
|
|
r = request(url+"users?username="+user+"", 'get')
|
2018-07-25 11:46:09 +02:00
|
|
|
userid = str(r.json()[0]['user_id'])
|
2019-10-29 11:18:05 +01:00
|
|
|
|
|
|
|
if kwargs["branch"] == 1:
|
|
|
|
body=dict(body={"has_admin_role": 1})
|
|
|
|
elif kwargs["branch"] == 2:
|
|
|
|
body=dict(body={"has_admin_role": True})
|
2020-09-15 12:51:40 +02:00
|
|
|
elif kwargs["branch"] == 3:
|
|
|
|
body=dict(body={"sysadmin_flag": True, "user_id":int(userid)})
|
2019-02-27 07:27:39 +01:00
|
|
|
else:
|
2019-10-29 11:18:05 +01:00
|
|
|
raise Exception(r"Error: Feature {} has no branch {}.".format(sys._getframe().f_code.co_name, branch))
|
2019-01-11 06:40:08 +01:00
|
|
|
request(url+"users/"+userid+"/sysadmin", 'put', **body)
|
2019-02-27 07:27:39 +01:00
|
|
|
|
2019-10-29 11:18:05 +01:00
|
|
|
@get_feature_branch
|
|
|
|
def add_member(self, project, user, role, **kwargs):
|
2019-01-11 06:40:08 +01:00
|
|
|
r = request(url+"projects?name="+project+"", 'get')
|
2018-07-25 11:46:09 +02:00
|
|
|
projectid = str(r.json()[0]['project_id'])
|
2019-10-29 11:18:05 +01:00
|
|
|
|
|
|
|
if kwargs["branch"] == 1:
|
|
|
|
payload = {"roles": [role], "username":""+user+""}
|
|
|
|
elif kwargs["branch"] == 2:
|
2019-02-27 07:27:39 +01:00
|
|
|
payload = {"member_user":{ "username": ""+user+""},"role_id": role}
|
|
|
|
else:
|
2019-10-29 11:18:05 +01:00
|
|
|
raise Exception(r"Error: Feature {} has no branch {}.".format(sys._getframe().f_code.co_name, branch))
|
2019-01-11 06:40:08 +01:00
|
|
|
body=dict(body=payload)
|
|
|
|
request(url+"projects/"+projectid+"/members", 'post', **body)
|
2019-02-27 07:27:39 +01:00
|
|
|
|
2020-09-15 12:51:40 +02:00
|
|
|
@get_feature_branch
|
|
|
|
def add_p2p_preheat_policy(self, project, **kwargs):
|
|
|
|
r = request(url+"projects?name="+project["name"]+"", 'get')
|
|
|
|
projectid = int(str(r.json()[0]['project_id']))
|
|
|
|
if kwargs["branch"] == 1:
|
|
|
|
if project["p2p_preheat_policy"] is not None:
|
|
|
|
instances = request(url+"p2p/preheat/instances", 'get')
|
|
|
|
if len(instances.json()) == 0:
|
|
|
|
raise Exception(r"Please add p2p preheat instances first.")
|
|
|
|
for instance in instances.json():
|
|
|
|
print("instance:", instance)
|
|
|
|
for policy in project["p2p_preheat_policy"]:
|
|
|
|
instance_str = [str(item) for item in instances]
|
|
|
|
if policy["provider_name"] in ''.join(instance_str):
|
|
|
|
print("policy:", policy)
|
|
|
|
if instance['name'] == policy["provider_name"]:
|
|
|
|
payload = {
|
|
|
|
"provider_id":int(instance['id']),
|
|
|
|
"name":policy["name"],
|
|
|
|
"filters":policy["filters"],
|
|
|
|
"trigger":policy["trigger"],
|
|
|
|
"project_id":projectid,
|
|
|
|
"enabled":policy["enabled"]
|
|
|
|
}
|
|
|
|
body=dict(body=payload)
|
|
|
|
print(body)
|
|
|
|
request(url+"projects/"+project["name"]+"/preheat/policies", 'post', **body)
|
|
|
|
else:
|
|
|
|
raise Exception(r"Please verify if distribution {} has beed created.".format(policy["provider_name"]))
|
|
|
|
else:
|
|
|
|
raise Exception(r"Error: Feature {} has no branch {}.".format(sys._getframe().f_code.co_name, branch))
|
|
|
|
|
2019-10-29 11:18:05 +01:00
|
|
|
@get_feature_branch
|
|
|
|
def add_endpoint(self, endpointurl, endpointname, username, password, insecure, registry_type, **kwargs):
|
|
|
|
if kwargs["branch"] == 1:
|
|
|
|
payload = {"endpoint": ""+endpointurl+"", "name": ""+endpointname+"", "username": ""+username+"", "password": ""+password+"", "insecure": insecure}
|
|
|
|
body=dict(body=payload)
|
|
|
|
request(url+"targets", 'post', **body)
|
|
|
|
elif kwargs["branch"] == 2:
|
|
|
|
payload = {
|
|
|
|
"credential":{
|
|
|
|
"access_key":""+username+"",
|
|
|
|
"access_secret":""+password+"",
|
|
|
|
"type":"basic"
|
|
|
|
},
|
|
|
|
"insecure":insecure,
|
|
|
|
"name":""+endpointname+"",
|
|
|
|
"type":""+registry_type+"",
|
|
|
|
"url":""+endpointurl+""
|
|
|
|
}
|
|
|
|
body=dict(body=payload)
|
2020-08-10 06:02:38 +02:00
|
|
|
print(body)
|
2020-09-15 12:51:40 +02:00
|
|
|
request(url+"registries", 'post', **body)
|
2019-10-29 11:18:05 +01:00
|
|
|
else:
|
|
|
|
raise Exception(r"Error: Feature {} has no branch {}.".format(sys._getframe().f_code.co_name, branch))
|
2019-02-27 07:27:39 +01:00
|
|
|
|
2019-10-29 11:18:05 +01:00
|
|
|
@get_feature_branch
|
|
|
|
def add_replication_rule(self, replicationrule, **kwargs):
|
|
|
|
if kwargs["branch"] == 1:
|
|
|
|
r = request(url+"projects?name="+replicationrule["project"]+"", 'get')
|
|
|
|
projectid = r.json()[0]['project_id']
|
|
|
|
r = request(url+"targets?name="+replicationrule["endpoint"]+"", 'get')
|
|
|
|
targetid = r.json()[0]['id']
|
|
|
|
payload = {"name": ""+replicationrule["rulename"]+"", "description": "string", "projects": [{"project_id": projectid,}], "targets": [{"id": targetid,}], "trigger": {"kind": ""+replicationrule["trigger"]+"", "schedule_param": {"type": "weekly", "weekday": 1, "offtime": 0}}}
|
|
|
|
body=dict(body=payload)
|
|
|
|
request(url+"policies/replication", 'post', **body)
|
|
|
|
elif kwargs["branch"] == 2:
|
|
|
|
r = request(url+"registries?name="+replicationrule["endpoint"]+"", 'get')
|
2020-11-10 10:33:09 +01:00
|
|
|
print("response:", r)
|
2019-10-29 11:18:05 +01:00
|
|
|
targetid = r.json()[0]['id']
|
|
|
|
if replicationrule["is_src_registry"] is True:
|
|
|
|
registry = r'"src_registry": { "id": '+str(targetid)+r'},'
|
|
|
|
else:
|
|
|
|
registry = r'"dest_registry": { "id": '+str(targetid)+r'},'
|
2019-02-27 07:27:39 +01:00
|
|
|
|
2020-08-10 06:02:38 +02:00
|
|
|
body=dict(body=json.loads(r'{"name":"'+replicationrule["rulename"]+r'","dest_namespace":"'+replicationrule["dest_namespace"]+r'","deletion": '+str(replicationrule["deletion"]).lower()+r',"enabled": '+str(replicationrule["enabled"]).lower()+r',"override": '+str(replicationrule["override"]).lower()+r',"description": "string",'+ registry + r'"trigger":{"type": "'+replicationrule["trigger_type"]+r'", "trigger_settings":{"cron":"'+replicationrule["cron"]+r'"}},"filters":[ {"type":"name","value":"'+replicationrule["name_filters"]+r'"},{"type":"tag","value":"'+replicationrule["tag_filters"]+r'"}]}'))
|
|
|
|
print(body)
|
2019-10-29 11:18:05 +01:00
|
|
|
request(url+"replication/policies", 'post', **body)
|
|
|
|
else:
|
|
|
|
raise Exception(r"Error: Feature {} has no branch {}.".format(sys._getframe().f_code.co_name, branch))
|
|
|
|
|
|
|
|
#@get_feature_branch
|
|
|
|
def update_project_setting_metadata(self, project, public, contenttrust, preventrunning, preventseverity, scanonpush):
|
2019-01-11 06:40:08 +01:00
|
|
|
r = request(url+"projects?name="+project+"", 'get')
|
2018-07-25 11:46:09 +02:00
|
|
|
projectid = str(r.json()[0]['project_id'])
|
|
|
|
payload = {
|
|
|
|
"metadata": {
|
2019-09-24 12:35:19 +02:00
|
|
|
"public": public,
|
2018-07-25 11:46:09 +02:00
|
|
|
"enable_content_trust": contenttrust,
|
2019-10-29 11:18:05 +01:00
|
|
|
"prevent_vul": preventrunning,
|
|
|
|
"severity": preventseverity,
|
|
|
|
"auto_scan": scanonpush
|
2018-07-25 11:46:09 +02:00
|
|
|
}
|
|
|
|
}
|
2019-01-11 06:40:08 +01:00
|
|
|
body=dict(body=payload)
|
2020-08-10 06:02:38 +02:00
|
|
|
print(body)
|
2019-01-11 06:40:08 +01:00
|
|
|
request(url+"projects/"+projectid+"", 'put', **body)
|
2019-02-27 07:27:39 +01:00
|
|
|
|
2019-10-29 11:18:05 +01:00
|
|
|
@get_feature_branch
|
2020-06-22 04:34:03 +02:00
|
|
|
def add_sys_allowlist(self, cve_id_list, **kwargs):
|
2019-10-29 11:18:05 +01:00
|
|
|
cve_id_str = ""
|
|
|
|
if kwargs["branch"] == 1:
|
|
|
|
for index, cve_id in enumerate(cve_id_list["cve"]):
|
|
|
|
cve_id_str = cve_id_str + '{"cve_id":"' +cve_id["id"] + '"}'
|
|
|
|
if index != len(cve_id_list["cve"]) - 1:
|
|
|
|
cve_id_str = cve_id_str + ","
|
2020-08-10 06:02:38 +02:00
|
|
|
body=dict(body=json.loads(r'{"items":['+cve_id_str+r'],"expires_at":'+cve_id_list["expires_at"]+'}'))
|
2019-10-29 11:18:05 +01:00
|
|
|
request(url+"system/CVEWhitelist", 'put', **body)
|
2020-09-15 12:51:40 +02:00
|
|
|
elif kwargs["branch"] == 2:
|
|
|
|
for index, cve_id in enumerate(cve_id_list["cve"]):
|
|
|
|
cve_id_str = cve_id_str + '{"cve_id":"' +cve_id["id"] + '"}'
|
|
|
|
if index != len(cve_id_list["cve"]) - 1:
|
|
|
|
cve_id_str = cve_id_str + ","
|
|
|
|
body=dict(body=json.loads(r'{"items":['+cve_id_str+r'],"expires_at":'+cve_id_list["expires_at"]+'}'))
|
|
|
|
request(url+"system/CVEAllowlist", 'put', **body)
|
2019-10-29 11:18:05 +01:00
|
|
|
else:
|
|
|
|
raise Exception(r"Error: Feature {} has no branch {}.".format(sys._getframe().f_code.co_name, branch))
|
|
|
|
|
|
|
|
@get_feature_branch
|
2020-06-22 04:34:03 +02:00
|
|
|
def update_project_setting_allowlist(self, project, reuse_sys_cve_whitelist, cve_id_list, **kwargs):
|
2019-10-29 11:18:05 +01:00
|
|
|
r = request(url+"projects?name="+project+"", 'get')
|
|
|
|
projectid = str(r.json()[0]['project_id'])
|
|
|
|
cve_id_str = ""
|
|
|
|
if kwargs["branch"] == 1:
|
|
|
|
for index, cve_id in enumerate(cve_id_list["cve"]):
|
|
|
|
cve_id_str = cve_id_str + '{"cve_id":"' +cve_id["id"] + '"}'
|
|
|
|
if index != len(cve_id_list["cve"]) - 1:
|
|
|
|
cve_id_str = cve_id_str + ","
|
2020-08-10 06:02:38 +02:00
|
|
|
print(cve_id_str)
|
2019-10-29 11:18:05 +01:00
|
|
|
if reuse_sys_cve_whitelist == "true":
|
|
|
|
payload = r'{"metadata":{"reuse_sys_cve_whitelist":"true"}}'
|
|
|
|
else:
|
2020-08-10 06:02:38 +02:00
|
|
|
payload = r'{"metadata":{"reuse_sys_cve_whitelist":"false"},"cve_whitelist":{"project_id":'+projectid+',"items":['+cve_id_str+r'],"expires_at":'+cve_id_list["expires_at"]+'}}'
|
|
|
|
print(payload)
|
2019-10-29 11:18:05 +01:00
|
|
|
body=dict(body=json.loads(payload))
|
|
|
|
request(url+"projects/"+projectid+"", 'put', **body)
|
2020-09-15 12:51:40 +02:00
|
|
|
elif kwargs["branch"] == 2:
|
|
|
|
for index, cve_id in enumerate(cve_id_list["cve"]):
|
|
|
|
cve_id_str = cve_id_str + '{"cve_id":"' +cve_id["id"] + '"}'
|
|
|
|
if index != len(cve_id_list["cve"]) - 1:
|
|
|
|
cve_id_str = cve_id_str + ","
|
|
|
|
print(cve_id_str)
|
|
|
|
if reuse_sys_cve_whitelist == "true":
|
|
|
|
payload = r'{"metadata":{"reuse_sys_cve_allowlist":"true"}}'
|
|
|
|
else:
|
|
|
|
payload = r'{"metadata":{"reuse_sys_cve_allowlist":"false"},"cve_whitelist":{"project_id":'+projectid+',"items":['+cve_id_str+r'],"expires_at":'+cve_id_list["expires_at"]+'}}'
|
|
|
|
print(payload)
|
|
|
|
body=dict(body=json.loads(payload))
|
|
|
|
request(url+"projects/"+projectid+"", 'put', **body)
|
2019-10-29 11:18:05 +01:00
|
|
|
else:
|
|
|
|
raise Exception(r"Error: Feature {} has no branch {}.".format(sys._getframe().f_code.co_name, branch))
|
|
|
|
|
2020-05-08 05:52:42 +02:00
|
|
|
@get_feature_branch
|
|
|
|
def update_interrogation_services(self, cron, **kwargs):
|
|
|
|
payload = {"schedule":{"type":"Custom","cron": cron}}
|
2020-08-10 06:02:38 +02:00
|
|
|
print(payload)
|
2020-05-08 05:52:42 +02:00
|
|
|
body=dict(body=payload)
|
|
|
|
request(url+"system/scanAll/schedule", 'post', **body)
|
2019-10-29 11:18:05 +01:00
|
|
|
|
2020-05-08 05:52:42 +02:00
|
|
|
def update_systemsetting(self, emailfrom, emailhost, emailport, emailuser, creation, selfreg, token, robot_token):
|
2018-07-25 11:46:09 +02:00
|
|
|
payload = {
|
|
|
|
"auth_mode": "db_auth",
|
|
|
|
"email_from": emailfrom,
|
|
|
|
"email_host": emailhost,
|
|
|
|
"email_port": emailport,
|
|
|
|
"email_identity": "string",
|
|
|
|
"email_username": emailuser,
|
|
|
|
"email_ssl": True,
|
|
|
|
"email_insecure": True,
|
|
|
|
"project_creation_restriction": creation,
|
|
|
|
"read_only": False,
|
|
|
|
"self_registration": selfreg,
|
|
|
|
"token_expiration": token,
|
2020-05-08 05:52:42 +02:00
|
|
|
"robot_token_duration":robot_token,
|
2018-07-25 11:46:09 +02:00
|
|
|
"scan_all_policy": {
|
|
|
|
"type": "none",
|
|
|
|
"parameter": {
|
|
|
|
"daily_time": 0
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2020-08-10 06:02:38 +02:00
|
|
|
print(payload)
|
2019-01-11 06:40:08 +01:00
|
|
|
body=dict(body=payload)
|
|
|
|
request(url+"configurations", 'put', **body)
|
2019-02-27 07:27:39 +01:00
|
|
|
|
2019-10-29 11:18:05 +01:00
|
|
|
@get_feature_branch
|
|
|
|
def add_project_robot_account(self, project, robot_account, **kwargs):
|
|
|
|
r = request(url+"projects?name="+project+"", 'get')
|
|
|
|
projectid = str(r.json()[0]['project_id'])
|
|
|
|
|
|
|
|
if kwargs["branch"] == 1:
|
|
|
|
if len(robot_account["access"]) == 1:
|
|
|
|
robot_account_ac = robot_account["access"][0]
|
|
|
|
payload = {
|
|
|
|
"name": robot_account["name"],
|
|
|
|
"access": [
|
|
|
|
{
|
|
|
|
"resource": "/project/"+projectid+"/repository",
|
|
|
|
"action": robot_account_ac["action"]
|
|
|
|
}
|
|
|
|
]
|
|
|
|
}
|
|
|
|
elif len(robot_account["access"]) == 2:
|
|
|
|
payload = {
|
|
|
|
"name": robot_account["name"],
|
|
|
|
"access": [
|
|
|
|
{
|
|
|
|
"resource": "/project/"+projectid+"/repository",
|
|
|
|
"action": "pull"
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"resource": "/project/"+projectid+"/repository",
|
|
|
|
"action": "push"
|
|
|
|
}
|
|
|
|
]
|
|
|
|
}
|
|
|
|
else:
|
|
|
|
raise Exception(r"Error: Robot account count {} is not legal!".format(len(robot_account["access"])))
|
|
|
|
else:
|
|
|
|
raise Exception(r"Error: Feature {} has no branch {}.".format(sys._getframe().f_code.co_name, branch))
|
2020-08-10 06:02:38 +02:00
|
|
|
print(payload)
|
2019-10-29 11:18:05 +01:00
|
|
|
body=dict(body=payload)
|
|
|
|
request(url+"projects/"+projectid+"/robots", 'post', **body)
|
|
|
|
|
|
|
|
@get_feature_branch
|
2020-01-10 07:40:02 +01:00
|
|
|
def add_tag_retention_rule(self, project, tag_retention_rule, **kwargs):
|
2020-09-15 12:51:40 +02:00
|
|
|
if tag_retention_rule is None:
|
|
|
|
print(r"No tag retention rule to be populated for project {}.".format(project))
|
2020-01-10 07:40:02 +01:00
|
|
|
r = request(url+"projects?name="+project+"", 'get')
|
|
|
|
projectid = str(r.json()[0]['project_id'])
|
|
|
|
if kwargs["branch"] == 1:
|
|
|
|
payload = {
|
|
|
|
"algorithm":"or",
|
|
|
|
"rules":
|
|
|
|
[
|
|
|
|
{
|
|
|
|
"disabled":False,
|
|
|
|
"action":"retain",
|
|
|
|
"scope_selectors":
|
|
|
|
{
|
|
|
|
"repository":
|
|
|
|
[
|
|
|
|
{
|
|
|
|
"kind":"doublestar",
|
|
|
|
"decoration":"repoMatches",
|
|
|
|
"pattern":tag_retention_rule["repository_patten"]
|
|
|
|
}
|
|
|
|
]
|
|
|
|
},
|
|
|
|
"tag_selectors":
|
|
|
|
[
|
|
|
|
{
|
|
|
|
"kind":"doublestar",
|
|
|
|
"decoration":"matches","pattern":tag_retention_rule["tag_decoration"]
|
|
|
|
}
|
|
|
|
],
|
|
|
|
"params":{"latestPushedK":tag_retention_rule["latestPushedK"]},
|
|
|
|
"template":"latestPushedK"
|
|
|
|
}
|
|
|
|
],
|
|
|
|
"trigger":
|
|
|
|
{
|
|
|
|
"kind":"Schedule",
|
|
|
|
"references":{},
|
|
|
|
"settings":{"cron":tag_retention_rule["cron"]}
|
|
|
|
},
|
|
|
|
"scope":
|
|
|
|
{
|
|
|
|
"level":"project",
|
|
|
|
"ref":int(projectid)
|
|
|
|
}
|
|
|
|
}
|
2020-08-10 06:02:38 +02:00
|
|
|
print(payload)
|
2020-01-10 07:40:02 +01:00
|
|
|
body=dict(body=payload)
|
2020-09-15 12:51:40 +02:00
|
|
|
action = "post"
|
|
|
|
request(url+"retentions", action, **body)
|
2020-01-10 07:40:02 +01:00
|
|
|
else:
|
|
|
|
raise Exception(r"Error: Feature {} has no branch {}.".format(sys._getframe().f_code.co_name, kwargs["branch"]))
|
|
|
|
|
|
|
|
    @get_feature_branch
    def add_tag_immutability_rule(self, project, tag_immutability_rule, **kwargs):
        """Attach one tag-immutability rule to *project*.

        The rule matches repositories/tags by the doublestar patterns given
        in *tag_immutability_rule* and marks matching tags immutable.
        """
        r = request(url+"projects?name="+project+"", 'get')
        projectid = str(r.json()[0]['project_id'])
        if kwargs["branch"] == 1:
            payload = {
                "disabled":False,
                "action":"immutable",
                "scope_selectors":
                {
                    "repository":
                    [
                        {
                            "kind":"doublestar",
                            "decoration":tag_immutability_rule["repo_decoration"],
                            "pattern":tag_immutability_rule["repo_pattern"]
                        }
                    ]
                },
                "tag_selectors":
                [
                    {
                        "kind":"doublestar",
                        "decoration":tag_immutability_rule["tag_decoration"],
                        "pattern":tag_immutability_rule["tag_pattern"]
                    }
                ],
                "project_id":int(projectid),
                "priority":0,
                "template":"immutable_template"
            }
            print(payload)
            body=dict(body=payload)
            request(url+"projects/"+projectid+"/immutabletagrules", 'post', **body)
        else:
            raise Exception(r"Error: Feature {} has no branch {}.".format(sys._getframe().f_code.co_name, kwargs["branch"]))
|
2019-10-29 11:18:05 +01:00
|
|
|
|
|
|
|
    @get_feature_branch
    def add_webhook(self, project, webhook, **kwargs):
        """Register a webhook policy on *project*.

        Branch 1 uses the legacy camelCase event names; branch 2 uses the
        v2.0 UPPER_SNAKE event names and additionally sends a policy name.
        """
        r = request(url+"projects?name="+project+"", 'get')
        projectid = str(r.json()[0]['project_id'])
        if kwargs["branch"] == 1:
            payload = {
                "targets":[
                    {
                        "type":webhook["notify_type"],
                        "address":webhook["address"],
                        "skip_cert_verify":webhook["skip_cert_verify"],
                        "auth_header":webhook["auth_header"]
                    }
                ],
                "event_types":[
                    "downloadChart",
                    "deleteChart",
                    "uploadChart",
                    "deleteImage",
                    "pullImage",
                    "pushImage",
                    "scanningFailed",
                    "scanningCompleted"
                ],
                "enabled":webhook["enabled"]
            }
            print(payload)
            body=dict(body=payload)
            request(url+"projects/"+projectid+"/webhook/policies", 'post', **body)
        elif kwargs["branch"] == 2:
            payload = {
                "targets":[
                    {
                        "type":webhook["notify_type"],
                        "address":webhook["address"],
                        "skip_cert_verify":webhook["skip_cert_verify"],
                        "auth_header":webhook["auth_header"]
                    }
                ],
                "event_types":[
                    "DELETE_ARTIFACT",
                    "PULL_ARTIFACT",
                    "PUSH_ARTIFACT",
                    "DELETE_CHART",
                    "DOWNLOAD_CHART",
                    "UPLOAD_CHART",
                    "QUOTA_EXCEED",
                    "QUOTA_WARNING",
                    "REPLICATION",
                    "SCANNING_FAILED",
                    "SCANNING_COMPLETED"
                ],
                "enabled":webhook["enabled"],
                "name":webhook["name"]
            }
            print(payload)
            body=dict(body=payload)
            request(url+"projects/"+projectid+"/webhook/policies", 'post', **body)
        else:
            raise Exception(r"Error: Feature {} has no branch {}.".format(sys._getframe().f_code.co_name, kwargs["branch"]))
|
|
|
|
|
2018-07-25 11:46:09 +02:00
|
|
|
def update_repoinfo(self, reponame):
|
2019-01-11 06:40:08 +01:00
|
|
|
payload = {"description": "testdescription"}
|
2020-08-10 06:02:38 +02:00
|
|
|
print(payload)
|
2019-01-11 06:40:08 +01:00
|
|
|
body=dict(body=payload)
|
|
|
|
request(url+"repositories/"+reponame+"", 'put', **body)
|
2018-07-25 11:46:09 +02:00
|
|
|
|
2020-09-15 12:51:40 +02:00
|
|
|
@get_feature_branch
|
|
|
|
def add_distribution(self, distribution, **kwargs):
|
|
|
|
if kwargs["branch"] == 1:
|
|
|
|
payload = {
|
|
|
|
"name":distribution["name"],
|
|
|
|
"endpoint":distribution["endpoint"],
|
|
|
|
"enabled":distribution["enabled"],
|
|
|
|
"vendor":distribution["vendor"],
|
|
|
|
"auth_mode":distribution["auth_mode"],
|
|
|
|
"insecure":distribution["insecure"]
|
|
|
|
}
|
|
|
|
print(payload)
|
|
|
|
body=dict(body=payload)
|
|
|
|
request(url+"p2p/preheat/instances", 'post', **body)
|
|
|
|
else:
|
|
|
|
raise Exception(r"Error: Feature {} has no branch {}.".format(sys._getframe().f_code.co_name, kwargs["branch"]))
|
|
|
|
|
|
|
|
@get_feature_branch
|
|
|
|
def get_ca(self, target='/harbor/ca/ca.crt', **kwargs):
|
|
|
|
if kwargs["branch"] == 1:
|
|
|
|
url = "https://" + args.endpoint + "/api/systeminfo/getcert"
|
|
|
|
elif kwargs["branch"] == 2:
|
|
|
|
url = "https://" + args.endpoint + "/api/v2.0/systeminfo/getcert"
|
2019-01-11 06:40:08 +01:00
|
|
|
resp = request(url, 'get')
|
|
|
|
try:
|
|
|
|
ca_content = json.loads(resp.text)
|
|
|
|
except ValueError:
|
|
|
|
ca_content = resp.text
|
2018-07-25 11:46:09 +02:00
|
|
|
ca_path = '/harbor/ca'
|
|
|
|
if not os.path.exists(ca_path):
|
|
|
|
try:
|
|
|
|
os.makedirs(ca_path)
|
2020-08-10 06:02:38 +02:00
|
|
|
except Exception as e:
|
|
|
|
print(str(e))
|
2018-07-25 11:46:09 +02:00
|
|
|
pass
|
2020-08-10 06:02:38 +02:00
|
|
|
open(target, 'wb').write(ca_content.encode('utf-8'))
|
2018-07-25 11:46:09 +02:00
|
|
|
|
2020-09-07 05:33:27 +02:00
|
|
|
@get_feature_branch
|
2020-09-15 12:51:40 +02:00
|
|
|
def push_artifact_index(self, project, name, tag, **kwargs):
|
|
|
|
image_a = "alpine"
|
|
|
|
image_b = "busybox"
|
2020-12-04 11:28:29 +01:00
|
|
|
repo_name_a, tag_a = push_self_build_image_to_project(project, args.endpoint, 'admin', 'Harbor12345', image_a, "latest")
|
|
|
|
repo_name_b, tag_b = push_self_build_image_to_project(project, args.endpoint, 'admin', 'Harbor12345', image_b, "latest")
|
2020-09-15 12:51:40 +02:00
|
|
|
manifests = [args.endpoint+"/"+repo_name_a+":"+tag_a, args.endpoint+"/"+repo_name_b+":"+tag_b]
|
|
|
|
index = args.endpoint+"/"+project+"/"+name+":"+tag
|
2020-09-22 11:27:05 +02:00
|
|
|
docker_manifest_push_to_harbor(index, manifests, args.endpoint, 'admin', 'Harbor12345', cfg_file = args.libpath + "/update_docker_cfg.sh")
|
2019-02-27 07:27:39 +01:00
|
|
|
|
2019-01-11 06:40:08 +01:00
|
|
|
def request(url, method, user = None, userp = None, **kwargs):
    """Issue an authenticated HTTP call against Harbor and return the response.

    Defaults to the admin test credentials. A ``body`` kwarg, when present,
    is JSON-encoded into the request data. Raises Exception on any 4xx/5xx.
    """
    if user is None:
        user = "admin"
    if userp is None:
        userp = "Harbor12345"
    headers = kwargs.setdefault('headers', {})
    headers['Accept'] = 'application/json'
    if 'body' in kwargs:
        headers['Content-Type'] = 'application/json'
        kwargs['data'] = json.dumps(kwargs.pop('body'))
    print("url: ", url)
    # self-signed certs in the test setup, hence verify=False
    resp = requests.request(method, url, verify=False, auth=(user, userp), **kwargs)
    if resp.status_code >= 400:
        raise Exception("[Exception Message] - {}".format(resp.text))
    return resp
|
2018-07-25 11:46:09 +02:00
|
|
|
|
|
|
|
# data.json describes everything to populate: users, admins, projects,
# quotas, endpoints, distributions, replication rules and configuration.
with open("data.json") as f:
    data = json.load(f)
|
|
|
|
|
|
|
|
def pull_image(*image):
    """docker-pull every image name given."""
    for name in image:
        print("docker pulling image: ", name)
        os.system("docker pull " + name)
|
|
|
|
|
|
|
|
def push_image(image, project):
    # Tag a locally-present image into the target project, log in, and push.
    os.system("docker tag "+image+" "+args.endpoint+"/"+project+"/"+image)
    os.system("docker login "+args.endpoint+" -u admin"+" -p Harbor12345")
    # NOTE(review): the push path inserts "/library/" but the tag above does
    # not — these look inconsistent; confirm the intended repository layout
    # before relying on this helper.
    os.system("docker push "+args.endpoint+"/"+project+"/library/"+image)
|
2018-07-25 11:46:09 +02:00
|
|
|
|
|
|
|
def push_signed_image(image, project, tag):
    """Sign and push *image*:*tag* into *project* via the helper script."""
    command = " ".join(["./sign_image.sh", args.endpoint, project, image, tag])
    os.system(command)
|
|
|
|
|
2020-09-15 12:51:40 +02:00
|
|
|
@get_feature_branch
def set_url(**kwargs):
    """Point the module-level API base URL at the legacy (/api/) or the
    v2.0 (/api/v2.0/) prefix, depending on the feature branch."""
    global url
    api_prefix = {1: "/api/", 2: "/api/v2.0/"}.get(kwargs["branch"])
    if api_prefix is not None:
        url = "https://" + args.endpoint + api_prefix
|
|
|
|
|
2018-07-25 11:46:09 +02:00
|
|
|
def do_data_creation():
    """Run the full data-population sequence against the target Harbor.

    Order matters: the API base URL and CA come first, then users/admins,
    then endpoints (required by proxy-cache projects) and distributions,
    then the projects themselves and everything that depends on them.
    """
    harborAPI = HarborAPI()
    set_url(version=args.version)
    harborAPI.get_ca(version=args.version)

    for user in data["users"]:
        harborAPI.create_user(user["name"])

    for user in data["admin"]:
        harborAPI.set_user_admin(user["name"], version=args.version)

    # Make sure to create endpoint first, it's for proxy cache project creation.
    for endpoint in data["endpoint"]:
        print("endpoint:", endpoint)
        harborAPI.add_endpoint(endpoint["url"], endpoint["name"], endpoint["user"], endpoint["pass"], endpoint["insecure"], endpoint["type"], version=args.version)

    for distribution in data["distributions"]:
        harborAPI.add_distribution(distribution, version=args.version)

    harborAPI.populate_projects(version=args.version)
    harborAPI.populate_quotas(version=args.version)

    harborAPI.push_artifact_index(data["projects"][0]["name"], data["projects"][0]["artifact_index"]["name"], data["projects"][0]["artifact_index"]["tag"], version=args.version)
    #pull_image("busybox", "redis", "haproxy", "alpine", "httpd:2")
    push_self_build_image_to_project(data["projects"][0]["name"], args.endpoint, 'admin', 'Harbor12345', "busybox", "latest")
    push_signed_image("alpine", data["projects"][0]["name"], "latest")

    for replicationrule in data["replicationrule"]:
        harborAPI.add_replication_rule(replicationrule, version=args.version)

    harborAPI.update_interrogation_services(data["interrogation_services"]["cron"], version=args.version)

    harborAPI.update_systemsetting(data["configuration"]["emailsetting"]["emailfrom"],
                                   data["configuration"]["emailsetting"]["emailserver"],
                                   float(data["configuration"]["emailsetting"]["emailport"]),
                                   data["configuration"]["emailsetting"]["emailuser"],
                                   data["configuration"]["projectcreation"],
                                   data["configuration"]["selfreg"],
                                   float(data["configuration"]["token"]),
                                   float(data["configuration"]["robot_token"])*60*24)

    harborAPI.add_sys_allowlist(data["configuration"]["deployment_security"], version=args.version)

do_data_creation()
|