2019-12-14 14:12:55 -05:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
|
2020-02-08 19:16:19 -05:00
|
|
|
# provisioner.py - PVC API Provisioner functions
|
2019-12-14 14:12:55 -05:00
|
|
|
# Part of the Parallel Virtual Cluster (PVC) system
|
|
|
|
#
|
2021-03-25 17:01:55 -04:00
|
|
|
# Copyright (C) 2018-2021 Joshua M. Boniface <joshua@boniface.me>
|
2019-12-14 14:12:55 -05:00
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
2021-03-25 16:57:17 -04:00
|
|
|
# the Free Software Foundation, version 3.
|
2019-12-14 14:12:55 -05:00
|
|
|
#
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
|
|
#
|
|
|
|
###############################################################################
|
|
|
|
|
|
|
|
import json
|
|
|
|
import psycopg2
|
|
|
|
import psycopg2.extras
|
|
|
|
import re
|
|
|
|
|
2021-05-28 23:33:36 -04:00
|
|
|
from pvcapid.Daemon import config, strtobool
|
2020-02-18 16:18:27 -05:00
|
|
|
|
2021-05-30 15:59:37 -04:00
|
|
|
from daemon_lib.zkhandler import ZKHandler
|
2021-05-29 00:26:15 -04:00
|
|
|
|
2020-02-08 18:48:59 -05:00
|
|
|
import daemon_lib.common as pvc_common
|
|
|
|
import daemon_lib.node as pvc_node
|
|
|
|
import daemon_lib.vm as pvc_vm
|
|
|
|
import daemon_lib.network as pvc_network
|
|
|
|
import daemon_lib.ceph as pvc_ceph
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-02-08 19:16:19 -05:00
|
|
|
import pvcapid.libvirt_schema as libvirt_schema
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-02-17 22:52:49 -05:00
|
|
|
from pvcapid.ova import list_ova
|
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
#
|
|
|
|
# Exceptions (used by Celery tasks)
|
|
|
|
#
|
|
|
|
class ValidationError(Exception):
    """
    Raised when a required value is undefined or malformed.

    Used by Celery provisioning tasks to signal bad or missing input.
    """
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
class ClusterError(Exception):
    """
    Raised when the PVC cluster state is out of alignment with the
    requested action.

    Used by Celery provisioning tasks.
    """
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
class ProvisioningError(Exception):
    """
    Raised when a provisioning command fails.

    Used by Celery provisioning tasks.
    """
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
#
|
|
|
|
# Common functions
|
|
|
|
#
|
|
|
|
|
|
|
|
# Database connections
|
|
|
|
def open_database(config):
    """
    Open a PostgreSQL connection from the API daemon configuration.

    Returns a (connection, cursor) tuple; the cursor yields each row as a
    dict keyed by column name (RealDictCursor).
    """
    connect_params = {
        "host": config["database_host"],
        "port": config["database_port"],
        "dbname": config["database_name"],
        "user": config["database_user"],
        "password": config["database_password"],
    }
    conn = psycopg2.connect(**connect_params)
    cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
    return conn, cur
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def close_database(conn, cur, failed=False):
    """
    Tear down a connection pair opened by open_database().

    The transaction is committed first unless ``failed`` is set, in which
    case closing without a commit implicitly discards it.
    """
    should_commit = not failed
    if should_commit:
        conn.commit()
    cur.close()
    conn.close()
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
#
|
|
|
|
# Template List functions
|
|
|
|
#
|
|
|
|
def list_template(limit, table, is_fuzzy=True):
    """
    Fetch rows from a template table, optionally filtered by name.

    :param limit: Name filter; may use regex-style '^'/'$' anchors when
        ``is_fuzzy``. Falsy values return every row.
    :param table: Template table to query ("system_template",
        "network_template", or "storage_template"). Interpolated into the
        SQL directly, so it must come from trusted callers only.
    :param is_fuzzy: When True, translate anchors into SQL LIKE wildcards.
    :returns: List of row dicts; network/storage templates gain a
        "networks"/"disks" key holding their child rows.
    """
    if limit:
        if is_fuzzy:
            # Handle fuzzy vs. non-fuzzy limits
            # No leading '^' anchor: allow any prefix via a '%' wildcard;
            # otherwise strip the anchor for an exact-start match.
            if not re.match(r"\^.*", limit):
                limit = "%" + limit
            else:
                limit = limit[1:]
            # Same for the trailing '$' anchor and the suffix wildcard.
            if not re.match(r".*\$", limit):
                limit = limit + "%"
            else:
                limit = limit[:-1]

        args = (limit,)
        query = "SELECT * FROM {} WHERE name LIKE %s;".format(table)
    else:
        args = ()
        query = "SELECT * FROM {};".format(table)

    conn, cur = open_database(config)
    cur.execute(query, args)
    data = cur.fetchall()

    # Normalize a single-row result into a list for the loops below.
    if not isinstance(data, list):
        data = [data]

    if table == "network_template":
        for template_id, template_data in enumerate(data):
            # Fetch list of VNIs from network table
            query = "SELECT * FROM network WHERE network_template = %s;"
            args = (template_data["id"],)
            cur.execute(query, args)
            vnis = cur.fetchall()
            data[template_id]["networks"] = vnis

    if table == "storage_template":
        for template_id, template_data in enumerate(data):
            # Fetch list of disks from the storage table
            query = "SELECT * FROM storage WHERE storage_template = %s"
            args = (template_data["id"],)
            cur.execute(query, args)
            disks = cur.fetchall()
            data[template_id]["disks"] = disks

    close_database(conn, cur)

    return data
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def list_template_system(limit, is_fuzzy=True):
    """
    Obtain a list of system templates.
    """
    templates = list_template(limit, "system_template", is_fuzzy)
    if not templates:
        return {"message": "No system templates found."}, 404
    return templates, 200
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def list_template_network(limit, is_fuzzy=True):
    """
    Obtain a list of network templates.
    """
    templates = list_template(limit, "network_template", is_fuzzy)
    if not templates:
        return {"message": "No network templates found."}, 404
    return templates, 200
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def list_template_network_vnis(name):
    """
    Obtain a list of network template VNIs.

    :param name: Exact name of the network template.
    :returns: (networks, 200) on success, or a message dict with 404 when
        the template is missing or has no networks.
    """
    # Guard against a missing template: list_template() returns an empty
    # list for no match, and indexing it blindly would raise IndexError.
    data = list_template(name, "network_template", is_fuzzy=False)
    if not data:
        return {"message": "No network template networks found."}, 404
    networks = data[0]["networks"]
    if networks:
        return networks, 200
    else:
        return {"message": "No network template networks found."}, 404
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def list_template_storage(limit, is_fuzzy=True):
    """
    Obtain a list of storage templates.
    """
    templates = list_template(limit, "storage_template", is_fuzzy)
    if not templates:
        return {"message": "No storage templates found."}, 404
    return templates, 200
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def list_template_storage_disks(name):
    """
    Obtain a list of storage template disks.

    :param name: Exact name of the storage template.
    :returns: (disks, 200) on success, or a message dict with 404 when the
        template is missing or has no disks.
    """
    # Guard against a missing template: list_template() returns an empty
    # list for no match, and indexing it blindly would raise IndexError.
    data = list_template(name, "storage_template", is_fuzzy=False)
    if not data:
        return {"message": "No storage template disks found."}, 404
    disks = data[0]["disks"]
    if disks:
        return disks, 200
    else:
        return {"message": "No storage template disks found."}, 404
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def template_list(limit):
    """
    Aggregate every template type into a single result dict.

    Any type with no matches contributes an empty list rather than the
    lister's 404 message.
    """
    listers = (
        ("system_templates", list_template_system),
        ("network_templates", list_template_network),
        ("storage_templates", list_template_storage),
    )
    result = {}
    for key, lister in listers:
        entries, code = lister(limit)
        result[key] = entries if code == 200 else []
    return result
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
#
|
|
|
|
# Template Create functions
|
|
|
|
#
|
2021-11-06 03:02:43 -04:00
|
|
|
def create_template_system(
    name,
    vcpu_count,
    vram_mb,
    serial=False,
    vnc=False,
    vnc_bind=None,
    node_limit=None,
    node_selector=None,
    node_autostart=False,
    migration_method=None,
    ova=None,
):
    """
    Create a new system template called ``name`` with the given resources.

    Returns a (message dict, HTTP code) tuple: 200 on success, 400 on a
    duplicate name or database failure.
    """
    # Refuse to create a duplicate entry
    if list_template_system(name, is_fuzzy=False)[-1] != 404:
        return (
            {"message": 'The system template "{}" already exists.'.format(name)},
            400,
        )

    # A "none" selector means no selector at all (SQL NULL)
    if node_selector == "none":
        node_selector = None

    query = "INSERT INTO system_template (name, vcpu_count, vram_mb, serial, vnc, vnc_bind, node_limit, node_selector, node_autostart, migration_method, ova) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);"
    args = (
        name,
        vcpu_count,
        vram_mb,
        serial,
        vnc,
        vnc_bind,
        node_limit,
        node_selector,
        node_autostart,
        migration_method,
        ova,
    )

    conn, cur = open_database(config)
    try:
        cur.execute(query, args)
        retmsg = {"message": 'Added new system template "{}".'.format(name)}
        retcode = 200
    except Exception as e:
        retmsg = {
            "message": 'Failed to create system template "{}": {}'.format(name, e)
        }
        retcode = 400
    close_database(conn, cur)
    return retmsg, retcode
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def create_template_network(name, mac_template=None):
    """
    Create a new (empty) network template called ``name``.

    Returns a (message dict, HTTP code) tuple: 200 on success, 400 on a
    duplicate name or database failure.
    """
    # Refuse to create a duplicate entry
    if list_template_network(name, is_fuzzy=False)[-1] != 404:
        return (
            {"message": 'The network template "{}" already exists.'.format(name)},
            400,
        )

    conn, cur = open_database(config)
    try:
        cur.execute(
            "INSERT INTO network_template (name, mac_template) VALUES (%s, %s);",
            (name, mac_template),
        )
        retmsg = {"message": 'Added new network template "{}".'.format(name)}
        retcode = 200
    except Exception as e:
        retmsg = {
            "message": 'Failed to create network template "{}": {}'.format(name, e)
        }
        retcode = 400
    close_database(conn, cur)
    return retmsg, retcode
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def create_template_network_element(name, vni):
    """
    Add a network (by VNI) to the existing network template ``name``.

    Returns a (message dict, HTTP code) tuple: 200 on success, 400 when
    the template is missing, the VNI already exists, or the insert fails.
    """
    if list_template_network(name, is_fuzzy=False)[-1] != 200:
        retmsg = {"message": 'The network template "{}" does not exist.'.format(name)}
        retcode = 400
        return retmsg, retcode

    networks, code = list_template_network_vnis(name)
    if code != 200:
        networks = []
    found_vni = False
    for network in networks:
        # Compare as strings: the stored VNI is an integer while the API
        # may hand us a string, and a raw == would miss the duplicate.
        if str(network["vni"]) == str(vni):
            found_vni = True
    if found_vni:
        retmsg = {
            "message": 'The VNI "{}" in network template "{}" already exists.'.format(
                vni, name
            )
        }
        retcode = 400
        return retmsg, retcode

    conn, cur = open_database(config)
    try:
        # Resolve the template name to its row ID for the foreign key
        query = "SELECT id FROM network_template WHERE name = %s;"
        args = (name,)
        cur.execute(query, args)
        template_id = cur.fetchone()["id"]
        query = "INSERT INTO network (network_template, vni) VALUES (%s, %s);"
        args = (template_id, vni)
        cur.execute(query, args)
        retmsg = {
            "message": 'Added new network "{}" to network template "{}".'.format(
                vni, name
            )
        }
        retcode = 200
    except Exception as e:
        retmsg = {"message": 'Failed to create entry "{}": {}'.format(vni, e)}
        retcode = 400
    close_database(conn, cur)
    return retmsg, retcode
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def create_template_storage(name):
    """
    Create a new (empty) storage template called ``name``.

    Returns a (message dict, HTTP code) tuple: 200 on success, 400 on a
    duplicate name or database failure.
    """
    # Refuse to create a duplicate entry
    if list_template_storage(name, is_fuzzy=False)[-1] != 404:
        return (
            {"message": 'The storage template "{}" already exists.'.format(name)},
            400,
        )

    conn, cur = open_database(config)
    try:
        cur.execute("INSERT INTO storage_template (name) VALUES (%s);", (name,))
        retmsg = {"message": 'Added new storage template "{}".'.format(name)}
        retcode = 200
    except Exception as e:
        retmsg = {"message": 'Failed to create entry "{}": {}'.format(name, e)}
        retcode = 400
    close_database(conn, cur)
    return retmsg, retcode
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
def create_template_storage_element(
    name,
    disk_id,
    pool,
    source_volume=None,
    disk_size_gb=None,
    filesystem=None,
    filesystem_args=None,
    mountpoint=None,
):
    """
    Add a disk to the existing storage template ``name``.

    :param disk_id: Identifier of the disk within the template (e.g. "sda").
    :param pool: Ceph pool to create the volume in.
    :param source_volume: Existing volume to clone; mutually exclusive with
        disk_size_gb/filesystem/mountpoint.
    :param filesystem_args: Optional list of mkfs arguments, stored
        space-joined. (Default changed from a mutable ``[]`` to ``None``;
        behavior is identical since the value is only truth-tested.)
    :returns: (message dict, HTTP code) tuple: 200 on success, 400 on any
        validation or database failure.
    """
    if list_template_storage(name, is_fuzzy=False)[-1] != 200:
        retmsg = {"message": 'The storage template "{}" does not exist.'.format(name)}
        retcode = 400
        return retmsg, retcode

    disks, code = list_template_storage_disks(name)
    if code != 200:
        disks = []
    found_disk = False
    for disk in disks:
        if disk["disk_id"] == disk_id:
            found_disk = True
    if found_disk:
        retmsg = {
            "message": 'The disk "{}" in storage template "{}" already exists.'.format(
                disk_id, name
            )
        }
        retcode = 400
        return retmsg, retcode

    # A mountpoint is meaningless without a filesystem to mount
    if mountpoint and not filesystem:
        retmsg = {"message": "A filesystem must be specified along with a mountpoint."}
        retcode = 400
        return retmsg, retcode

    # Cloned volumes carry their own size/filesystem/mountpoint
    if source_volume and (disk_size_gb or filesystem or mountpoint):
        retmsg = {
            "message": "Clone volumes are not compatible with disk size, filesystem, or mountpoint specifications."
        }
        retcode = 400
        return retmsg, retcode

    conn, cur = open_database(config)
    try:
        # Resolve the template name to its row ID for the foreign key
        query = "SELECT id FROM storage_template WHERE name = %s;"
        args = (name,)
        cur.execute(query, args)
        template_id = cur.fetchone()["id"]
        query = "INSERT INTO storage (storage_template, pool, disk_id, source_volume, disk_size_gb, mountpoint, filesystem, filesystem_args) VALUES (%s, %s, %s, %s, %s, %s, %s, %s);"
        if filesystem_args:
            fsargs = " ".join(filesystem_args)
        else:
            fsargs = ""
        args = (
            template_id,
            pool,
            disk_id,
            source_volume,
            disk_size_gb,
            mountpoint,
            filesystem,
            fsargs,
        )
        cur.execute(query, args)
        retmsg = {
            "message": 'Added new disk "{}" to storage template "{}".'.format(
                disk_id, name
            )
        }
        retcode = 200
    except Exception as e:
        retmsg = {"message": 'Failed to create entry "{}": {}'.format(disk_id, e)}
        retcode = 400
    close_database(conn, cur)
    return retmsg, retcode
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2020-02-18 16:18:27 -05:00
|
|
|
#
|
|
|
|
# Template Modify functions
|
|
|
|
#
|
2021-11-06 03:02:43 -04:00
|
|
|
def modify_template_system(
    name,
    vcpu_count=None,
    vram_mb=None,
    serial=None,
    vnc=None,
    vnc_bind=None,
    node_limit=None,
    node_selector=None,
    node_autostart=None,
    migration_method=None,
):
    """
    Modify fields of the existing system template ``name``.

    Only non-None arguments are updated; numeric and boolean arguments are
    validated/coerced first. Returns a (message dict, HTTP code) tuple:
    200 on success, 404 for a missing template, 400 for bad values or
    database failures.
    """
    if list_template_system(name, is_fuzzy=False)[-1] != 200:
        retmsg = {"message": 'The system template "{}" does not exist.'.format(name)}
        retcode = 404
        return retmsg, retcode

    fields = []

    if vcpu_count is not None:
        try:
            vcpu_count = int(vcpu_count)
        except Exception:
            retmsg = {"message": "The vcpus value must be an integer."}
            retcode = 400
            return retmsg, retcode
        fields.append({"field": "vcpu_count", "data": vcpu_count})

    if vram_mb is not None:
        try:
            vram_mb = int(vram_mb)
        except Exception:
            retmsg = {"message": "The vram value must be an integer."}
            retcode = 400
            return retmsg, retcode
        fields.append({"field": "vram_mb", "data": vram_mb})

    if serial is not None:
        try:
            serial = bool(strtobool(serial))
        except Exception:
            retmsg = {"message": "The serial value must be a boolean."}
            retcode = 400
            return retmsg, retcode
        fields.append({"field": "serial", "data": serial})

    if vnc is not None:
        try:
            vnc = bool(strtobool(vnc))
        except Exception:
            retmsg = {"message": "The vnc value must be a boolean."}
            retcode = 400
            return retmsg, retcode
        fields.append({"field": "vnc", "data": vnc})

    if vnc_bind is not None:
        fields.append({"field": "vnc_bind", "data": vnc_bind})

    if node_limit is not None:
        fields.append({"field": "node_limit", "data": node_limit})

    if node_selector is not None:
        # Bugfix: map "none" to a real None (SQL NULL) as
        # create_template_system() does, not the literal string "None".
        if node_selector == "none":
            node_selector = None

        fields.append({"field": "node_selector", "data": node_selector})

    if node_autostart is not None:
        try:
            node_autostart = bool(strtobool(node_autostart))
        except Exception:
            retmsg = {"message": "The node_autostart value must be a boolean."}
            retcode = 400
            # Bugfix: this branch previously fell through without
            # returning, appending the unparsed value to fields.
            return retmsg, retcode
        fields.append({"field": "node_autostart", "data": node_autostart})

    if migration_method is not None:
        fields.append({"field": "migration_method", "data": migration_method})

    conn, cur = open_database(config)
    try:
        for field in fields:
            query = "UPDATE system_template SET {} = %s WHERE name = %s;".format(
                field.get("field")
            )
            args = (field.get("data"), name)
            cur.execute(query, args)
        retmsg = {"message": 'Modified system template "{}".'.format(name)}
        retcode = 200
    except Exception as e:
        retmsg = {"message": 'Failed to modify entry "{}": {}'.format(name, e)}
        retcode = 400
    close_database(conn, cur)
    return retmsg, retcode
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
#
|
|
|
|
# Template Delete functions
|
|
|
|
#
|
|
|
|
def delete_template_system(name):
    """
    Remove the system template named ``name``.

    Returns a (message dict, HTTP code) tuple: 200 on success, 400 when
    the template is missing or the delete fails.
    """
    if list_template_system(name, is_fuzzy=False)[-1] != 200:
        return (
            {"message": 'The system template "{}" does not exist.'.format(name)},
            400,
        )

    conn, cur = open_database(config)
    try:
        cur.execute("DELETE FROM system_template WHERE name = %s;", (name,))
        retmsg = {"message": 'Removed system template "{}".'.format(name)}
        retcode = 200
    except Exception as e:
        retmsg = {"message": 'Failed to delete entry "{}": {}'.format(name, e)}
        retcode = 400
    close_database(conn, cur)
    return retmsg, retcode
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def delete_template_network(name):
    """
    Remove the network template named ``name`` along with all of its
    member networks.

    Returns a (message dict, HTTP code) tuple: 200 on success, 400 when
    the template is missing or the delete fails.
    """
    if list_template_network(name, is_fuzzy=False)[-1] != 200:
        return (
            {"message": 'The network template "{}" does not exist.'.format(name)},
            400,
        )

    conn, cur = open_database(config)
    try:
        cur.execute("SELECT id FROM network_template WHERE name = %s;", (name,))
        template_id = cur.fetchone()["id"]
        # Remove the child network rows first, then the template row
        cur.execute(
            "DELETE FROM network WHERE network_template = %s;", (template_id,)
        )
        cur.execute("DELETE FROM network_template WHERE name = %s;", (name,))
        retmsg = {"message": 'Removed network template "{}".'.format(name)}
        retcode = 200
    except Exception as e:
        retmsg = {"message": 'Failed to delete entry "{}": {}'.format(name, e)}
        retcode = 400
    close_database(conn, cur)
    return retmsg, retcode
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def delete_template_network_element(name, vni):
    """
    Remove the network with ``vni`` from the network template ``name``.

    Returns a (message dict, HTTP code) tuple: 200 on success, 400 when
    the template or VNI is missing or the delete fails.
    """
    if list_template_network(name, is_fuzzy=False)[-1] != 200:
        retmsg = {"message": 'The network template "{}" does not exist.'.format(name)}
        retcode = 400
        return retmsg, retcode

    networks, code = list_template_network_vnis(name)
    # Bugfix: on a non-200 code the lister returns a message dict, not a
    # list; iterating that dict below would crash. Treat it as empty.
    if code != 200:
        networks = []
    found_vni = False
    for network in networks:
        if network["vni"] == int(vni):
            found_vni = True
    if not found_vni:
        retmsg = {
            "message": 'The VNI "{}" in network template "{}" does not exist.'.format(
                vni, name
            )
        }
        retcode = 400
        return retmsg, retcode

    conn, cur = open_database(config)
    try:
        # Resolve the template name to its row ID for the foreign key
        query = "SELECT id FROM network_template WHERE name = %s;"
        args = (name,)
        cur.execute(query, args)
        template_id = cur.fetchone()["id"]
        query = "DELETE FROM network WHERE network_template = %s and vni = %s;"
        args = (template_id, vni)
        cur.execute(query, args)
        retmsg = {
            "message": 'Removed network "{}" from network template "{}".'.format(
                vni, name
            )
        }
        retcode = 200
    except Exception as e:
        retmsg = {"message": 'Failed to delete entry "{}": {}'.format(name, e)}
        retcode = 400
    close_database(conn, cur)
    return retmsg, retcode
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def delete_template_storage(name):
    """
    Remove the storage template named ``name`` along with all of its
    member disks.

    Returns a (message dict, HTTP code) tuple: 200 on success, 400 when
    the template is missing or the delete fails.
    """
    if list_template_storage(name, is_fuzzy=False)[-1] != 200:
        return (
            {"message": 'The storage template "{}" does not exist.'.format(name)},
            400,
        )

    conn, cur = open_database(config)
    try:
        cur.execute("SELECT id FROM storage_template WHERE name = %s;", (name,))
        template_id = cur.fetchone()["id"]
        # Remove the child disk rows first, then the template row
        cur.execute(
            "DELETE FROM storage WHERE storage_template = %s;", (template_id,)
        )
        cur.execute("DELETE FROM storage_template WHERE name = %s;", (name,))
        retmsg = {"message": 'Removed storage template "{}".'.format(name)}
        retcode = 200
    except Exception as e:
        retmsg = {"message": 'Failed to delete entry "{}": {}'.format(name, e)}
        retcode = 400
    close_database(conn, cur)
    return retmsg, retcode
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def delete_template_storage_element(name, disk_id):
    """
    Remove the disk ``disk_id`` from the storage template ``name``.

    Returns a (message dict, HTTP code) tuple: 200 on success, 400 when
    the template or disk is missing or the delete fails.
    """
    if list_template_storage(name, is_fuzzy=False)[-1] != 200:
        retmsg = {"message": 'The storage template "{}" does not exist.'.format(name)}
        retcode = 400
        return retmsg, retcode

    disks, code = list_template_storage_disks(name)
    # Bugfix: on a non-200 code the lister returns a message dict, not a
    # list; iterating that dict below would crash. Treat it as empty.
    if code != 200:
        disks = []
    found_disk = False
    for disk in disks:
        if disk["disk_id"] == disk_id:
            found_disk = True
    if not found_disk:
        retmsg = {
            "message": 'The disk "{}" in storage template "{}" does not exist.'.format(
                disk_id, name
            )
        }
        retcode = 400
        return retmsg, retcode

    conn, cur = open_database(config)
    try:
        # Resolve the template name to its row ID for the foreign key
        query = "SELECT id FROM storage_template WHERE name = %s;"
        args = (name,)
        cur.execute(query, args)
        template_id = cur.fetchone()["id"]
        query = "DELETE FROM storage WHERE storage_template = %s and disk_id = %s;"
        args = (template_id, disk_id)
        cur.execute(query, args)
        retmsg = {
            "message": 'Removed disk "{}" from storage template "{}".'.format(
                disk_id, name
            )
        }
        retcode = 200
    except Exception as e:
        retmsg = {"message": 'Failed to delete entry "{}": {}'.format(name, e)}
        retcode = 400
    close_database(conn, cur)
    return retmsg, retcode
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-30 15:01:28 -05:00
|
|
|
#
|
|
|
|
# Userdata functions
|
|
|
|
#
|
|
|
|
def list_userdata(limit, is_fuzzy=True):
    """
    List userdata documents, optionally filtered by name.

    If `limit` is given and `is_fuzzy` is True, a leading "^" or trailing
    "$" anchors that end of the match; unanchored ends get a SQL "%"
    wildcard. Returns (rows, 200) on success or a message dict and 404
    when no documents match.
    """
    if limit:
        if is_fuzzy:
            # Handle fuzzy vs. non-fuzzy limits.
            # BUGFIX: the previous re.match(r".*\$", limit) test matched a
            # "$" anywhere in the string, so a name containing "$" in the
            # middle lost its final character instead of the anchor.
            if not limit.startswith("^"):
                limit = "%" + limit
            else:
                limit = limit[1:]
            if not limit.endswith("$"):
                limit = limit + "%"
            else:
                limit = limit[:-1]

        query = "SELECT * FROM {} WHERE name LIKE %s;".format("userdata")
        args = (limit,)
    else:
        query = "SELECT * FROM {};".format("userdata")
        args = ()

    conn, cur = open_database(config)
    cur.execute(query, args)
    data = cur.fetchall()
    close_database(conn, cur)
    if data:
        return data, 200
    else:
        return {"message": "No userdata documents found."}, 404
|
2019-12-30 15:01:28 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-30 15:01:28 -05:00
|
|
|
def create_userdata(name, userdata):
    """
    Create a new userdata document called `name` containing `userdata`.

    Returns a (message_dict, http_code) tuple: 200 on success, 400 if a
    document with this name already exists or the insert fails.
    """
    # A 404 from the exact-name lookup means the name is free to use.
    if list_userdata(name, is_fuzzy=False)[-1] != 404:
        return (
            {"message": 'The userdata document "{}" already exists.'.format(name)},
            400,
        )

    conn, cur = open_database(config)
    try:
        cur.execute(
            "INSERT INTO userdata (name, userdata) VALUES (%s, %s);",
            (name, userdata),
        )
        message = {"message": 'Created userdata document "{}".'.format(name)}
        code = 200
    except Exception as e:
        message = {"message": 'Failed to create entry "{}": {}'.format(name, e)}
        code = 400
    close_database(conn, cur)
    return message, code
|
2019-12-30 15:01:28 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-30 15:01:28 -05:00
|
|
|
def update_userdata(name, userdata):
    """
    Replace the contents of the existing userdata document `name`.

    Returns a (message_dict, http_code) tuple: 200 on success, 400 if the
    document does not exist or the update fails.
    """
    if list_userdata(name, is_fuzzy=False)[-1] != 200:
        return {"message": 'The userdata "{}" does not exist.'.format(name)}, 400

    # Resolve the row ID so the update targets the primary key, not the name.
    data, code = list_userdata(name, is_fuzzy=False)
    row_id = data[0]["id"]

    conn, cur = open_database(config)
    try:
        cur.execute(
            "UPDATE userdata SET userdata = %s WHERE id = %s;",
            (userdata, row_id),
        )
        message = {"message": 'Updated userdata document "{}".'.format(name)}
        code = 200
    except Exception as e:
        message = {"message": 'Failed to update entry "{}": {}'.format(name, e)}
        code = 400
    close_database(conn, cur)
    return message, code
|
2019-12-30 15:01:28 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-30 15:01:28 -05:00
|
|
|
def delete_userdata(name):
    """
    Delete the userdata document `name`.

    Returns a (message_dict, http_code) tuple: 200 on success, 400 if the
    document does not exist or the delete fails.
    """
    if list_userdata(name, is_fuzzy=False)[-1] != 200:
        return {"message": 'The userdata "{}" does not exist.'.format(name)}, 400

    conn, cur = open_database(config)
    try:
        cur.execute(
            "DELETE FROM userdata WHERE name = %s;",
            (name,),
        )
        message = {"message": 'Removed userdata document "{}".'.format(name)}
        code = 200
    except Exception as e:
        message = {"message": 'Failed to delete entry "{}": {}'.format(name, e)}
        code = 400
    close_database(conn, cur)
    return message, code
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
#
|
|
|
|
# Script functions
|
|
|
|
#
|
|
|
|
def list_script(limit, is_fuzzy=True):
    """
    List provisioning scripts, optionally filtered by name.

    If `limit` is given and `is_fuzzy` is True, a leading "^" or trailing
    "$" anchors that end of the match; unanchored ends get a SQL "%"
    wildcard. Returns (rows, 200) on success or a message dict and 404
    when no scripts match.
    """
    if limit:
        if is_fuzzy:
            # Handle fuzzy vs. non-fuzzy limits.
            # BUGFIX: the previous re.match(r".*\$", limit) test matched a
            # "$" anywhere in the string, so a name containing "$" in the
            # middle lost its final character instead of the anchor.
            if not limit.startswith("^"):
                limit = "%" + limit
            else:
                limit = limit[1:]
            if not limit.endswith("$"):
                limit = limit + "%"
            else:
                limit = limit[:-1]

        query = "SELECT * FROM {} WHERE name LIKE %s;".format("script")
        args = (limit,)
    else:
        query = "SELECT * FROM {};".format("script")
        args = ()

    conn, cur = open_database(config)
    cur.execute(query, args)
    data = cur.fetchall()
    close_database(conn, cur)
    if data:
        return data, 200
    else:
        return {"message": "No scripts found."}, 404
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def create_script(name, script):
    """
    Create a new provisioning script called `name` containing `script`.

    Returns a (message_dict, http_code) tuple: 200 on success, 400 if a
    script with this name already exists or the insert fails.
    """
    # A 404 from the exact-name lookup means the name is free to use.
    if list_script(name, is_fuzzy=False)[-1] != 404:
        return {"message": 'The script "{}" already exists.'.format(name)}, 400

    conn, cur = open_database(config)
    try:
        cur.execute(
            "INSERT INTO script (name, script) VALUES (%s, %s);",
            (name, script),
        )
        message = {"message": 'Created provisioning script "{}".'.format(name)}
        code = 200
    except Exception as e:
        message = {"message": 'Failed to create entry "{}": {}'.format(name, e)}
        code = 400
    close_database(conn, cur)
    return message, code
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def update_script(name, script):
    """
    Replace the contents of the existing provisioning script `name`.

    Returns a (message_dict, http_code) tuple: 200 on success, 400 if the
    script does not exist or the update fails.
    """
    if list_script(name, is_fuzzy=False)[-1] != 200:
        return {"message": 'The script "{}" does not exist.'.format(name)}, 400

    # Resolve the row ID so the update targets the primary key, not the name.
    data, code = list_script(name, is_fuzzy=False)
    row_id = data[0]["id"]

    conn, cur = open_database(config)
    try:
        cur.execute(
            "UPDATE script SET script = %s WHERE id = %s;",
            (script, row_id),
        )
        message = {"message": 'Updated provisioning script "{}".'.format(name)}
        code = 200
    except Exception as e:
        message = {"message": 'Failed to update entry "{}": {}'.format(name, e)}
        code = 400
    close_database(conn, cur)
    return message, code
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def delete_script(name):
    """
    Delete the provisioning script `name`.

    Returns a (message_dict, http_code) tuple: 200 on success, 400 if the
    script does not exist or the delete fails.
    """
    if list_script(name, is_fuzzy=False)[-1] != 200:
        return {"message": 'The script "{}" does not exist.'.format(name)}, 400

    conn, cur = open_database(config)
    try:
        cur.execute(
            "DELETE FROM script WHERE name = %s;",
            (name,),
        )
        message = {"message": 'Removed provisioning script "{}".'.format(name)}
        code = 200
    except Exception as e:
        message = {"message": 'Failed to delete entry "{}": {}'.format(name, e)}
        code = 400
    close_database(conn, cur)
    return message, code
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
#
|
|
|
|
# Profile functions
|
|
|
|
#
|
|
|
|
def list_profile(limit, is_fuzzy=True):
    """
    List VM profiles, optionally filtered by name, resolving each linked
    element (templates, userdata, script, OVA) to its human-readable name.

    If `limit` is given and `is_fuzzy` is True, a leading "^" or trailing
    "$" anchors that end of the match; unanchored ends get a SQL "%"
    wildcard. Returns (profiles, 200) on success or a message dict and
    404 when no profiles match.
    """
    if limit:
        if is_fuzzy:
            # Handle fuzzy vs. non-fuzzy limits.
            # BUGFIX: the previous re.match(r".*\$", limit) test matched a
            # "$" anywhere in the string, so a name containing "$" in the
            # middle lost its final character instead of the anchor.
            if not limit.startswith("^"):
                limit = "%" + limit
            else:
                limit = limit[1:]
            if not limit.endswith("$"):
                limit = limit + "%"
            else:
                limit = limit[:-1]

        query = "SELECT * FROM {} WHERE name LIKE %s;".format("profile")
        args = (limit,)
    else:
        query = "SELECT * FROM {};".format("profile")
        args = ()

    conn, cur = open_database(config)
    cur.execute(query, args)
    orig_data = cur.fetchall()
    data = list()
    for profile in orig_data:
        profile_data = dict()
        profile_data["id"] = profile["id"]
        profile_data["name"] = profile["name"]
        profile_data["type"] = profile["profile_type"]
        # Parse the name of each subelement; the profile row stores only the
        # foreign-key ID, so look the name up in the element's own table.
        for etype in (
            "system_template",
            "network_template",
            "storage_template",
            "userdata",
            "script",
            "ova",
        ):
            query = "SELECT name from {} WHERE id = %s".format(etype)
            args = (profile[etype],)
            cur.execute(query, args)
            try:
                name = cur.fetchone()["name"]
            except Exception:
                # The element ID is NULL or dangling; report a placeholder.
                name = "N/A"
            profile_data[etype] = name
        # Split the arguments back into a list ("|" is the storage separator)
        profile_data["arguments"] = profile["arguments"].split("|")
        # Append the new data to our actual output structure
        data.append(profile_data)
    close_database(conn, cur)
    if data:
        return data, 200
    else:
        return {"message": "No profiles found."}, 404
|
|
|
|
|
|
|
|
|
|
|
|
def create_profile(
    name,
    profile_type,
    system_template,
    network_template,
    storage_template,
    userdata=None,
    script=None,
    ova=None,
    arguments=None,
):
    """
    Create a new VM profile linking the named elements together.

    All element arguments are human-readable names; each is resolved to its
    database ID before insertion. A system template is always required;
    network and storage templates are only required for "provisioner"
    profiles (an "ova" profile carries its own). `arguments`, if given as a
    list, is stored joined by "|".

    Returns a (message_dict, http_code) tuple: 200 on success, 400 on any
    validation or insert failure.
    """
    # Refuse duplicates: a 404 from the exact-name lookup means the name is free.
    if list_profile(name, is_fuzzy=False)[-1] != 404:
        retmsg = {"message": 'The profile "{}" already exists.'.format(name)}
        retcode = 400
        return retmsg, retcode

    if profile_type not in ["provisioner", "ova"]:
        retmsg = {
            "message": "A valid profile type (provisioner, ova) must be specified."
        }
        retcode = 400
        return retmsg, retcode

    # Resolve the system template name to its ID (required for all profiles)
    system_templates, code = list_template_system(None)
    system_template_id = None
    if code != 200:
        system_templates = []
    for template in system_templates:
        if template["name"] == system_template:
            system_template_id = template["id"]
    if not system_template_id:
        retmsg = {
            "message": 'The system template "{}" for profile "{}" does not exist.'.format(
                system_template, name
            )
        }
        retcode = 400
        return retmsg, retcode

    # Resolve the network template name to its ID (optional for OVA profiles)
    network_templates, code = list_template_network(None)
    network_template_id = None
    if code != 200:
        network_templates = []
    for template in network_templates:
        if template["name"] == network_template:
            network_template_id = template["id"]
    if not network_template_id and profile_type != "ova":
        retmsg = {
            "message": 'The network template "{}" for profile "{}" does not exist.'.format(
                network_template, name
            )
        }
        retcode = 400
        return retmsg, retcode

    # Resolve the storage template name to its ID (optional for OVA profiles)
    storage_templates, code = list_template_storage(None)
    storage_template_id = None
    if code != 200:
        storage_templates = []
    for template in storage_templates:
        if template["name"] == storage_template:
            storage_template_id = template["id"]
    if not storage_template_id and profile_type != "ova":
        retmsg = {
            "message": 'The storage template "{}" for profile "{}" does not exist.'.format(
                storage_template, name
            )
        }
        retcode = 400
        return retmsg, retcode

    # Resolve the optional userdata name; no error if it is not found.
    userdatas, code = list_userdata(None)
    userdata_id = None
    if code != 200:
        userdatas = []
    for template in userdatas:
        if template["name"] == userdata:
            userdata_id = template["id"]

    # Resolve the optional script name; no error if it is not found.
    scripts, code = list_script(None)
    script_id = None
    if code != 200:
        scripts = []
    for scr in scripts:
        if scr["name"] == script:
            script_id = scr["id"]

    # Resolve the optional OVA name; no error if it is not found.
    ovas, code = list_ova(None)
    ova_id = None
    if code != 200:
        ovas = []
    for ov in ovas:
        if ov["name"] == ova:
            ova_id = ov["id"]

    # Store script arguments as a single "|"-joined string
    if arguments is not None and isinstance(arguments, list):
        arguments_formatted = "|".join(arguments)
    else:
        arguments_formatted = ""

    conn, cur = open_database(config)
    try:
        query = "INSERT INTO profile (name, profile_type, system_template, network_template, storage_template, userdata, script, ova, arguments) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s);"
        args = (
            name,
            profile_type,
            system_template_id,
            network_template_id,
            storage_template_id,
            userdata_id,
            script_id,
            ova_id,
            arguments_formatted,
        )
        cur.execute(query, args)
        retmsg = {"message": 'Created VM profile "{}".'.format(name)}
        retcode = 200
    except Exception as e:
        retmsg = {"message": 'Failed to create entry "{}": {}'.format(name, e)}
        retcode = 400
    close_database(conn, cur)
    return retmsg, retcode
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
def modify_profile(
    name,
    profile_type,
    system_template,
    network_template,
    storage_template,
    userdata,
    script,
    ova,
    arguments=None,
):
    """
    Modify one or more fields of the existing VM profile `name`.

    Any argument passed as None is left unchanged. Element arguments are
    human-readable names and are resolved to their database IDs; unlike
    create_profile, a named element that cannot be found is an error.
    `arguments`, if given as a list, is stored joined by "|".

    Returns a (message_dict, http_code) tuple: 200 on success, 400 on any
    validation or update failure.
    """
    if list_profile(name, is_fuzzy=False)[-1] != 200:
        retmsg = {"message": 'The profile "{}" does not exist.'.format(name)}
        retcode = 400
        return retmsg, retcode

    # Accumulate (column, value) updates to apply in one transaction below.
    fields = []

    if profile_type is not None:
        if profile_type not in ["provisioner", "ova"]:
            retmsg = {
                "message": "A valid profile type (provisioner, ova) must be specified."
            }
            retcode = 400
            return retmsg, retcode
        # BUGFIX: the column is "profile_type" (as used by the INSERT in
        # create_profile and the SELECT in list_profile); the previous
        # field name "type" targeted a nonexistent column so changing the
        # profile type always failed.
        fields.append({"field": "profile_type", "data": profile_type})

    if system_template is not None:
        system_templates, code = list_template_system(None)
        system_template_id = None
        for template in system_templates:
            if template["name"] == system_template:
                system_template_id = template["id"]
        if not system_template_id:
            retmsg = {
                "message": 'The system template "{}" for profile "{}" does not exist.'.format(
                    system_template, name
                )
            }
            retcode = 400
            return retmsg, retcode
        fields.append({"field": "system_template", "data": system_template_id})

    if network_template is not None:
        network_templates, code = list_template_network(None)
        network_template_id = None
        for template in network_templates:
            if template["name"] == network_template:
                network_template_id = template["id"]
        if not network_template_id:
            retmsg = {
                "message": 'The network template "{}" for profile "{}" does not exist.'.format(
                    network_template, name
                )
            }
            retcode = 400
            return retmsg, retcode
        fields.append({"field": "network_template", "data": network_template_id})

    if storage_template is not None:
        storage_templates, code = list_template_storage(None)
        storage_template_id = None
        for template in storage_templates:
            if template["name"] == storage_template:
                storage_template_id = template["id"]
        if not storage_template_id:
            retmsg = {
                "message": 'The storage template "{}" for profile "{}" does not exist.'.format(
                    storage_template, name
                )
            }
            retcode = 400
            return retmsg, retcode
        fields.append({"field": "storage_template", "data": storage_template_id})

    if userdata is not None:
        userdatas, code = list_userdata(None)
        userdata_id = None
        for template in userdatas:
            if template["name"] == userdata:
                userdata_id = template["id"]
        if not userdata_id:
            retmsg = {
                "message": 'The userdata template "{}" for profile "{}" does not exist.'.format(
                    userdata, name
                )
            }
            retcode = 400
            return retmsg, retcode
        fields.append({"field": "userdata", "data": userdata_id})

    if script is not None:
        scripts, code = list_script(None)
        script_id = None
        for scr in scripts:
            if scr["name"] == script:
                script_id = scr["id"]
        if not script_id:
            retmsg = {
                "message": 'The script "{}" for profile "{}" does not exist.'.format(
                    script, name
                )
            }
            retcode = 400
            return retmsg, retcode
        fields.append({"field": "script", "data": script_id})

    if ova is not None:
        ovas, code = list_ova(None)
        ova_id = None
        for ov in ovas:
            if ov["name"] == ova:
                ova_id = ov["id"]
        if not ova_id:
            retmsg = {
                "message": 'The OVA "{}" for profile "{}" does not exist.'.format(
                    ova, name
                )
            }
            retcode = 400
            return retmsg, retcode
        fields.append({"field": "ova", "data": ova_id})

    if arguments is not None:
        if isinstance(arguments, list):
            arguments_formatted = "|".join(arguments)
        else:
            arguments_formatted = ""
        fields.append({"field": "arguments", "data": arguments_formatted})

    conn, cur = open_database(config)
    try:
        # Apply each accumulated column update; the column name comes from
        # the fixed set built above, never from user input, so formatting
        # it into the query is safe.
        for field in fields:
            query = "UPDATE profile SET {}=%s WHERE name=%s;".format(field.get("field"))
            args = (field.get("data"), name)
            cur.execute(query, args)
        retmsg = {"message": 'Modified VM profile "{}".'.format(name)}
        retcode = 200
    except Exception as e:
        retmsg = {"message": 'Failed to modify entry "{}": {}'.format(name, e)}
        retcode = 400
    close_database(conn, cur)
    return retmsg, retcode
|
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
def delete_profile(name):
    """
    Delete the VM profile `name`.

    Returns a (message_dict, http_code) tuple: 200 on success, 400 if the
    profile does not exist or the delete fails.
    """
    if list_profile(name, is_fuzzy=False)[-1] != 200:
        return {"message": 'The profile "{}" does not exist.'.format(name)}, 400

    conn, cur = open_database(config)
    try:
        cur.execute(
            "DELETE FROM profile WHERE name = %s;",
            (name,),
        )
        message = {"message": 'Removed VM profile "{}".'.format(name)}
        code = 200
    except Exception as e:
        message = {"message": 'Failed to delete entry "{}": {}'.format(name, e)}
        code = 400
    close_database(conn, cur)
    return message, code
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-11-07 14:45:24 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
#
|
|
|
|
# Main VM provisioning function - executed by the Celery worker
|
|
|
|
#
|
2021-11-06 03:02:43 -04:00
|
|
|
def create_vm(
|
|
|
|
self, vm_name, vm_profile, define_vm=True, start_vm=True, script_run_args=[]
|
|
|
|
):
|
2019-12-14 14:12:55 -05:00
|
|
|
# Runtime imports
|
|
|
|
import time
|
|
|
|
import importlib
|
|
|
|
import uuid
|
|
|
|
import datetime
|
|
|
|
import random
|
|
|
|
|
2021-05-30 16:04:38 -04:00
|
|
|
temp_dir = None
|
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
time.sleep(2)
|
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
print(
|
|
|
|
"Starting provisioning of VM '{}' with profile '{}'".format(vm_name, vm_profile)
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
|
|
|
# Phase 0 - connect to databases
|
|
|
|
try:
|
|
|
|
db_conn, db_cur = open_database(config)
|
2020-11-06 18:55:10 -05:00
|
|
|
except Exception:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ClusterError("Failed to connect to Postgres")
|
2021-05-30 15:59:37 -04:00
|
|
|
|
|
|
|
try:
|
|
|
|
zkhandler = ZKHandler(config)
|
|
|
|
zkhandler.connect()
|
|
|
|
except Exception:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ClusterError("Failed to connect to Zookeeper")
|
2019-12-14 14:12:55 -05:00
|
|
|
|
|
|
|
# Phase 1 - setup
|
|
|
|
# * Get the profile elements
|
|
|
|
# * Get the details from these elements
|
|
|
|
# * Assemble a VM configuration dictionary
|
2021-11-06 03:02:43 -04:00
|
|
|
self.update_state(
|
|
|
|
state="RUNNING",
|
|
|
|
meta={"current": 1, "total": 10, "status": "Collecting configuration"},
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
time.sleep(1)
|
2020-11-06 19:05:48 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
vm_id = re.findall(r"/(\d+)$/", vm_name)
|
2019-12-14 14:12:55 -05:00
|
|
|
if not vm_id:
|
|
|
|
vm_id = 0
|
|
|
|
else:
|
|
|
|
vm_id = vm_id[0]
|
2020-11-06 19:05:48 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
vm_data = dict()
|
|
|
|
|
|
|
|
# Get the profile information
|
2019-12-23 20:43:20 -05:00
|
|
|
query = "SELECT * FROM profile WHERE name = %s"
|
2019-12-14 14:12:55 -05:00
|
|
|
args = (vm_profile,)
|
|
|
|
db_cur.execute(query, args)
|
|
|
|
profile_data = db_cur.fetchone()
|
2021-11-06 03:02:43 -04:00
|
|
|
if profile_data.get("arguments"):
|
|
|
|
vm_data["script_arguments"] = profile_data.get("arguments").split("|")
|
2020-01-07 01:09:29 -05:00
|
|
|
else:
|
2021-11-06 03:02:43 -04:00
|
|
|
vm_data["script_arguments"] = []
|
2020-11-06 19:05:48 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
if profile_data.get("profile_type") == "ova":
|
2020-02-18 14:42:45 -05:00
|
|
|
is_ova_install = True
|
2020-11-07 13:11:03 -05:00
|
|
|
is_script_install = False # By definition
|
2020-02-18 14:42:45 -05:00
|
|
|
else:
|
|
|
|
is_ova_install = False
|
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
# Get the system details
|
2021-11-06 03:02:43 -04:00
|
|
|
query = "SELECT * FROM system_template WHERE id = %s"
|
|
|
|
args = (profile_data["system_template"],)
|
2019-12-14 14:12:55 -05:00
|
|
|
db_cur.execute(query, args)
|
2021-11-06 03:02:43 -04:00
|
|
|
vm_data["system_details"] = db_cur.fetchone()
|
2019-12-14 14:12:55 -05:00
|
|
|
|
|
|
|
# Get the MAC template
|
2021-11-06 03:02:43 -04:00
|
|
|
query = "SELECT mac_template FROM network_template WHERE id = %s"
|
|
|
|
args = (profile_data["network_template"],)
|
2019-12-14 14:12:55 -05:00
|
|
|
db_cur.execute(query, args)
|
2020-02-18 14:42:45 -05:00
|
|
|
db_row = db_cur.fetchone()
|
|
|
|
if db_row:
|
2021-11-06 03:02:43 -04:00
|
|
|
vm_data["mac_template"] = db_row.get("mac_template")
|
2020-02-18 14:42:45 -05:00
|
|
|
else:
|
2021-11-06 03:02:43 -04:00
|
|
|
vm_data["mac_template"] = None
|
2019-12-14 14:12:55 -05:00
|
|
|
|
|
|
|
# Get the networks
|
2021-11-06 03:02:43 -04:00
|
|
|
query = "SELECT * FROM network WHERE network_template = %s"
|
|
|
|
args = (profile_data["network_template"],)
|
2019-12-14 14:12:55 -05:00
|
|
|
db_cur.execute(query, args)
|
2021-11-06 03:02:43 -04:00
|
|
|
vm_data["networks"] = db_cur.fetchall()
|
2019-12-14 14:12:55 -05:00
|
|
|
|
|
|
|
# Get the storage volumes
|
2020-01-20 21:12:33 -05:00
|
|
|
# ORDER BY ensures disks are always in the sdX/vdX order, regardless of add order
|
2021-11-06 03:02:43 -04:00
|
|
|
query = "SELECT * FROM storage WHERE storage_template = %s ORDER BY disk_id"
|
|
|
|
args = (profile_data["storage_template"],)
|
2019-12-14 14:12:55 -05:00
|
|
|
db_cur.execute(query, args)
|
2021-11-06 03:02:43 -04:00
|
|
|
vm_data["volumes"] = db_cur.fetchall()
|
2019-12-14 14:12:55 -05:00
|
|
|
|
|
|
|
# Get the script
|
2021-11-06 03:02:43 -04:00
|
|
|
query = "SELECT script FROM script WHERE id = %s"
|
|
|
|
args = (profile_data["script"],)
|
2019-12-14 14:12:55 -05:00
|
|
|
db_cur.execute(query, args)
|
2020-02-18 20:33:12 -05:00
|
|
|
db_row = db_cur.fetchone()
|
|
|
|
if db_row:
|
2021-11-06 03:02:43 -04:00
|
|
|
vm_data["script"] = db_row.get("script")
|
2020-02-18 20:33:12 -05:00
|
|
|
else:
|
2021-11-06 03:02:43 -04:00
|
|
|
vm_data["script"] = None
|
2020-01-05 16:35:55 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
if vm_data["script"] and not is_ova_install:
|
2020-01-05 16:35:55 -05:00
|
|
|
is_script_install = True
|
2020-02-18 14:42:45 -05:00
|
|
|
else:
|
|
|
|
is_script_install = False
|
|
|
|
|
|
|
|
# Get the OVA details
|
|
|
|
if is_ova_install:
|
2021-11-06 03:02:43 -04:00
|
|
|
query = "SELECT * FROM ova WHERE id = %s"
|
|
|
|
args = (profile_data["ova"],)
|
2020-02-18 14:42:45 -05:00
|
|
|
db_cur.execute(query, args)
|
2021-11-06 03:02:43 -04:00
|
|
|
vm_data["ova_details"] = db_cur.fetchone()
|
2020-02-18 14:42:45 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
query = "SELECT * FROM ova_volume WHERE ova = %s"
|
|
|
|
args = (profile_data["ova"],)
|
2020-02-18 14:42:45 -05:00
|
|
|
db_cur.execute(query, args)
|
2021-11-06 03:02:43 -04:00
|
|
|
vm_data["volumes"] = db_cur.fetchall()
|
2020-01-05 16:35:55 -05:00
|
|
|
|
2019-12-14 14:12:55 -05:00
|
|
|
close_database(db_conn, db_cur)
|
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
print(
|
|
|
|
"VM configuration data:\n{}".format(
|
|
|
|
json.dumps(vm_data, sort_keys=True, indent=2)
|
|
|
|
)
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
|
|
|
# Phase 2 - verification
|
|
|
|
# * Ensure that at least one node has enough free RAM to hold the VM (becomes main host)
|
|
|
|
# * Ensure that all networks are valid
|
|
|
|
# * Ensure that there is enough disk space in the Ceph cluster for the disks
|
|
|
|
# This is the "safe fail" step when an invalid configuration will be caught
|
2021-11-06 03:02:43 -04:00
|
|
|
self.update_state(
|
|
|
|
state="RUNNING",
|
|
|
|
meta={
|
|
|
|
"current": 2,
|
|
|
|
"total": 10,
|
|
|
|
"status": "Verifying configuration against cluster",
|
|
|
|
},
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
time.sleep(1)
|
|
|
|
|
|
|
|
# Verify that a VM with this name does not already exist
|
2021-05-29 00:26:15 -04:00
|
|
|
if pvc_vm.searchClusterByName(zkhandler, vm_name):
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ClusterError(
|
|
|
|
"A VM with the name '{}' already exists in the cluster.".format(vm_name)
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
|
|
|
# Verify that at least one host has enough free RAM to run the VM
|
2021-05-29 00:26:15 -04:00
|
|
|
_discard, nodes = pvc_node.get_list(zkhandler, None)
|
2019-12-14 14:12:55 -05:00
|
|
|
target_node = None
|
|
|
|
last_free = 0
|
|
|
|
for node in nodes:
|
|
|
|
# Skip the node if it is not ready to run VMs
|
2021-11-06 03:02:43 -04:00
|
|
|
if node["daemon_state"] != "run" or node["domain_state"] != "ready":
|
2019-12-14 14:12:55 -05:00
|
|
|
continue
|
|
|
|
# Skip the node if its free memory is less than the new VM's size, plus a 512MB buffer
|
2021-11-06 03:02:43 -04:00
|
|
|
if node["memory"]["free"] < (vm_data["system_details"]["vram_mb"] + 512):
|
2019-12-14 14:12:55 -05:00
|
|
|
continue
|
|
|
|
# If this node has the most free, use it
|
2021-11-06 03:02:43 -04:00
|
|
|
if node["memory"]["free"] > last_free:
|
|
|
|
last_free = node["memory"]["free"]
|
|
|
|
target_node = node["name"]
|
2019-12-14 14:12:55 -05:00
|
|
|
# Raise if no node was found
|
|
|
|
if not target_node:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ClusterError(
|
|
|
|
"No ready cluster node contains at least {}+512 MB of free RAM.".format(
|
|
|
|
vm_data["system_details"]["vram_mb"]
|
|
|
|
)
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
print(
|
|
|
|
'Selecting target node "{}" with "{}" MB free RAM'.format(
|
|
|
|
target_node, last_free
|
|
|
|
)
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
|
|
|
# Verify that all configured networks are present on the cluster
|
2021-05-29 00:26:15 -04:00
|
|
|
cluster_networks, _discard = pvc_network.getClusterNetworkList(zkhandler)
|
2021-11-06 03:02:43 -04:00
|
|
|
for network in vm_data["networks"]:
|
|
|
|
vni = str(network["vni"])
|
|
|
|
if vni not in cluster_networks and vni not in [
|
|
|
|
"upstream",
|
|
|
|
"cluster",
|
|
|
|
"storage",
|
|
|
|
]:
|
|
|
|
raise ClusterError(
|
|
|
|
'The network VNI "{}" is not present on the cluster.'.format(vni)
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
|
|
|
print("All configured networks for VM are valid")
|
|
|
|
|
|
|
|
# Verify that there is enough disk space free to provision all VM disks
|
|
|
|
pools = dict()
|
2021-11-06 03:02:43 -04:00
|
|
|
for volume in vm_data["volumes"]:
|
|
|
|
if volume.get("source_volume") is not None:
|
|
|
|
volume_data = pvc_ceph.getVolumeInformation(
|
|
|
|
zkhandler, volume["pool"], volume["source_volume"]
|
|
|
|
)
|
2021-03-09 16:59:49 -05:00
|
|
|
if not volume_data:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ClusterError(
|
|
|
|
"The source volume {}/{} could not be found.".format(
|
|
|
|
volume["pool"], volume["source_volume"]
|
|
|
|
)
|
|
|
|
)
|
|
|
|
if not volume["pool"] in pools:
|
|
|
|
pools[volume["pool"]] = int(
|
|
|
|
volume_data["stats"]["size"].replace("G", "")
|
|
|
|
)
|
2020-01-08 19:52:54 -05:00
|
|
|
else:
|
2021-11-06 03:02:43 -04:00
|
|
|
pools[volume["pool"]] += int(
|
|
|
|
volume_data["stats"]["size"].replace("G", "")
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
else:
|
2021-11-06 03:02:43 -04:00
|
|
|
if not volume["pool"] in pools:
|
|
|
|
pools[volume["pool"]] = volume["disk_size_gb"]
|
2020-01-08 19:52:54 -05:00
|
|
|
else:
|
2021-11-06 03:02:43 -04:00
|
|
|
pools[volume["pool"]] += volume["disk_size_gb"]
|
2019-12-14 14:12:55 -05:00
|
|
|
|
|
|
|
for pool in pools:
|
2020-03-15 16:58:13 -04:00
|
|
|
try:
|
2021-05-29 00:26:15 -04:00
|
|
|
pool_information = pvc_ceph.getPoolInformation(zkhandler, pool)
|
2020-03-15 16:58:13 -04:00
|
|
|
if not pool_information:
|
|
|
|
raise
|
2020-11-06 18:55:10 -05:00
|
|
|
except Exception:
|
2020-02-20 22:38:31 -05:00
|
|
|
raise ClusterError('Pool "{}" is not present on the cluster.'.format(pool))
|
2021-11-06 03:02:43 -04:00
|
|
|
pool_free_space_gb = int(
|
|
|
|
pool_information["stats"]["free_bytes"] / 1024 / 1024 / 1024
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
pool_vm_usage_gb = int(pools[pool])
|
|
|
|
|
|
|
|
if pool_vm_usage_gb >= pool_free_space_gb:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ClusterError(
|
|
|
|
'Pool "{}" has only {} GB free and VM requires {} GB.'.format(
|
|
|
|
pool, pool_free_space_gb, pool_vm_usage_gb
|
|
|
|
)
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
|
|
|
print("There is enough space on cluster to store VM volumes")
|
|
|
|
|
2020-02-18 14:42:45 -05:00
|
|
|
if not is_ova_install:
|
|
|
|
# Verify that every specified filesystem is valid
|
|
|
|
used_filesystems = list()
|
2021-11-06 03:02:43 -04:00
|
|
|
for volume in vm_data["volumes"]:
|
|
|
|
if volume["source_volume"] is not None:
|
2020-02-18 14:42:45 -05:00
|
|
|
continue
|
2021-11-06 03:02:43 -04:00
|
|
|
if volume["filesystem"] and volume["filesystem"] not in used_filesystems:
|
|
|
|
used_filesystems.append(volume["filesystem"])
|
2020-11-06 19:05:48 -05:00
|
|
|
|
2020-02-18 14:42:45 -05:00
|
|
|
for filesystem in used_filesystems:
|
2021-11-06 03:02:43 -04:00
|
|
|
if filesystem == "swap":
|
2020-02-18 14:42:45 -05:00
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command("which mkswap")
|
|
|
|
if retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
2021-11-06 03:27:07 -04:00
|
|
|
"Failed to find binary for mkswap: {}".format(stderr)
|
2021-11-06 03:02:43 -04:00
|
|
|
)
|
2020-02-18 14:42:45 -05:00
|
|
|
else:
|
2021-11-06 03:02:43 -04:00
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command(
|
|
|
|
"which mkfs.{}".format(filesystem)
|
|
|
|
)
|
2020-02-18 14:42:45 -05:00
|
|
|
if retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
"Failed to find binary for mkfs.{}: {}".format(
|
|
|
|
filesystem, stderr
|
|
|
|
)
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-02-18 14:42:45 -05:00
|
|
|
print("All selected filesystems are valid")
|
2019-12-14 14:12:55 -05:00
|
|
|
|
|
|
|
# Phase 3 - provisioning script preparation
|
|
|
|
# * Import the provisioning script as a library with importlib
|
|
|
|
# * Ensure the required function(s) are present
|
2021-11-06 03:02:43 -04:00
|
|
|
self.update_state(
|
|
|
|
state="RUNNING",
|
|
|
|
meta={"current": 3, "total": 10, "status": "Preparing provisioning script"},
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
time.sleep(1)
|
|
|
|
|
2020-01-05 16:35:55 -05:00
|
|
|
if is_script_install:
|
|
|
|
# Write the script out to a temporary file
|
2020-02-08 23:31:07 -05:00
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command("mktemp")
|
2020-01-05 16:35:55 -05:00
|
|
|
if retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
"Failed to create a temporary file: {}".format(stderr)
|
|
|
|
)
|
2020-01-05 16:35:55 -05:00
|
|
|
script_file = stdout.strip()
|
2021-11-06 03:02:43 -04:00
|
|
|
with open(script_file, "w") as fh:
|
|
|
|
fh.write(vm_data["script"])
|
|
|
|
fh.write("\n")
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-01-05 16:35:55 -05:00
|
|
|
# Import the script file
|
2021-11-06 03:02:43 -04:00
|
|
|
loader = importlib.machinery.SourceFileLoader("installer_script", script_file)
|
2020-01-05 16:35:55 -05:00
|
|
|
spec = importlib.util.spec_from_loader(loader.name, loader)
|
|
|
|
installer_script = importlib.util.module_from_spec(spec)
|
|
|
|
loader.exec_module(installer_script)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-01-05 16:35:55 -05:00
|
|
|
# Verify that the install() function is valid
|
2020-11-06 20:37:52 -05:00
|
|
|
if "install" not in dir(installer_script):
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
"Specified script does not contain an install() function."
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-01-05 16:35:55 -05:00
|
|
|
print("Provisioning script imported successfully")
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-01-08 17:47:05 -05:00
|
|
|
# Phase 4 - configuration creation
|
|
|
|
# * Create the libvirt XML configuration
|
2021-11-06 03:02:43 -04:00
|
|
|
self.update_state(
|
|
|
|
state="RUNNING",
|
|
|
|
meta={
|
|
|
|
"current": 4,
|
|
|
|
"total": 10,
|
|
|
|
"status": "Preparing Libvirt XML configuration",
|
|
|
|
},
|
|
|
|
)
|
2020-01-08 17:47:05 -05:00
|
|
|
time.sleep(1)
|
|
|
|
|
|
|
|
print("Creating Libvirt configuration")
|
|
|
|
|
|
|
|
# Get information about VM
|
|
|
|
vm_uuid = uuid.uuid4()
|
2021-11-06 03:02:43 -04:00
|
|
|
vm_description = "PVC provisioner @ {}, profile '{}'".format(
|
|
|
|
datetime.datetime.now(), vm_profile
|
|
|
|
)
|
2020-01-08 17:47:05 -05:00
|
|
|
|
2020-02-08 23:31:07 -05:00
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command("uname -m")
|
2020-01-08 17:47:05 -05:00
|
|
|
system_architecture = stdout.strip()
|
|
|
|
|
|
|
|
# Begin assembling libvirt schema
|
|
|
|
vm_schema = ""
|
|
|
|
|
|
|
|
vm_schema += libvirt_schema.libvirt_header.format(
|
|
|
|
vm_name=vm_name,
|
|
|
|
vm_uuid=vm_uuid,
|
|
|
|
vm_description=vm_description,
|
2021-11-06 03:02:43 -04:00
|
|
|
vm_memory=vm_data["system_details"]["vram_mb"],
|
|
|
|
vm_vcpus=vm_data["system_details"]["vcpu_count"],
|
|
|
|
vm_architecture=system_architecture,
|
2020-01-08 17:47:05 -05:00
|
|
|
)
|
|
|
|
|
2021-06-21 21:59:57 -04:00
|
|
|
# Add disk devices
|
|
|
|
monitor_list = list()
|
2021-11-06 03:02:43 -04:00
|
|
|
coordinator_names = config["storage_hosts"]
|
2021-06-21 21:59:57 -04:00
|
|
|
for coordinator in coordinator_names:
|
2021-11-06 03:02:43 -04:00
|
|
|
monitor_list.append("{}.{}".format(coordinator, config["storage_domain"]))
|
2021-06-21 21:59:57 -04:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
ceph_storage_secret = config["ceph_storage_secret_uuid"]
|
2021-06-21 21:59:57 -04:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
for volume in vm_data["volumes"]:
|
2021-06-21 21:59:57 -04:00
|
|
|
vm_schema += libvirt_schema.devices_disk_header.format(
|
|
|
|
ceph_storage_secret=ceph_storage_secret,
|
2021-11-06 03:02:43 -04:00
|
|
|
disk_pool=volume["pool"],
|
2021-06-21 21:59:57 -04:00
|
|
|
vm_name=vm_name,
|
2021-11-06 03:02:43 -04:00
|
|
|
disk_id=volume["disk_id"],
|
2021-06-21 21:59:57 -04:00
|
|
|
)
|
|
|
|
for monitor in monitor_list:
|
|
|
|
vm_schema += libvirt_schema.devices_disk_coordinator.format(
|
|
|
|
coordinator_name=monitor,
|
2021-11-06 03:02:43 -04:00
|
|
|
coordinator_ceph_mon_port=config["ceph_monitor_port"],
|
2021-06-21 21:59:57 -04:00
|
|
|
)
|
|
|
|
vm_schema += libvirt_schema.devices_disk_footer
|
|
|
|
|
|
|
|
vm_schema += libvirt_schema.devices_vhostmd
|
|
|
|
|
2020-01-08 17:47:05 -05:00
|
|
|
# Add network devices
|
|
|
|
network_id = 0
|
2021-11-06 03:02:43 -04:00
|
|
|
for network in vm_data["networks"]:
|
|
|
|
vni = network["vni"]
|
|
|
|
if vni in ["upstream", "cluster", "storage"]:
|
2021-06-02 15:57:46 -04:00
|
|
|
eth_bridge = "br{}".format(vni)
|
|
|
|
else:
|
|
|
|
eth_bridge = "vmbr{}".format(vni)
|
2020-01-08 17:47:05 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
vm_id_hex = "{:x}".format(int(vm_id % 16))
|
|
|
|
net_id_hex = "{:x}".format(int(network_id % 16))
|
2020-01-08 17:47:05 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
if vm_data.get("mac_template") is not None:
|
|
|
|
mac_prefix = "52:54:01"
|
|
|
|
macgen_template = vm_data["mac_template"]
|
2020-01-08 17:47:05 -05:00
|
|
|
eth_macaddr = macgen_template.format(
|
|
|
|
prefix=mac_prefix,
|
|
|
|
vmid=vm_id_hex,
|
|
|
|
netid=net_id_hex,
|
|
|
|
)
|
|
|
|
else:
|
2021-11-06 03:02:43 -04:00
|
|
|
mac_prefix = "52:54:00"
|
|
|
|
random_octet_A = "{:x}".format(random.randint(16, 238))
|
|
|
|
random_octet_B = "{:x}".format(random.randint(16, 238))
|
|
|
|
random_octet_C = "{:x}".format(random.randint(16, 238))
|
2020-01-08 17:47:05 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
macgen_template = "{prefix}:{octetA}:{octetB}:{octetC}"
|
2020-01-08 17:47:05 -05:00
|
|
|
eth_macaddr = macgen_template.format(
|
|
|
|
prefix=mac_prefix,
|
|
|
|
octetA=random_octet_A,
|
|
|
|
octetB=random_octet_B,
|
2021-11-06 03:02:43 -04:00
|
|
|
octetC=random_octet_C,
|
2020-01-08 17:47:05 -05:00
|
|
|
)
|
|
|
|
|
|
|
|
vm_schema += libvirt_schema.devices_net_interface.format(
|
2021-11-06 03:02:43 -04:00
|
|
|
eth_macaddr=eth_macaddr, eth_bridge=eth_bridge
|
2020-01-08 17:47:05 -05:00
|
|
|
)
|
|
|
|
|
|
|
|
network_id += 1
|
|
|
|
|
|
|
|
# Add default devices
|
|
|
|
vm_schema += libvirt_schema.devices_default
|
|
|
|
|
|
|
|
# Add serial device
|
2021-11-06 03:02:43 -04:00
|
|
|
if vm_data["system_details"]["serial"]:
|
|
|
|
vm_schema += libvirt_schema.devices_serial.format(vm_name=vm_name)
|
2020-01-08 17:47:05 -05:00
|
|
|
|
|
|
|
# Add VNC device
|
2021-11-06 03:02:43 -04:00
|
|
|
if vm_data["system_details"]["vnc"]:
|
|
|
|
if vm_data["system_details"]["vnc_bind"]:
|
|
|
|
vm_vnc_bind = vm_data["system_details"]["vnc_bind"]
|
2020-01-08 17:47:05 -05:00
|
|
|
else:
|
|
|
|
vm_vnc_bind = "127.0.0.1"
|
|
|
|
|
|
|
|
vm_vncport = 5900
|
|
|
|
vm_vnc_autoport = "yes"
|
|
|
|
|
|
|
|
vm_schema += libvirt_schema.devices_vnc.format(
|
|
|
|
vm_vncport=vm_vncport,
|
|
|
|
vm_vnc_autoport=vm_vnc_autoport,
|
2021-11-06 03:02:43 -04:00
|
|
|
vm_vnc_bind=vm_vnc_bind,
|
2020-01-08 17:47:05 -05:00
|
|
|
)
|
|
|
|
|
|
|
|
# Add SCSI controller
|
|
|
|
vm_schema += libvirt_schema.devices_scsi_controller
|
|
|
|
|
|
|
|
# Add footer
|
|
|
|
vm_schema += libvirt_schema.libvirt_footer
|
|
|
|
|
|
|
|
print("Final VM schema:\n{}\n".format(vm_schema))
|
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
# All the following steps may require cleanup later on, so catch them here and do cleanup in a Finally block
|
|
|
|
try:
|
|
|
|
# Phase 5 - definition
|
|
|
|
# * Create the VM in the PVC cluster
|
2021-11-06 03:02:43 -04:00
|
|
|
self.update_state(
|
|
|
|
state="RUNNING",
|
|
|
|
meta={"current": 5, "total": 10, "status": "Defining VM on the cluster"},
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
time.sleep(1)
|
|
|
|
|
|
|
|
if define_vm:
|
|
|
|
print("Defining VM on cluster")
|
2021-11-06 03:02:43 -04:00
|
|
|
node_limit = vm_data["system_details"]["node_limit"]
|
2020-08-06 12:24:04 -04:00
|
|
|
if node_limit:
|
2021-11-06 03:02:43 -04:00
|
|
|
node_limit = node_limit.split(",")
|
|
|
|
node_selector = vm_data["system_details"]["node_selector"]
|
|
|
|
node_autostart = vm_data["system_details"]["node_autostart"]
|
|
|
|
migration_method = vm_data["system_details"]["migration_method"]
|
|
|
|
retcode, retmsg = pvc_vm.define_vm(
|
|
|
|
zkhandler,
|
|
|
|
vm_schema.strip(),
|
|
|
|
target_node,
|
|
|
|
node_limit,
|
|
|
|
node_selector,
|
|
|
|
node_autostart,
|
|
|
|
migration_method,
|
|
|
|
vm_profile,
|
|
|
|
initial_state="provision",
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
print(retmsg)
|
2020-01-08 19:52:54 -05:00
|
|
|
else:
|
2020-08-06 12:24:04 -04:00
|
|
|
print("Skipping VM definition")
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
# Phase 6 - disk creation
|
|
|
|
# * Create each Ceph storage volume for the disks
|
2021-11-06 03:02:43 -04:00
|
|
|
self.update_state(
|
|
|
|
state="RUNNING",
|
|
|
|
meta={"current": 6, "total": 10, "status": "Creating storage volumes"},
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
time.sleep(1)
|
2020-11-06 19:05:48 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
for volume in vm_data["volumes"]:
|
|
|
|
if volume.get("source_volume") is not None:
|
|
|
|
success, message = pvc_ceph.clone_volume(
|
|
|
|
zkhandler,
|
|
|
|
volume["pool"],
|
|
|
|
volume["source_volume"],
|
|
|
|
"{}_{}".format(vm_name, volume["disk_id"]),
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
print(message)
|
|
|
|
if not success:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
'Failed to clone volume "{}" to "{}".'.format(
|
|
|
|
volume["source_volume"], volume["disk_id"]
|
|
|
|
)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
else:
|
2021-11-06 03:02:43 -04:00
|
|
|
success, message = pvc_ceph.add_volume(
|
|
|
|
zkhandler,
|
|
|
|
volume["pool"],
|
|
|
|
"{}_{}".format(vm_name, volume["disk_id"]),
|
|
|
|
"{}G".format(volume["disk_size_gb"]),
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
print(message)
|
|
|
|
if not success:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
'Failed to create volume "{}".'.format(volume["disk_id"])
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
|
|
|
|
# Phase 7 - disk mapping
|
|
|
|
# * Map each volume to the local host in order
|
|
|
|
# * Format each volume with any specified filesystems
|
|
|
|
# * If any mountpoints are specified, create a temporary mount directory
|
|
|
|
# * Mount any volumes to their respective mountpoints
|
2021-11-06 03:02:43 -04:00
|
|
|
self.update_state(
|
|
|
|
state="RUNNING",
|
|
|
|
meta={
|
|
|
|
"current": 7,
|
|
|
|
"total": 10,
|
|
|
|
"status": "Mapping, formatting, and mounting storage volumes locally",
|
|
|
|
},
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
time.sleep(1)
|
2020-02-18 14:42:45 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
for volume in vm_data["volumes"]:
|
|
|
|
dst_volume_name = "{}_{}".format(vm_name, volume["disk_id"])
|
|
|
|
dst_volume = "{}/{}".format(volume["pool"], dst_volume_name)
|
2020-08-06 12:24:04 -04:00
|
|
|
|
|
|
|
if is_ova_install:
|
2021-11-06 03:02:43 -04:00
|
|
|
src_volume_name = volume["volume_name"]
|
|
|
|
src_volume = "{}/{}".format(volume["pool"], src_volume_name)
|
2020-08-06 12:24:04 -04:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
print(
|
|
|
|
"Converting {} source volume {} to raw format on {}".format(
|
|
|
|
volume["volume_format"], src_volume, dst_volume
|
|
|
|
)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
|
|
|
|
# Map the target RBD device
|
2021-11-06 03:02:43 -04:00
|
|
|
retcode, retmsg = pvc_ceph.map_volume(
|
|
|
|
zkhandler, volume["pool"], dst_volume_name
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
if not retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
'Failed to map destination volume "{}": {}'.format(
|
|
|
|
dst_volume_name, retmsg
|
|
|
|
)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
# Map the source RBD device
|
2021-11-06 03:02:43 -04:00
|
|
|
retcode, retmsg = pvc_ceph.map_volume(
|
|
|
|
zkhandler, volume["pool"], src_volume_name
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
if not retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
'Failed to map source volume "{}": {}'.format(
|
|
|
|
src_volume_name, retmsg
|
|
|
|
)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
# Convert from source to target
|
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command(
|
2021-11-06 03:02:43 -04:00
|
|
|
"qemu-img convert -C -f {} -O raw {} {}".format(
|
|
|
|
volume["volume_format"],
|
2020-08-06 12:24:04 -04:00
|
|
|
"/dev/rbd/{}".format(src_volume),
|
2021-11-06 03:02:43 -04:00
|
|
|
"/dev/rbd/{}".format(dst_volume),
|
2020-08-06 12:24:04 -04:00
|
|
|
)
|
|
|
|
)
|
2020-02-18 14:42:45 -05:00
|
|
|
if retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
'Failed to convert {} volume "{}" to raw volume "{}": {}'.format(
|
|
|
|
volume["volume_format"], src_volume, dst_volume, stderr
|
|
|
|
)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
|
|
|
|
# Unmap the source RBD device (don't bother later)
|
2021-11-06 03:02:43 -04:00
|
|
|
retcode, retmsg = pvc_ceph.unmap_volume(
|
|
|
|
zkhandler, volume["pool"], src_volume_name
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
if not retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
'Failed to unmap source volume "{}": {}'.format(
|
|
|
|
src_volume_name, retmsg
|
|
|
|
)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
# Unmap the target RBD device (don't bother later)
|
2021-11-06 03:02:43 -04:00
|
|
|
retcode, retmsg = pvc_ceph.unmap_volume(
|
|
|
|
zkhandler, volume["pool"], dst_volume_name
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
if not retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
'Failed to unmap destination volume "{}": {}'.format(
|
|
|
|
dst_volume_name, retmsg
|
|
|
|
)
|
|
|
|
)
|
2020-02-18 14:42:45 -05:00
|
|
|
else:
|
2021-11-06 03:02:43 -04:00
|
|
|
if volume.get("source_volume") is not None:
|
2020-08-06 12:24:04 -04:00
|
|
|
continue
|
2020-11-06 19:05:48 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
if volume.get("filesystem") is None:
|
2020-08-06 12:24:04 -04:00
|
|
|
continue
|
|
|
|
|
|
|
|
filesystem_args_list = list()
|
2021-11-06 03:02:43 -04:00
|
|
|
for arg in volume["filesystem_args"].split():
|
|
|
|
arg_entry, *arg_data = arg.split("=")
|
|
|
|
arg_data = "=".join(arg_data)
|
2020-08-06 12:24:04 -04:00
|
|
|
filesystem_args_list.append(arg_entry)
|
|
|
|
filesystem_args_list.append(arg_data)
|
2021-11-06 03:02:43 -04:00
|
|
|
filesystem_args = " ".join(filesystem_args_list)
|
2020-08-06 12:24:04 -04:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
print(
|
|
|
|
"Creating {} filesystem on {}".format(
|
|
|
|
volume["filesystem"], dst_volume
|
|
|
|
)
|
|
|
|
)
|
2021-08-24 11:40:22 -04:00
|
|
|
print("Args: {}".format(filesystem_args))
|
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
# Map the RBD device
|
2021-11-06 03:02:43 -04:00
|
|
|
retcode, retmsg = pvc_ceph.map_volume(
|
|
|
|
zkhandler, volume["pool"], dst_volume_name
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
if not retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
'Failed to map volume "{}": {}'.format(dst_volume, retmsg)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
|
|
|
|
# Create the filesystem
|
2021-11-06 03:02:43 -04:00
|
|
|
if volume["filesystem"] == "swap":
|
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command(
|
|
|
|
"mkswap -f /dev/rbd/{}".format(dst_volume)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
if retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
'Failed to create swap on "{}": {}'.format(
|
|
|
|
dst_volume, stderr
|
|
|
|
)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
else:
|
2021-11-06 03:02:43 -04:00
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command(
|
|
|
|
"mkfs.{} {} /dev/rbd/{}".format(
|
|
|
|
volume["filesystem"], filesystem_args, dst_volume
|
|
|
|
)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
if retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
'Failed to create {} filesystem on "{}": {}'.format(
|
|
|
|
volume["filesystem"], dst_volume, stderr
|
|
|
|
)
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2021-08-24 11:40:22 -04:00
|
|
|
print(stdout)
|
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
if is_script_install:
|
|
|
|
# Create temporary directory
|
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command("mktemp -d")
|
|
|
|
if retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
"Failed to create a temporary directory: {}".format(stderr)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
temp_dir = stdout.strip()
|
2020-01-08 19:52:54 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
for volume in vm_data["volumes"]:
|
|
|
|
if volume["source_volume"] is not None:
|
2020-08-06 12:24:04 -04:00
|
|
|
continue
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
if not volume["mountpoint"] or volume["mountpoint"] == "swap":
|
2020-08-06 12:24:04 -04:00
|
|
|
continue
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
mapped_dst_volume = "/dev/rbd/{}/{}_{}".format(
|
|
|
|
volume["pool"], vm_name, volume["disk_id"]
|
|
|
|
)
|
|
|
|
mount_path = "{}{}".format(temp_dir, volume["mountpoint"])
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
# Ensure the mount path exists (within the filesystems)
|
2021-11-06 03:02:43 -04:00
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command(
|
|
|
|
"mkdir -p {}".format(mount_path)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
if retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
'Failed to create mountpoint "{}": {}'.format(
|
|
|
|
mount_path, stderr
|
|
|
|
)
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
# Mount filesystems to temporary directory
|
2021-11-06 03:02:43 -04:00
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command(
|
|
|
|
"mount {} {}".format(mapped_dst_volume, mount_path)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
if retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError(
|
|
|
|
'Failed to mount "{}" on "{}": {}'.format(
|
|
|
|
mapped_dst_volume, mount_path, stderr
|
|
|
|
)
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
print(
|
|
|
|
"Successfully mounted {} on {}".format(
|
|
|
|
mapped_dst_volume, mount_path
|
|
|
|
)
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
# Phase 8 - provisioning script execution
|
|
|
|
# * Execute the provisioning script main function ("install") passing any custom arguments
|
2021-11-06 03:02:43 -04:00
|
|
|
self.update_state(
|
|
|
|
state="RUNNING",
|
|
|
|
meta={"current": 8, "total": 10, "status": "Executing provisioning script"},
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
time.sleep(1)
|
2020-01-05 16:35:55 -05:00
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
if is_script_install:
|
|
|
|
print("Running installer script")
|
2020-01-05 16:35:55 -05:00
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
# Parse the script arguments
|
|
|
|
script_arguments = dict()
|
2021-11-06 03:02:43 -04:00
|
|
|
for argument in vm_data["script_arguments"]:
|
|
|
|
argument_name, argument_data = argument.split("=")
|
2020-07-16 02:36:26 -04:00
|
|
|
script_arguments[argument_name] = argument_data
|
2020-07-08 13:18:12 -04:00
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
# Parse the runtime arguments
|
|
|
|
if script_run_args is not None:
|
|
|
|
for argument in script_run_args:
|
2021-11-06 03:02:43 -04:00
|
|
|
argument_name, argument_data = argument.split("=")
|
2020-08-06 12:24:04 -04:00
|
|
|
script_arguments[argument_name] = argument_data
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
print("Script arguments: {}".format(script_arguments))
|
2020-01-08 19:52:54 -05:00
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
# Run the script
|
|
|
|
try:
|
|
|
|
installer_script.install(
|
|
|
|
vm_name=vm_name,
|
|
|
|
vm_id=vm_id,
|
|
|
|
temporary_directory=temp_dir,
|
2021-11-06 03:02:43 -04:00
|
|
|
disks=vm_data["volumes"],
|
|
|
|
networks=vm_data["networks"],
|
2020-08-06 12:24:04 -04:00
|
|
|
**script_arguments
|
|
|
|
)
|
|
|
|
except Exception as e:
|
2021-11-06 03:02:43 -04:00
|
|
|
raise ProvisioningError("Failed to run install script: {}".format(e))
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
except Exception as e:
|
|
|
|
start_vm = False
|
|
|
|
raise e
|
|
|
|
|
|
|
|
# Always perform the cleanup steps
|
|
|
|
finally:
|
|
|
|
# Phase 9 - install cleanup
|
|
|
|
# * Unmount any mounted volumes
|
|
|
|
# * Remove any temporary directories
|
2021-11-06 03:02:43 -04:00
|
|
|
self.update_state(
|
|
|
|
state="RUNNING",
|
|
|
|
meta={
|
|
|
|
"current": 9,
|
|
|
|
"total": 10,
|
|
|
|
"status": "Cleaning up local mounts and directories",
|
|
|
|
},
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
time.sleep(1)
|
|
|
|
|
|
|
|
if not is_ova_install:
|
2021-11-06 03:02:43 -04:00
|
|
|
for volume in list(reversed(vm_data["volumes"])):
|
|
|
|
if volume.get("source_volume") is not None:
|
2020-08-06 12:24:04 -04:00
|
|
|
continue
|
|
|
|
|
|
|
|
if is_script_install:
|
|
|
|
# Unmount the volume
|
2021-11-06 03:02:43 -04:00
|
|
|
if (
|
|
|
|
volume.get("mountpoint") is not None
|
|
|
|
and volume.get("mountpoint") != "swap"
|
|
|
|
):
|
|
|
|
print(
|
|
|
|
"Cleaning up mount {}{}".format(
|
|
|
|
temp_dir, volume["mountpoint"]
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
mount_path = "{}{}".format(temp_dir, volume["mountpoint"])
|
2020-08-06 12:24:04 -04:00
|
|
|
|
|
|
|
# Make sure any bind mounts or submounts are unmounted first
|
2021-11-06 03:02:43 -04:00
|
|
|
if volume["mountpoint"] == "/":
|
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command(
|
|
|
|
"umount {}/**/**".format(mount_path)
|
|
|
|
)
|
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command(
|
|
|
|
"umount {}/**".format(mount_path)
|
|
|
|
)
|
|
|
|
|
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command(
|
|
|
|
"umount {}".format(mount_path)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
if retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
print(
|
|
|
|
'Failed to unmount "{}": {}'.format(mount_path, stderr)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
|
|
|
|
# Unmap the RBD device
|
2021-11-06 03:02:43 -04:00
|
|
|
if volume["filesystem"]:
|
|
|
|
print(
|
|
|
|
"Cleaning up RBD mapping /dev/rbd/{}/{}_{}".format(
|
|
|
|
volume["pool"], vm_name, volume["disk_id"]
|
|
|
|
)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
rbd_volume = "/dev/rbd/{}/{}_{}".format(
|
|
|
|
volume["pool"], vm_name, volume["disk_id"]
|
|
|
|
)
|
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command(
|
|
|
|
"rbd unmap {}".format(rbd_volume)
|
|
|
|
)
|
2020-02-18 14:42:45 -05:00
|
|
|
if retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
print(
|
|
|
|
'Failed to unmap volume "{}": {}'.format(rbd_volume, stderr)
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
print("Cleaning up temporary directories and files")
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
if is_script_install:
|
|
|
|
# Remove temporary mount directory (don't fail if not removed)
|
2021-11-06 03:02:43 -04:00
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command(
|
|
|
|
"rmdir {}".format(temp_dir)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
if retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
print(
|
|
|
|
'Failed to delete temporary directory "{}": {}'.format(
|
|
|
|
temp_dir, stderr
|
|
|
|
)
|
|
|
|
)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2020-08-06 12:24:04 -04:00
|
|
|
# Remove temporary script (don't fail if not removed)
|
2021-11-06 03:02:43 -04:00
|
|
|
retcode, stdout, stderr = pvc_common.run_os_command(
|
|
|
|
"rm -f {}".format(script_file)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
if retcode:
|
2021-11-06 03:02:43 -04:00
|
|
|
print(
|
|
|
|
'Failed to delete temporary script file "{}": {}'.format(
|
|
|
|
script_file, stderr
|
|
|
|
)
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
|
|
|
|
# Phase 10 - startup
|
|
|
|
# * Start the VM in the PVC cluster
|
|
|
|
if start_vm:
|
2021-11-06 03:02:43 -04:00
|
|
|
self.update_state(
|
|
|
|
state="RUNNING",
|
|
|
|
meta={"current": 10, "total": 10, "status": "Starting VM"},
|
|
|
|
)
|
2020-08-06 12:24:04 -04:00
|
|
|
time.sleep(1)
|
2021-05-29 00:26:15 -04:00
|
|
|
retcode, retmsg = pvc_vm.start_vm(zkhandler, vm_name)
|
2020-08-06 12:24:04 -04:00
|
|
|
print(retmsg)
|
2019-12-14 14:12:55 -05:00
|
|
|
|
2021-05-30 15:59:37 -04:00
|
|
|
zkhandler.disconnect()
|
|
|
|
del zkhandler
|
|
|
|
|
2021-11-06 03:02:43 -04:00
|
|
|
return {
|
|
|
|
"status": 'VM "{}" with profile "{}" has been provisioned and started successfully'.format(
|
|
|
|
vm_name, vm_profile
|
|
|
|
),
|
|
|
|
"current": 10,
|
|
|
|
"total": 10,
|
|
|
|
}
|