Compare commits

..

9 Commits

Author SHA1 Message Date
Joshua Boniface
52aa351c60 Move to lazy imports and custom API client 2025-03-13 00:26:47 -04:00
bb77c5f1fc Move lxml imports to runtime
Avoid loading these if unneeded
2025-03-12 23:50:12 -04:00
fc740927cc Switch to modern Python build system
Remove setuptools and use pyproject.toml instead.
2025-03-12 23:46:52 -04:00
34149fe933 Move multipart.encoder import to runtime
Another library with a ridiculously long load time.
2025-03-12 23:11:42 -04:00
a2fed1885c Remove distutils strtobool
This import takes a ridiculously long time just to implement a function
that can be done in one line in O(1) time.
2025-03-12 23:09:44 -04:00
ee055bdb81 Improve loading efficiency of common.py 2025-03-12 22:55:11 -04:00
60967b5606 Fix formatters colour bug for mirror state 2025-03-02 14:58:26 -05:00
89bfbe1fd8 Add translation of domain UUIDs to names
Allows frontends to better handle the domain list gracefully, as humans
don't care about the UUIDs.
2025-02-28 21:52:42 -05:00
be092756a9 Add cluster name to Zookeeper and log+API output 2025-02-27 00:57:07 -05:00
18 changed files with 570 additions and 341 deletions

1
.gitignore vendored
View File

@@ -8,3 +8,4 @@ debian/pvc-*/
debian/*.log debian/*.log
debian/*.substvars debian/*.substvars
debian/files debian/files
client-cli/build/

View File

@@ -81,6 +81,7 @@ def create_app():
print("|--------------------------------------------------------------|") print("|--------------------------------------------------------------|")
print("| Parallel Virtual Cluster API daemon v{0: <23} |".format(version)) print("| Parallel Virtual Cluster API daemon v{0: <23} |".format(version))
print("| Debug: {0: <53} |".format(str(config["debug"]))) print("| Debug: {0: <53} |".format(str(config["debug"])))
print("| Cluster: {0: <51} |".format(config["cluster_name"]))
print("| API version: v{0: <46} |".format(API_VERSION)) print("| API version: v{0: <46} |".format(API_VERSION))
print( print(
"| Listen: {0: <52} |".format( "| Listen: {0: <52} |".format(

File diff suppressed because it is too large Load Diff

View File

@@ -221,7 +221,7 @@ def cli_cluster_status_format_pretty(CLI_CONFIG, data):
continue continue
if state in ["start"]: if state in ["start"]:
state_colour = ansii["green"] state_colour = ansii["green"]
elif state in ["migrate", "disable", "provision", "mirror"]: elif state in ["migrate", "disable", "provision"]:
state_colour = ansii["blue"] state_colour = ansii["blue"]
elif state in ["mirror"]: elif state in ["mirror"]:
state_colour = ansii["purple"] state_colour = ansii["purple"]

View File

@@ -20,7 +20,6 @@
############################################################################### ###############################################################################
from click import echo as click_echo from click import echo as click_echo
from distutils.util import strtobool
from json import load as jload from json import load as jload
from json import dump as jdump from json import dump as jdump
from os import chmod, environ, getpid, path, get_terminal_size from os import chmod, environ, getpid, path, get_terminal_size
@@ -150,9 +149,7 @@ def get_config(store_data, connection=None):
if connection == "local": if connection == "local":
config["verify_ssl"] = False config["verify_ssl"] = False
else: else:
config["verify_ssl"] = bool( config["verify_ssl"] = environ.get("PVC_CLIENT_VERIFY_SSL", "True") == "True"
strtobool(environ.get("PVC_CLIENT_VERIFY_SSL", "True"))
)
return config return config

View File

@@ -19,13 +19,15 @@
# #
############################################################################### ###############################################################################
import os
import math
import time
import requests
import click
from ast import literal_eval from ast import literal_eval
from urllib3 import disable_warnings from click import echo, progressbar
from math import ceil
from os.path import getsize
from time import time
import socket
import json
import ssl
import base64
def format_bytes(size_bytes): def format_bytes(size_bytes):
@@ -39,7 +41,7 @@ def format_bytes(size_bytes):
} }
human_bytes = "0B" human_bytes = "0B"
for unit in sorted(byte_unit_matrix, key=byte_unit_matrix.get): for unit in sorted(byte_unit_matrix, key=byte_unit_matrix.get):
formatted_bytes = int(math.ceil(size_bytes / byte_unit_matrix[unit])) formatted_bytes = int(ceil(size_bytes / byte_unit_matrix[unit]))
if formatted_bytes < 10000: if formatted_bytes < 10000:
human_bytes = "{}{}".format(formatted_bytes, unit) human_bytes = "{}{}".format(formatted_bytes, unit)
break break
@@ -57,7 +59,7 @@ def format_metric(integer):
} }
human_integer = "0" human_integer = "0"
for unit in sorted(integer_unit_matrix, key=integer_unit_matrix.get): for unit in sorted(integer_unit_matrix, key=integer_unit_matrix.get):
formatted_integer = int(math.ceil(integer / integer_unit_matrix[unit])) formatted_integer = int(ceil(integer / integer_unit_matrix[unit]))
if formatted_integer < 10000: if formatted_integer < 10000:
human_integer = "{}{}".format(formatted_integer, unit) human_integer = "{}{}".format(formatted_integer, unit)
break break
@@ -97,12 +99,12 @@ def format_age(age_secs):
class UploadProgressBar(object): class UploadProgressBar(object):
def __init__(self, filename, end_message="", end_nl=True): def __init__(self, filename, end_message="", end_nl=True):
file_size = os.path.getsize(filename) file_size = getsize(filename)
file_size_human = format_bytes(file_size) file_size_human = format_bytes(file_size)
click.echo("Uploading file (total size {})...".format(file_size_human)) echo("Uploading file (total size {})...".format(file_size_human))
self.length = file_size self.length = file_size
self.time_last = int(round(time.time() * 1000)) - 1000 self.time_last = int(round(time() * 1000)) - 1000
self.bytes_last = 0 self.bytes_last = 0
self.bytes_diff = 0 self.bytes_diff = 0
self.is_end = False self.is_end = False
@@ -114,7 +116,7 @@ class UploadProgressBar(object):
else: else:
self.end_suffix = "" self.end_suffix = ""
self.bar = click.progressbar(length=self.length, width=20, show_eta=True) self.bar = progressbar(length=self.length, width=20, show_eta=True)
def update(self, monitor): def update(self, monitor):
bytes_cur = monitor.bytes_read bytes_cur = monitor.bytes_read
@@ -123,7 +125,7 @@ class UploadProgressBar(object):
self.is_end = True self.is_end = True
self.bytes_last = bytes_cur self.bytes_last = bytes_cur
time_cur = int(round(time.time() * 1000)) time_cur = int(round(time() * 1000))
if (time_cur - 1000) > self.time_last: if (time_cur - 1000) > self.time_last:
self.time_last = time_cur self.time_last = time_cur
self.bar.update(self.bytes_diff) self.bar.update(self.bytes_diff)
@@ -132,20 +134,192 @@ class UploadProgressBar(object):
if self.is_end: if self.is_end:
self.bar.update(self.bytes_diff) self.bar.update(self.bytes_diff)
self.bytes_diff = 0 self.bytes_diff = 0
click.echo() echo()
click.echo() echo()
if self.end_message: if self.end_message:
click.echo(self.end_message + self.end_suffix, nl=self.end_nl) echo(self.end_message + self.end_suffix, nl=self.end_nl)
class ErrorResponse(requests.Response): class Response:
def __init__(self, json_data, status_code, headers): """Minimal Response class to replace requests.Response"""
self.json_data = json_data def __init__(self, status_code, headers, content):
self.status_code = status_code self.status_code = status_code
self.headers = headers self.headers = headers
self.content = content
self._json = None
def json(self): def json(self):
return self.json_data if self._json is None:
try:
self._json = json.loads(self.content.decode('utf-8'))
except json.JSONDecodeError:
self._json = {}
return self._json
class ConnectionError(Exception):
    """Raised when the PVC API cannot be reached.

    Lightweight stand-in for requests.exceptions.ConnectionError now that
    the requests library is no longer imported. NOTE(review): this
    intentionally shadows the builtin ConnectionError within this module —
    confirm no caller relies on catching the builtin here.
    """
class ErrorResponse(Response):
    """Response subclass for errors synthesized on the client side.

    Constructed from already-decoded JSON data rather than raw bytes, so
    json() simply hands back what the constructor received. The .content
    attribute is kept consistent with Response for callers that read it.
    """

    def __init__(self, json_data, status_code, headers):
        self.status_code = status_code
        self.headers = headers
        self._json = json_data
        # Serialize back to bytes only so .content stays usable downstream
        self.content = b'' if not json_data else json.dumps(json_data).encode('utf-8')

    def json(self):
        return self._json
def _parse_url(url):
"""Simple URL parser without using urllib"""
if '://' in url:
scheme, rest = url.split('://', 1)
else:
scheme = 'http'
rest = url
if '/' in rest:
host_port, path = rest.split('/', 1)
path = '/' + path
else:
host_port = rest
path = '/'
if ':' in host_port:
host, port_str = host_port.split(':', 1)
port = int(port_str)
else:
host = host_port
port = 443 if scheme == 'https' else 80
return scheme, host, port, path
def _encode_params(params):
"""Simple URL parameter encoder"""
if not params:
return ''
parts = []
for key, value in params.items():
if isinstance(value, bool):
value = str(value).lower()
elif value is None:
value = ''
else:
value = str(value)
parts.append(f"{key}={value}")
return '?' + '&'.join(parts)
def _make_request(method, url, headers=None, params=None, data=None, files=None, timeout=5, verify=True, data_timeout=172800):
    """Minimal socket-level HTTP/1.1 client replacing requests for API calls.

    method: HTTP verb ("GET", "POST", ...).
    url: full URL; parsed with _parse_url.
    headers: optional dict of request headers (copied, never mutated).
    params: optional dict of query parameters, encoded via _encode_params.
    data: dict (sent as JSON) or str/bytes body; with *files*, a dict of
        plain multipart form fields instead.
    files: dict of {field: (filename, fileobj, content_type)} for
        multipart/form-data upload.
    timeout: connect timeout in seconds.
    verify: verify TLS certificates when the scheme is https.
    data_timeout: socket timeout applied after the connection is
        established (default 48 hours, matching the long data timeout the
        previous requests-based client used).

    Returns a Response; raises ConnectionError on a malformed reply.
    NOTE(review): chunked transfer-encoding is not decoded — the body is
    returned exactly as read until the peer closes the connection; confirm
    the API never chunks its responses.
    """
    # Copy so retries / callers never see our Content-Type mutation
    headers = dict(headers) if headers else {}
    scheme, host, port, path = _parse_url(url)

    # Add query parameters
    path += _encode_params(params)

    # Prepare the request body
    body = None
    if data is not None and files is None:
        if isinstance(data, dict):
            body = json.dumps(data).encode('utf-8')
            if 'Content-Type' not in headers:
                headers['Content-Type'] = 'application/json'
        else:
            body = data.encode('utf-8') if isinstance(data, str) else data

    # Handle file uploads
    if files:
        boundary = f'----WebKitFormBoundary{int(time())}'
        headers['Content-Type'] = f'multipart/form-data; boundary={boundary}'
        chunks = []
        # Add plain form fields
        if data:
            for key, value in data.items():
                chunks.append(f'--{boundary}\r\n'.encode())
                chunks.append(f'Content-Disposition: form-data; name="{key}"\r\n\r\n'.encode())
                chunks.append(f'{value}\r\n'.encode())
        # Add files, using the caller-supplied filename (was hard-coded)
        for key, file_tuple in files.items():
            filename, fileobj, content_type = file_tuple
            chunks.append(f'--{boundary}\r\n'.encode())
            chunks.append(f'Content-Disposition: form-data; name="{key}"; filename="{filename}"\r\n'.encode())
            chunks.append(f'Content-Type: {content_type}\r\n\r\n'.encode())
            chunks.append(fileobj.read())
            chunks.append(b'\r\n')
        chunks.append(f'--{boundary}--\r\n'.encode())
        # join() instead of repeated bytes += (quadratic for large uploads)
        body = b''.join(chunks)

    # Create socket
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(timeout)
    try:
        # Handle SSL for HTTPS
        if scheme == 'https':
            context = ssl.create_default_context()
            if not verify:
                context.check_hostname = False
                context.verify_mode = ssl.CERT_NONE
            s = context.wrap_socket(s, server_hostname=host)

        # Connect with the short connect timeout, then switch to the long
        # data timeout so slow transfers are not cut off mid-stream
        s.connect((host, port))
        s.settimeout(data_timeout)

        # Build request
        request = f"{method} {path} HTTP/1.1\r\n"
        request += f"Host: {host}\r\n"
        for key, value in headers.items():
            request += f"{key}: {value}\r\n"
        if body:
            request += f"Content-Length: {len(body)}\r\n"
        request += "Connection: close\r\n\r\n"

        # Send request
        s.sendall(request.encode('utf-8'))
        if body:
            s.sendall(body)

        # Read response until the server closes the connection
        response_data = b""
        while True:
            chunk = s.recv(4096)
            if not chunk:
                break
            response_data += chunk

        # Split headers from body; guard against a truncated reply
        header_end = response_data.find(b'\r\n\r\n')
        if header_end < 0:
            raise ConnectionError("Malformed HTTP response (no header terminator)")
        headers_raw = response_data[:header_end].decode('utf-8')
        body_raw = response_data[header_end + 4:]

        # Parse status code from e.g. "HTTP/1.1 200 OK"
        status_line = headers_raw.split('\r\n')[0]
        status_code = int(status_line.split(' ')[1])

        # Parse headers, skipping blank or malformed lines
        headers_dict = {}
        for line in headers_raw.split('\r\n')[1:]:
            if not line or ':' not in line:
                continue
            key, value = line.split(':', 1)
            headers_dict[key.strip()] = value.strip()

        return Response(status_code, headers_dict, body_raw)
    finally:
        s.close()
def call_api( def call_api(
@@ -158,7 +332,7 @@ def call_api(
files=None, files=None,
): ):
# Set the connect timeout to 2 seconds but extremely long (48 hour) data timeout # Set the connect timeout to 2 seconds but extremely long (48 hour) data timeout
timeout = (2.05, 172800) timeout = 2.05
# Craft the URI # Craft the URI
uri = "{}://{}{}{}".format( uri = "{}://{}{}{}".format(
@@ -170,7 +344,6 @@ def call_api(
headers["X-Api-Key"] = config["api_key"] headers["X-Api-Key"] = config["api_key"]
# Determine the request type and hit the API # Determine the request type and hit the API
disable_warnings()
try: try:
response = None response = None
if operation == "get": if operation == "get":
@@ -178,63 +351,66 @@ def call_api(
for i in range(3): for i in range(3):
failed = False failed = False
try: try:
response = requests.get( response = _make_request(
"GET",
uri, uri,
timeout=timeout,
headers=headers, headers=headers,
params=params, params=params,
data=data, data=data,
verify=config["verify_ssl"], timeout=timeout,
verify=config["verify_ssl"]
) )
if response.status_code in retry_on_code: if response.status_code in retry_on_code:
failed = True failed = True
continue continue
break break
except requests.exceptions.ConnectionError: except Exception:
failed = True failed = True
continue continue
if failed: if failed:
error = f"Code {response.status_code}" if response else "Timeout" error = f"Code {response.status_code}" if response else "Timeout"
raise requests.exceptions.ConnectionError( raise ConnectionError(f"Failed to connect after 3 tries ({error})")
f"Failed to connect after 3 tries ({error})" elif operation == "post":
) response = _make_request(
if operation == "post": "POST",
response = requests.post(
uri, uri,
timeout=timeout,
headers=headers, headers=headers,
params=params, params=params,
data=data, data=data,
files=files, files=files,
verify=config["verify_ssl"],
)
if operation == "put":
response = requests.put(
uri,
timeout=timeout, timeout=timeout,
verify=config["verify_ssl"]
)
elif operation == "put":
response = _make_request(
"PUT",
uri,
headers=headers, headers=headers,
params=params, params=params,
data=data, data=data,
files=files, files=files,
verify=config["verify_ssl"],
)
if operation == "patch":
response = requests.patch(
uri,
timeout=timeout, timeout=timeout,
verify=config["verify_ssl"]
)
elif operation == "patch":
response = _make_request(
"PATCH",
uri,
headers=headers, headers=headers,
params=params, params=params,
data=data, data=data,
verify=config["verify_ssl"],
)
if operation == "delete":
response = requests.delete(
uri,
timeout=timeout, timeout=timeout,
verify=config["verify_ssl"]
)
elif operation == "delete":
response = _make_request(
"DELETE",
uri,
headers=headers, headers=headers,
params=params, params=params,
data=data, data=data,
verify=config["verify_ssl"], timeout=timeout,
verify=config["verify_ssl"]
) )
except Exception as e: except Exception as e:
message = "Failed to connect to the API: {}".format(e) message = "Failed to connect to the API: {}".format(e)
@@ -243,10 +419,10 @@ def call_api(
# Display debug output # Display debug output
if config["debug"]: if config["debug"]:
click.echo("API endpoint: {}".format(uri), err=True) echo("API endpoint: {}".format(uri), err=True)
click.echo("Response code: {}".format(response.status_code), err=True) echo("Response code: {}".format(response.status_code), err=True)
click.echo("Response headers: {}".format(response.headers), err=True) echo("Response headers: {}".format(response.headers), err=True)
click.echo(err=True) echo(err=True)
# Return the response object # Return the response object
return response return response

View File

@@ -0,0 +1,17 @@
"""
Lazy import mechanism for PVC CLI to reduce startup time
"""
class LazyModule:
    """Deferred-import proxy: the real module is loaded on first attribute use.

    Keeps CLI startup fast by postponing expensive imports until needed.
    NOTE(review): the proxy's own attributes ("name", "_module") shadow any
    same-named attributes on the proxied module (e.g. a proxied "os" would
    report name == "os", not os.name) — confirm no caller depends on those.
    """

    def __init__(self, name):
        self.name = name
        self._module = None

    def __getattr__(self, attr):
        # Only invoked for attributes missing on the proxy itself, so the
        # bookkeeping fields set in __init__ never recurse into here.
        module = self._module
        if module is None:
            from importlib import import_module

            module = import_module(self.name)
            self._module = module
        return getattr(module, attr)

View File

@@ -19,11 +19,6 @@
# #
############################################################################### ###############################################################################
from requests_toolbelt.multipart.encoder import (
MultipartEncoder,
MultipartEncoderMonitor,
)
import pvc.lib.ansiprint as ansiprint import pvc.lib.ansiprint as ansiprint
from pvc.lib.common import UploadProgressBar, call_api, get_wait_retdata from pvc.lib.common import UploadProgressBar, call_api, get_wait_retdata
@@ -549,6 +544,12 @@ def ova_upload(config, name, ova_file, params):
bar = UploadProgressBar( bar = UploadProgressBar(
ova_file, end_message="Parsing file on remote side...", end_nl=False ova_file, end_message="Parsing file on remote side...", end_nl=False
) )
from requests_toolbelt.multipart.encoder import (
MultipartEncoder,
MultipartEncoderMonitor,
)
upload_data = MultipartEncoder( upload_data = MultipartEncoder(
fields={"file": ("filename", open(ova_file, "rb"), "application/octet-stream")} fields={"file": ("filename", open(ova_file, "rb"), "application/octet-stream")}
) )

View File

@@ -23,10 +23,6 @@ import math
from os import path from os import path
from json import loads from json import loads
from requests_toolbelt.multipart.encoder import (
MultipartEncoder,
MultipartEncoderMonitor,
)
import pvc.lib.ansiprint as ansiprint import pvc.lib.ansiprint as ansiprint
from pvc.lib.common import UploadProgressBar, call_api, get_wait_retdata from pvc.lib.common import UploadProgressBar, call_api, get_wait_retdata
@@ -1212,6 +1208,12 @@ def ceph_volume_upload(config, pool, volume, image_format, image_file):
bar = UploadProgressBar( bar = UploadProgressBar(
image_file, end_message="Parsing file on remote side...", end_nl=False image_file, end_message="Parsing file on remote side...", end_nl=False
) )
from requests_toolbelt.multipart.encoder import (
MultipartEncoder,
MultipartEncoderMonitor,
)
upload_data = MultipartEncoder( upload_data = MultipartEncoder(
fields={ fields={
"file": ("filename", open(image_file, "rb"), "application/octet-stream") "file": ("filename", open(image_file, "rb"), "application/octet-stream")

21
client-cli/pyproject.toml Normal file
View File

@@ -0,0 +1,21 @@
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
[project]
name = "pvc"
version = "0.9.107"
dependencies = [
"Click",
"PyYAML",
"lxml",
"colorama",
"requests",
"requests-toolbelt",
]
[tool.setuptools]
packages = ["pvc.cli", "pvc.lib"]
[project.scripts]
pvc = "pvc.cli.cli:cli"

View File

@@ -1,20 +0,0 @@
from setuptools import setup
setup(
name="pvc",
version="0.9.107",
packages=["pvc.cli", "pvc.lib"],
install_requires=[
"Click",
"PyYAML",
"lxml",
"colorama",
"requests",
"requests-toolbelt",
],
entry_points={
"console_scripts": [
"pvc = pvc.cli.cli:cli",
],
},
)

View File

@@ -496,6 +496,7 @@ def getClusterInformation(zkhandler):
# Format the status data # Format the status data
cluster_information = { cluster_information = {
"cluster_name": zkhandler.read("base.config"),
"cluster_health": getClusterHealthFromFaults(zkhandler, faults_data), "cluster_health": getClusterHealthFromFaults(zkhandler, faults_data),
"node_health": getNodeHealth(zkhandler, node_list), "node_health": getNodeHealth(zkhandler, node_list),
"maintenance": maintenance_state, "maintenance": maintenance_state,

View File

@@ -1212,3 +1212,7 @@ def get_detect_device(detect_string):
return device return device
else: else:
return None return None
def translate_domains_to_names(zkhandler, domain_list):
    """Resolve domain UUIDs to their human-readable names.

    Issues a single batched Zookeeper read (one "domain.name" key per UUID)
    via read_many rather than one round-trip per domain, returning the
    names as a list in the same order as *domain_list*.
    """
    keys = [("domain.name", domain) for domain in domain_list]
    return list(zkhandler.read_many(keys))

View File

@@ -142,7 +142,9 @@ def getNodeInformation(zkhandler, node_name):
node_mem_free = int(_node_mem_free) node_mem_free = int(_node_mem_free)
node_load = float(_node_load) node_load = float(_node_load)
node_domains_count = int(_node_domains_count) node_domains_count = int(_node_domains_count)
node_running_domains = _node_running_domains.split() node_running_domains = common.translate_domains_to_names(
zkhandler, _node_running_domains.split()
)
try: try:
node_health = int(_node_health) node_health = int(_node_health)

2
debian/compat vendored
View File

@@ -1 +1 @@
9 13

5
debian/rules vendored
View File

@@ -7,13 +7,14 @@ export DH_VERBOSE = 1
dh $@ --with python3 dh $@ --with python3
override_dh_python3: override_dh_python3:
cd $(CURDIR)/client-cli; pybuild --system=distutils --dest-dir=../debian/pvc-client-cli/ cd $(CURDIR)/client-cli; pybuild --system=pyproject --dest-dir=../debian/pvc-client-cli/
mkdir -p debian/pvc-client-cli/usr/lib/python3 mkdir -p debian/pvc-client-cli/usr/lib/python3
mv debian/pvc-client-cli/usr/lib/python3*/* debian/pvc-client-cli/usr/lib/python3/ mv debian/pvc-client-cli/usr/lib/python3*/* debian/pvc-client-cli/usr/lib/python3/
rm -r $(CURDIR)/client-cli/.pybuild $(CURDIR)/client-cli/pvc.egg-info rm -r $(CURDIR)/client-cli/.pybuild $(CURDIR)/client-cli/pvc.egg-info
override_dh_auto_clean: override_dh_auto_clean:
find . -name "__pycache__" -o -name ".pybuild" -exec rm -fr {} + || true find $(CURDIR) -name "__pycache__" -o -name ".pybuild" -exec rm -fr {} + || true
rm -r $(CURDIR)/client-cli/build
# If you need to rebuild the Sphinx documentation # If you need to rebuild the Sphinx documentation
# Add sphinxdoc to the dh --with line # Add sphinxdoc to the dh --with line

View File

@@ -64,6 +64,7 @@ def entrypoint():
logger.out("|--------------------------------------------------------------|") logger.out("|--------------------------------------------------------------|")
logger.out("| Parallel Virtual Cluster health daemon v{0: <20} |".format(version)) logger.out("| Parallel Virtual Cluster health daemon v{0: <20} |".format(version))
logger.out("| Debug: {0: <53} |".format(str(config["debug"]))) logger.out("| Debug: {0: <53} |".format(str(config["debug"])))
logger.out("| Cluster: {0: <51} |".format(config["cluster_name"]))
logger.out("| FQDN: {0: <54} |".format(config["node_fqdn"])) logger.out("| FQDN: {0: <54} |".format(config["node_fqdn"]))
logger.out("| Host: {0: <54} |".format(config["node_hostname"])) logger.out("| Host: {0: <54} |".format(config["node_hostname"]))
logger.out("| ID: {0: <56} |".format(config["node_id"])) logger.out("| ID: {0: <56} |".format(config["node_id"]))

View File

@@ -83,6 +83,7 @@ def entrypoint():
logger.out("|--------------------------------------------------------------|") logger.out("|--------------------------------------------------------------|")
logger.out("| Parallel Virtual Cluster node daemon v{0: <22} |".format(version)) logger.out("| Parallel Virtual Cluster node daemon v{0: <22} |".format(version))
logger.out("| Debug: {0: <53} |".format(str(config["debug"]))) logger.out("| Debug: {0: <53} |".format(str(config["debug"])))
logger.out("| Cluster: {0: <51} |".format(config["cluster_name"]))
logger.out("| FQDN: {0: <54} |".format(config["node_fqdn"])) logger.out("| FQDN: {0: <54} |".format(config["node_fqdn"]))
logger.out("| Host: {0: <54} |".format(config["node_hostname"])) logger.out("| Host: {0: <54} |".format(config["node_hostname"]))
logger.out("| ID: {0: <56} |".format(config["node_id"])) logger.out("| ID: {0: <56} |".format(config["node_id"]))
@@ -301,6 +302,9 @@ def entrypoint():
# Set up this node in Zookeeper # Set up this node in Zookeeper
pvcnoded.util.zookeeper.setup_node(logger, config, zkhandler) pvcnoded.util.zookeeper.setup_node(logger, config, zkhandler)
# Set the cluster name in Zookeeper
zkhandler.write([("base.config", config["cluster_name"])])
# Check that the primary node key exists and create it with us as primary if not # Check that the primary node key exists and create it with us as primary if not
try: try:
current_primary = zkhandler.read("base.config.primary_node") current_primary = zkhandler.read("base.config.primary_node")