Add node log functions to API and CLI
commit a088aa4484
parent 323c7c41ae
@@ -834,6 +834,52 @@ class API_Node_DomainState(Resource):
api.add_resource(API_Node_DomainState, '/node/<node>/domain-state')


# /node/<node>/log
class API_Node_Log(Resource):
    @RequestParser([
        {'name': 'lines'}
    ])
    @Authenticator
    def get(self, node, reqargs):
        """
        Return the recent logs of {node}
        ---
        tags:
          - node
        parameters:
          - in: query
            name: lines
            type: integer
            required: false
            description: The number of lines to retrieve
        responses:
          200:
            description: OK
            schema:
              type: object
              id: NodeLog
              properties:
                name:
                  type: string
                  description: The name of the Node
                data:
                  type: string
                  description: The recent log text
          404:
            description: Node not found
            schema:
              type: object
              id: Message
        """
        return api_helper.node_log(
            node,
            reqargs.get('lines', None)
        )


api.add_resource(API_Node_Log, '/node/<node>/log')


##########################################################
# Client API - VM
##########################################################
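Once the resource is registered, the endpoint can be exercised directly over HTTP. Below is a minimal sketch using the requests library; the base URL, port, node name, and the X-Api-Key header are illustrative assumptions that depend on the local API daemon configuration.

import requests

# Hypothetical API address and node name; adjust to the local cluster
resp = requests.get(
    'http://pvc.local:7370/api/v1/node/hv1/log',
    headers={'X-Api-Key': 'token'},   # only needed if API authentication is enabled
    params={'lines': 50},
)
resp.raise_for_status()
print(resp.json()['data'])            # the NodeLog "data" field carries the raw log text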
@@ -307,6 +307,34 @@ def node_ready(zkhandler, node, wait):
    return output, retcode


@ZKConnection(config)
def node_log(zkhandler, node, lines=None):
    """
    Return the current logs for Node.
    """
    # Default to 10 lines of log if not set
    try:
        lines = int(lines)
    except TypeError:
        lines = 10

    retflag, retdata = pvc_node.get_node_log(zkhandler, node, lines)

    if retflag:
        retcode = 200
        retdata = {
            'name': node,
            'data': retdata
        }
    else:
        retcode = 400
        retdata = {
            'message': retdata
        }

    return retdata, retcode


#
# VM functions
#
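Because int(None) raises TypeError, a request that omits the ?lines= query parameter falls back to the 10-line default, while an explicit value is converted from its string form. A small standalone illustration of that defaulting logic:

for raw in (None, '25'):
    try:
        lines = int(raw)
    except TypeError:
        lines = 10
    print(raw, '->', lines)   # None -> 10, '25' -> 25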
@@ -19,6 +19,8 @@
#
###############################################################################

import time

import pvc.cli_lib.ansiprint as ansiprint
from pvc.cli_lib.common import call_api
@@ -69,6 +71,91 @@ def node_domain_state(config, node, action, wait):
    return retstatus, response.json().get('message', '')


def view_node_log(config, node, lines=100):
    """
    Return node log lines from the API (and display them in a pager in the main CLI)

    API endpoint: GET /node/{node}/log
    API arguments: lines={lines}
    API schema: {"name":"{node}","data":"{node_log}"}
    """
    params = {
        'lines': lines
    }
    response = call_api(config, 'get', '/node/{node}/log'.format(node=node), params=params)

    if response.status_code != 200:
        return False, response.json().get('message', '')

    node_log = response.json()['data']

    # Shrink the log buffer to length lines
    shrunk_log = node_log.split('\n')[-lines:]
    loglines = '\n'.join(shrunk_log)

    return True, loglines


def follow_node_log(config, node, lines=10):
    """
    Return and follow node log lines from the API

    API endpoint: GET /node/{node}/log
    API arguments: lines={lines}
    API schema: {"name":"{nodename}","data":"{node_log}"}
    """
    # We always grab 500 to match the follow call, but only _show_ `lines` number
    params = {
        'lines': 500
    }
    response = call_api(config, 'get', '/node/{node}/log'.format(node=node), params=params)

    if response.status_code != 200:
        return False, response.json().get('message', '')

    # Shrink the log buffer to length lines
    node_log = response.json()['data']
    shrunk_log = node_log.split('\n')[-int(lines):]
    loglines = '\n'.join(shrunk_log)

    # Print the initial data and begin following
    print(loglines, end='')

    while True:
        # Grab the next line set (500 is a reasonable number of lines per second; any more are skipped)
        try:
            params = {
                'lines': 500
            }
            response = call_api(config, 'get', '/node/{node}/log'.format(node=node), params=params)
            new_node_log = response.json()['data']
        except Exception:
            break
        # Split the new and old log strings into constituent lines
        old_node_loglines = node_log.split('\n')
        new_node_loglines = new_node_log.split('\n')
        # Set the node log to the new log value for the next iteration
        node_log = new_node_log
        # Remove the lines from the old log until we hit the first line of the new log; this
        # ensures that the old log is a string that we can remove from the new log entirely
        for index, line in enumerate(old_node_loglines, start=0):
            if line == new_node_loglines[0]:
                del old_node_loglines[0:index]
                break
        # Rejoin the log lines into strings
        old_node_log = '\n'.join(old_node_loglines)
        new_node_log = '\n'.join(new_node_loglines)
        # Remove the old lines from the new log
        diff_node_log = new_node_log.replace(old_node_log, "")
        # If there's a difference, print it out
        if diff_node_log:
            print(diff_node_log, end='')
        # Wait a second
        time.sleep(1)

    return True, ''


def node_info(config, node):
    """
    Get information about node
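The follow loop avoids re-printing lines it has already shown by diffing the previous and current buffers rather than tracking byte offsets. A minimal standalone sketch of that diffing step, using toy buffer contents:

old_log = 'a\nb\nc'
new_log = 'b\nc\nd\ne'          # buffer rolled forward: 'a' dropped, 'd' and 'e' appended

old_lines = old_log.split('\n')
new_lines = new_log.split('\n')

# Drop leading old lines until the first line of the new buffer is found
for index, line in enumerate(old_lines):
    if line == new_lines[0]:
        del old_lines[0:index]
        break

# The remaining old text is a prefix of the new text; removing it leaves only the appended entries
print('\n'.join(new_lines).replace('\n'.join(old_lines), ''), end='')   # prints "\nd\ne"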
@@ -540,6 +540,43 @@ def node_unflush(node, wait):
    cleanup(retcode, retmsg)


###############################################################################
# pvc node log
###############################################################################
@click.command(name='log', short_help='Show logs of a node.')
@click.argument(
    'node'
)
@click.option(
    '-l', '--lines', 'lines', default=None, show_default=False,
    help='Display this many log lines from the end of the log buffer. [default: 1000; with follow: 10]'
)
@click.option(
    '-f', '--follow', 'follow', is_flag=True, default=False,
    help='Follow the log buffer; output may be delayed by a few seconds relative to the live system. The --lines value defaults to 10 for the initial output.'
)
@cluster_req
def node_log(node, lines, follow):
    """
    Show the logs of NODE in a pager or, with --follow, continuously.
    """

    # Set the default here so we can handle it
    if lines is None:
        if follow:
            lines = 10
        else:
            lines = 1000

    if follow:
        retcode, retmsg = pvc_node.follow_node_log(config, node, lines)
    else:
        retcode, retmsg = pvc_node.view_node_log(config, node, lines)
        click.echo_via_pager(retmsg)
        retmsg = ''
    cleanup(retcode, retmsg)


###############################################################################
# pvc node info
###############################################################################
@@ -4707,6 +4744,7 @@ cli_node.add_command(node_primary)
cli_node.add_command(node_flush)
cli_node.add_command(node_ready)
cli_node.add_command(node_unflush)
cli_node.add_command(node_log)
cli_node.add_command(node_info)
cli_node.add_command(node_list)
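With the command registered in the cli_node group, the last lines of a node's log buffer can be viewed in a pager with, for example, "pvc node log -l 200 <node>", or followed live with "pvc node log -f <node>" (which starts from the final 10 lines by default, per the option defaults above).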
@@ -182,6 +182,24 @@ def ready_node(zkhandler, node, wait=False):
    return True, retmsg


def get_node_log(zkhandler, node, lines=2000):
    # Verify node is valid
    if not common.verifyNode(zkhandler, node):
        return False, 'ERROR: No node named "{}" is present in the cluster.'.format(node)

    # Get the data from ZK
    node_log = zkhandler.read(('logs.messages', node))

    if node_log is None:
        return True, ''

    # Shrink the log buffer to length lines
    shrunk_log = node_log.split('\n')[-lines:]
    loglines = '\n'.join(shrunk_log)

    return True, loglines


def get_info(zkhandler, node):
    # Verify node is valid
    if not common.verifyNode(zkhandler, node):
@@ -144,6 +144,19 @@
            },
            "type": "object"
        },
        "NodeLog": {
            "properties": {
                "data": {
                    "description": "The recent log text",
                    "type": "string"
                },
                "name": {
                    "description": "The name of the Node",
                    "type": "string"
                }
            },
            "type": "object"
        },
        "VMLog": {
            "properties": {
                "data": {
@@ -2665,6 +2678,38 @@
                ]
            }
        },
        "/api/v1/node/{node}/log": {
            "get": {
                "description": "",
                "parameters": [
                    {
                        "description": "The number of lines to retrieve",
                        "in": "query",
                        "name": "lines",
                        "required": false,
                        "type": "integer"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/NodeLog"
                        }
                    },
                    "404": {
                        "description": "Node not found",
                        "schema": {
                            "$ref": "#/definitions/Message"
                        }
                    }
                },
                "summary": "Return the recent logs of {node}",
                "tags": [
                    "node"
                ]
            }
        },
        "/api/v1/provisioner/create": {
            "post": {
                "description": "Note: Starts a background job in the pvc-provisioner-worker Celery worker while returning a task ID; the task ID can be used to query the \"GET /provisioner/status/<task_id>\" endpoint for the job status",