Commit 829c7de3 authored by Ondřej Lysoněk's avatar Ondřej Lysoněk

Support sending logs over DBus

Add two DBus calls:
log_capture_start(log_level, timeout)
 * This will instruct Tuned to create a new log handler, which will
   start writing log messages with level 'log_level' and higher to
   a buffer. The log levels are standard Python log levels (e.g.
   logging.INFO) or Tuned's custom log level LOG_LEVEL_CONSOLE which
   is defined as 60. The handler will be destroyed after 'timeout' seconds
   if contents of the buffer are not collected using log_capture_finish()
before the timeout. If 'timeout' <= 0, log messages will be collected
   indefinitely, until log_capture_finish() is called (use
   with care, so that you don't fill up memory with Tuned logs). This
   call returns a string ID of the log handler, a token, which should
   be passed to log_capture_finish().

log_capture_finish(token)
 * This will return (as a string) log messages collected by a log handler
   associated with the 'token'. It will also destroy the log handler.

These calls are privileged. They can only be called by the root user or
by a user logged in on a local console (just like the change-profile DBus
calls). This restriction is meant to prevent ordinary users from forcing
Tuned to allocate an insane amount of memory for the log buffers and crash
it (or other processes). In the future we may wish to replace this
restriction with a configurable policy which determines how many (and how
big) log buffers a user with a given UID can create. We may also want to
destroy the log handler when the original caller disconnects from DBus.
Signed-off-by: Ondřej Lysoněk <olysonek@redhat.com>
parent 1be86c85
* Implement a configurable policy which determines how many (and how big)
log buffers can a user with a given UID create using the log_capture_start
DBus call.
* Destroy the log handler created by log_capture_start() when the caller
disconnects from DBus.
* Use only one timer for destroying log handlers at a time. Create a new
timer as necessary when the old timer fires.
......@@ -127,6 +127,26 @@
</defaults>
</action>
<action id="com.redhat.tuned.log_capture_start">
<description>Start log capture</description>
<message>Authentication is required to start log capture</message>
<defaults>
<allow_any>auth_admin</allow_any>
<allow_inactive>auth_admin</allow_inactive>
<allow_active>yes</allow_active>
</defaults>
</action>
<action id="com.redhat.tuned.log_capture_finish">
<description>Stop log capture and return captured log</description>
<message>Authentication is required to stop log capture</message>
<defaults>
<allow_any>auth_admin</allow_any>
<allow_inactive>auth_admin</allow_inactive>
<allow_active>yes</allow_active>
</defaults>
</action>
<action id="com.redhat.tuned.auto_profile">
<description>Enable automatic profile selection mode</description>
<message>Authentication is required to change profile selection mode</message>
......
......@@ -10,6 +10,30 @@ __all__ = ["Controller"]
log = tuned.logs.get()
class TimerStore(object):
def __init__(self):
self._timers = dict()
self._timers_lock = threading.Lock()
def store_timer(self, token, timer):
with self._timers_lock:
self._timers[token] = timer
def drop_timer(self, token):
with self._timers_lock:
try:
timer = self._timers[token]
timer.cancel()
del self._timers[token]
except:
pass
def cancel_all(self):
with self._timers_lock:
for timer in self._timers.values():
timer.cancel()
self._timers.clear()
class Controller(tuned.exports.interfaces.ExportableInterface):
"""
Controller's purpose is to keep the program running, start/stop the tuning,
......@@ -22,6 +46,7 @@ class Controller(tuned.exports.interfaces.ExportableInterface):
self._global_config = global_config
self._terminate = threading.Event()
self._cmd = commands()
self._timer_store = TimerStore()
def run(self):
"""
......@@ -54,6 +79,32 @@ class Controller(tuned.exports.interfaces.ExportableInterface):
# identifying caller (with DBus it's the caller bus name) if authorized and empty
# string if not authorized, caller must be the last argument
def _log_capture_abort(self, token):
tuned.logs.log_capture_finish(token)
self._timer_store.drop_timer(token)
@exports.export("ii", "s")
def log_capture_start(self, log_level, timeout, caller = None):
if caller == "":
return ""
token = tuned.logs.log_capture_start(log_level)
if token is None:
return ""
if timeout > 0:
timer = threading.Timer(timeout,
self._log_capture_abort, args = [token])
self._timer_store.store_timer(token, timer)
timer.start()
return "" if token is None else token
@exports.export("s", "s")
def log_capture_finish(self, token, caller = None):
if caller == "":
return ""
res = tuned.logs.log_capture_finish(token)
self._timer_store.drop_timer(token)
return "" if res is None else res
@exports.export("", "b")
def start(self, caller = None):
if caller == "":
......@@ -70,9 +121,11 @@ class Controller(tuned.exports.interfaces.ExportableInterface):
if caller == "":
return False
if not self._daemon.is_running():
return True
res = True
else:
return self._daemon.stop()
res = self._daemon.stop()
self._timer_store.cancel_all()
return res
@exports.export("", "b")
def reload(self, caller = None):
......
......@@ -5,11 +5,67 @@ import os
import os.path
import inspect
import tuned.consts as consts
import random
import string
import threading
try:
from StringIO import StringIO
except:
from io import StringIO
__all__ = ["get"]
root_logger = None
log_handlers = {}
log_handlers_lock = threading.Lock()
class LogHandler(object):
def __init__(self, handler, stream):
self.handler = handler
self.stream = stream
def _random_string(length):
r = random.SystemRandom()
chars = string.ascii_letters + string.digits
res = ""
for i in range(length):
res += random.choice(chars)
return res
def log_capture_start(log_level):
with log_handlers_lock:
for i in range(10):
token = _random_string(16)
if token not in log_handlers:
break
else:
return None
stream = StringIO()
handler = logging.StreamHandler(stream)
handler.setLevel(log_level)
formatter = logging.Formatter(
"%(levelname)-8s %(name)s: %(message)s")
handler.setFormatter(formatter)
root_logger.addHandler(handler)
log_handler = LogHandler(handler, stream)
log_handlers[token] = log_handler
root_logger.debug("Added log handler %s." % token)
return token
def log_capture_finish(token):
with log_handlers_lock:
try:
log_handler = log_handlers[token]
except KeyError:
return None
content = log_handler.stream.getvalue()
log_handler.stream.close()
root_logger.removeHandler(log_handler.handler)
del log_handlers[token]
root_logger.debug("Removed log handler %s." % token)
return content
def get():
global root_logger
if root_logger is None:
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment