systemd-python: Update _reader docstrings
[elogind.git] / src / python-systemd / journal.py
index 40e40c360b262a292dc445a4c62e032c0f129523..279662e6a674f295e464d76938588f7b1e247f68 100644 (file)
@@ -26,11 +26,14 @@ import uuid
 import traceback as _traceback
 import os as _os
 import logging as _logging
+if sys.version_info >= (3, 3):  # collections.ChainMap is new in Python 3.3
+    from collections import ChainMap
 from syslog import (LOG_EMERG, LOG_ALERT, LOG_CRIT, LOG_ERR,
                     LOG_WARNING, LOG_NOTICE, LOG_INFO, LOG_DEBUG)
 from ._journal import sendv, stream_fd
 from ._reader import (_Journal, NOP, APPEND, INVALIDATE,
                       LOCAL_ONLY, RUNTIME_ONLY, SYSTEM_ONLY)
+from . import id128 as _id128
 
 _MONOTONIC_CONVERTER = lambda x: datetime.timedelta(microseconds=float(x))
 _REALTIME_CONVERTER = lambda x: datetime.datetime.fromtimestamp(float(x)/1E6)
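
For reference, the two converter lambdas above are what map raw journal timestamp
fields (microsecond strings) into Python objects. A standalone sketch of their
behaviour, with made-up input values:

    import datetime

    _MONOTONIC_CONVERTER = lambda x: datetime.timedelta(microseconds=float(x))
    _REALTIME_CONVERTER = lambda x: datetime.datetime.fromtimestamp(float(x)/1E6)

    _MONOTONIC_CONVERTER('5000000')          # timedelta of 5 seconds since boot
    _REALTIME_CONVERTER('1361923200000000')  # local-time datetime for that epoch value in microseconds
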
@@ -89,7 +92,7 @@ class Journal(_Journal):
     def _convert_field(self, key, value):
         try:
             result = self.converters[key](value)
-        except KeyError:
+        except Exception:
             # Default conversion to unicode
             try:
                 result = _convert_unicode(value)
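
The fallback chain in _convert_field can be sketched on its own. _convert_unicode
is defined elsewhere in this file, so a plain UTF-8 decode stands in for it here,
and the sample fields and values are invented:

    def _convert_field_sketch(converters, key, value):
        try:
            return converters[key](value)       # registered converter, if any
        except Exception:
            try:
                return value.decode('utf-8')    # stand-in for _convert_unicode
            except Exception:
                return value                    # leave as raw bytes

    _convert_field_sketch({'PRIORITY': int}, 'PRIORITY', b'6')  # -> 6
    _convert_field_sketch({'PRIORITY': int}, 'MESSAGE', b'hi')  # -> 'hi'
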
@@ -100,20 +103,36 @@ class Journal(_Journal):
 
     def _convert_entry(self, entry):
         result = {}
-        for key, value in entry.iteritems():
+        for key, value in entry.items():
             if isinstance(value, list):
                 result[key] = [self._convert_field(key, val) for val in value]
             else:
                 result[key] = self._convert_field(key, value)
         return result
 
-    def get_next(self, *args, **kwargs):
+    def add_match(self, *args, **kwargs):
+        args = list(args)
+        args.extend(_make_line(key, val) for key, val in kwargs.items())
+        for arg in args:
+            super(Journal, self).add_match(arg)
+
+    def get_next(self, skip=1):
         return self._convert_entry(
-            super(Journal, self).get_next(*args, **kwargs))
+            super(Journal, self).get_next(skip))
 
-    def query_unique(self, key, *args, **kwargs):
+    def query_unique(self, key):
         return set(self._convert_field(key, value)
-            for value in super(Journal, self).query_unique(key, *args, **kwargs))
+            for value in super(Journal, self).query_unique(key))
+
+    def seek_realtime(self, timestamp):
+        if isinstance(timestamp, datetime.datetime):
+            timestamp = int(timestamp.strftime("%s%f"))  # microseconds since the epoch (%s is a non-standard strftime extension)
+        return super(Journal, self).seek_realtime(timestamp)
+
+    def seek_monotonic(self, timestamp, bootid=None):
+        if isinstance(timestamp, datetime.timedelta):
+            timestamp = timestamp.total_seconds()
+        return super(Journal, self).seek_monotonic(timestamp, bootid)
 
     def log_level(self, level):
         """Sets maximum log level by setting matches for PRIORITY."""
@@ -123,6 +142,14 @@ class Journal(_Journal):
         else:
             raise ValueError("Log level must be 0 <= level <= 7")
 
+    def this_boot(self):
+        """Add match for _BOOT_ID equal to current boot ID."""
+        self.add_match(_BOOT_ID=_id128.get_boot().hex)
+
+    def this_machine(self):
+        """Add match for _MACHINE_ID equal to the ID of this machine."""
+        self.add_match(_MACHINE_ID=_id128.get_machine().hex)
+
 def _make_line(field, value):
     if isinstance(value, bytes):
         return field.encode('utf-8') + b'=' + value
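
The keyword form of add_match() relies on _make_line() to build the FIELD=value
strings handed to the C layer. The non-bytes branch of the function is cut off in
this excerpt, so the else clause below is an assumption, not the actual code:

    def _make_line_sketch(field, value):
        if isinstance(value, bytes):
            return field.encode('utf-8') + b'=' + value
        else:
            # assumed handling of non-bytes values (branch not shown above)
            return field + '=' + str(value)

    _make_line_sketch('MESSAGE', b'hello')  # b'MESSAGE=hello'
    _make_line_sketch('PRIORITY', 3)        # 'PRIORITY=3'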