Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

log: consume TSS logging and feed into python logging module #164

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 50 additions & 0 deletions main.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
import logging

from test.TSS2_BaseTest import TpmSimulator

from tpm2_pytss import FAPI, FapiConfig
from tpm2_pytss.log import tss_loggers

# highlight using ANSI color codes (SGR escape sequences: ESC [ <n> m)
green = "\x1b[32m"
yellow = "\x1b[93m"
# BUG FIX: was "\x1b[31" — the missing terminating "m" made the escape
# sequence invalid, so "red" text leaked a partial escape into the output.
red = "\x1b[31m"
blue = "\x1b[34m"
cyan = "\x1b[96m"
light_grey = "\x1b[37m"
reset = "\x1b[0m"

# setup logging: attach one colorized stream handler to the root logger
root_logger = logging.getLogger()
# NOTSET on the root means effective filtering is done per-logger/handler
root_logger.setLevel(logging.NOTSET)
handler = logging.StreamHandler()
# pathname/lineno come from the TSS source location (see tpm2_pytss.log,
# which monkey-patches makeRecord so `extra` can overwrite them)
formatter = logging.Formatter(
    f"{light_grey}[%(levelname)s]{reset} {blue}%(pathname)s:%(lineno)d{reset} - {cyan}%(name)s {yellow}%(message)s{reset}",
    "%Y-%m-%d %H:%M:%S",
)
handler.setFormatter(formatter)
root_logger.addHandler(handler)

# set some TSS log levels as an example: silence everything below WARNING,
# but turn the FAPI JSON logger all the way up to DEBUG
for _module, logger in tss_loggers.items():
    logger.setLevel(logging.WARNING)
logging.getLogger("TSS.fapijson").setLevel(logging.DEBUG)

if __name__ == "__main__":
    # Spin up a software TPM simulator for the demo.
    tpm = TpmSimulator.getSimulator()
    tpm.start()

    # ek_cert_less="yes" skips EK certificate verification (no real TPM).
    with FapiConfig(
        temp_dirs=True, tcti=tpm.tcti_name_conf, ek_cert_less="yes"
    ) as fapi_config:
        with FAPI() as fapi:
            fapi.provision()

            fapi.create_key(path=f"/{fapi.config.profile_name}/HS/SRK/key_123")

            # provoke error logs: the key already exists, so the TSS layer
            # logs errors even though exists_ok=True suppresses the Python
            # exception
            fapi.create_key(
                path=f"/{fapi.config.profile_name}/HS/SRK/key_123", exists_ok=True
            )

    tpm.close()
1 change: 1 addition & 0 deletions tpm2_pytss/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from .ESAPI import ESAPI
from .FAPI import *
from .TctiLdr import *
from tpm2_pytss.log import *
from .types import *
from .TSS2_Exception import TSS2_Exception
157 changes: 157 additions & 0 deletions tpm2_pytss/log.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,157 @@
import datetime
import logging
import os
import re
import threading
from typing import List, Optional

logger = logging.getLogger(__name__)


def makeRecord(
    self, name, level, fn, lno, msg, args, exc_info, func=None, extra=None, sinfo=None,
):
    """Create a :class:`logging.LogRecord`, letting *extra* win conflicts.

    The stock ``Logger.makeRecord`` raises ``KeyError`` when *extra*
    tries to overwrite built-in record attributes. This replacement
    applies *extra* unconditionally, so callers can substitute
    ``pathname``, ``lineno`` and ``func`` with the TSS source location.
    """
    record = logging.LogRecord(name, level, fn, lno, msg, args, exc_info, func, sinfo)
    if extra:
        # Overwrite any attribute, including pathname/lineno/func.
        record.__dict__.update(extra)
    return record


# Monkey-patch Logger.makeRecord so the TSS source location supplied via
# `extra` replaces the Python call site on emitted records.
logging.Logger.makeRecord = makeRecord


# TSS sub-modules that can emit log output (matches the module tags the
# TSS2 libraries print in their log lines).
tss_modules = [
    "fapi",
    "fapijson",
    "esys",
    "esys_crypto",
    "sys",
    "marshal",
    "tcti",
    "log",
    "test",
]

# One python logger per TSS module, namespaced under "TSS." so levels can
# be tuned individually (e.g. logging.getLogger("TSS.fapi")).
tss_loggers = {module: logging.getLogger(f"TSS.{module}") for module in tss_modules}


class RawLogRecord:
    """Accumulator for one raw (multi-line) TSS log message.

    Keeps the time of the last modification so that a record which has
    not received new text for a while can be treated as complete and
    flushed (see :attr:`is_expired`).
    """

    def __init__(self, text):
        # Creation counts as the first modification.
        self.timestamp = datetime.datetime.now()
        self._text = text

    @property
    def text(self):
        """The raw log text collected so far."""
        return self._text

    @text.setter
    def text(self, value):
        # Every update (e.g. an appended continuation line) refreshes
        # the modification time.
        self._text = value
        self.timestamp = datetime.datetime.now()

    @property
    def is_expired(self):
        """True once the record has been idle for more than one second."""
        idle = datetime.datetime.now() - self.timestamp
        return idle > datetime.timedelta(seconds=1)


class ConsumeTssLogs(threading.Thread):
    """Background thread that captures TSS library logging and re-emits
    it through the Python ``logging`` module.

    Constructing an instance redirects TSS logging into a non-blocking
    pipe (via the ``TSS2_LOGFILE``/``TSS2_LOG`` environment variables);
    ``run()`` then drains the pipe until the main thread exits.
    """

    def __init__(self):
        super().__init__()
        # Buffered raw records; the last entry may still be receiving
        # continuation lines from the pipe.
        self.log_records: List[RawLogRecord] = []
        # Non-blocking so run() can poll without stalling; CLOEXEC so
        # children do not inherit the fds.
        self.pipe_r, self.pipe_w = os.pipe2(os.O_NONBLOCK | os.O_CLOEXEC)

        # Redirect TSS logging to pipe
        os.environ["TSS2_LOGFILE"] = f"/dev/fd/{self.pipe_w}"

        # Crank up TSS logging and filter through the python log levels
        os.environ["TSS2_LOG"] = "all+trace"

    def run(self):
        """Read TSS logging stream from pipe and process."""
        # NOTE(review): unused variable, kept as-is.
        main_dead_counter = 0

        # NOTE(review): this loop never sleeps, so the thread busy-polls
        # the pipe at full speed — consider a short sleep per iteration.
        while True:
            data = b""
            while True:
                try:
                    data += os.read(self.pipe_r, 1024)
                except BlockingIOError:
                    # pipe is empty
                    break

            self.process(data)

            # end once the main thread has died
            if not threading.main_thread().is_alive():
                break

        # Flush any cached record, then close both pipe ends.
        self.process(flush=True)
        os.close(self.pipe_r)
        os.close(self.pipe_w)

    def process(self, data: Optional[bytes] = None, flush: bool = True):
        """Parse raw TSS logging stream and feed log records into the python logging system. Caches last record."""
        if not data:
            if not flush:
                return
            data = b""

        for line in data.decode().splitlines():
            # A new record starts with "LEVEL:module:file:lineno:func() msg"
            if re.match(r"^.+?:.+?:.+?:\d+?:.+?\(\) .+", line):
                # line starts new log record
                self.log_records.append(RawLogRecord(line))
            elif self.log_records:
                # line belongs to previous log record
                self.log_records[-1].text += f"\n{line}"
            else:
                logger.error(f"Cannot parse TSS stderr:\n{line}")
                continue

        # process everything except the last log_record (because there
        # might be more data coming for it)
        for log_record in self.log_records[:-1]:
            self.publish(log_record)

        # cache last record (if not expired)
        if self.log_records:
            last_log_record = self.log_records[-1]
            if last_log_record.is_expired:
                self.publish(last_log_record)
                self.log_records = []
            else:
                self.log_records = [last_log_record]

    @staticmethod
    def publish(log_record: RawLogRecord):
        """Parse and publish a single log record."""
        # parse stderr: "LEVEL:module:file:lineno:func() message"
        level, module, filename, lineno, func, message = re.split(
            ":| ", log_record.text.strip(), maxsplit=5
        )

        # map TSS2 logging levels to python levels
        level = {
            "error": "error",
            "warning": "warning",
            "info": "info",
            "debug": "debug",
            "trace": "debug",
        }[level.lower()]

        # choose logger and log level
        log_function = getattr(tss_loggers[module], level)

        # pass message to logger; the extra fields overwrite the record's
        # call-site info thanks to the patched makeRecord above
        log_function(
            f"{message}",
            extra={"pathname": filename, "lineno": int(lineno), "func": func},
        )


# Consume TSS logging stream. Started at import time; the thread stops
# itself once the main thread exits, so no explicit shutdown is needed.
ConsumeTssLogs().start()