I was looking for a way to call shell scripts from Python and write their stdout and stderr to a file using logging. I wanted the same behaviour for both check_call and check_output, and the following solution works for me. Here is my code:
import logging
import os
import subprocess
import sys
import threading

logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)


class LogPipe(threading.Thread):

    def __init__(self, level):
        """Set up the object with a logger and a log level,
        and start the thread.
        """
        threading.Thread.__init__(self)
        self.daemon = False
        self.level = level
        self.fdRead, self.fdWrite = os.pipe()
        self.pipeReader = os.fdopen(self.fdRead)
        self.start()

    def fileno(self):
        """Return the write file descriptor of the pipe."""
        return self.fdWrite

    def run(self):
        """Run the thread, logging every line read from the pipe."""
        for line in iter(self.pipeReader.readline, ''):
            logging.log(self.level, line.strip('\n'))
        self.pipeReader.close()

    def close(self):
        """Close the write end of the pipe."""
        os.close(self.fdWrite)

    def write(self, message):
        """Needed if your code does something like sys.stdout.write(...)."""
        logging.log(self.level, message)

    def flush(self):
        """Needed if your code does something like sys.stdout.flush()."""
        pass
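Because LogPipe exposes fileno(), an instance can also be handed straight to subprocess without touching sys.stdout or sys.stderr at all. A minimal sketch of that variant (the script name ./myscript.sh is just a placeholder, not part of my actual code):

# Minimal sketch: pass the pipes directly to subprocess.
# './myscript.sh' is only a placeholder for whatever command you run.
outpipe = LogPipe(logging.INFO)
errpipe = LogPipe(logging.ERROR)
try:
    subprocess.check_call(['./myscript.sh'], stdout=outpipe, stderr=errpipe)
finally:
    # Closing the write ends lets the reader threads see EOF and exit.
    outpipe.close()
    errpipe.close()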
After implementing it, I performed the steps below:
try:
    # This works with multiple handlers as well.
    # Note: if basicConfig was already called above, Python 3.8+ needs force=True to reconfigure.
    logging.basicConfig(handlers=[logging.FileHandler(log_file), logging.StreamHandler()])

    # Redirect Python-level stdout/stderr through the logging pipes.
    sys.stdout = LogPipe(logging.INFO)
    sys.stderr = LogPipe(logging.ERROR)
    ...
    subprocess.check_call(subprocess_cmd, stdout=sys.stdout, stderr=sys.stderr)
    export_output = subprocess.check_output(subprocess_cmd, stderr=sys.stderr)
    ...
finally:
    # Close the write ends so the reader threads see EOF and exit.
    sys.stdout.close()
    sys.stderr.close()

    # It is necessary to close the file handlers properly.
    sys.stdout = sys.__stdout__
    sys.stderr = sys.__stderr__
    logging.shutdown()
    os.remove(log_file)
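One thing to keep in mind with the check_output call: it captures the child's stdout itself and returns it (you are not allowed to pass stdout= to it), so only stderr flows through the LogPipe there. If the captured output should also end up in the log, it has to be logged explicitly, for example something like:

# Hypothetical follow-up: log what check_output captured, since it bypasses the stdout pipe.
export_output = subprocess.check_output(subprocess_cmd, stderr=sys.stderr)
for line in export_output.decode().splitlines():
    logging.info(line)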