Sun, 21 Jan 2018 13:34:12 +0000
More logging detail
backup.py
borgend.py
config.py
instance.py
ui.py
--- a/backup.py Sun Jan 21 12:10:57 2018 +0000
+++ b/backup.py Sun Jan 21 13:34:12 2018 +0000
@@ -10,6 +10,8 @@
 from queue import Queue
 from threading import Thread, Lock, Timer
 
+logger=logging.getLogger(__name__)
+
 loglevel_translation={
     'CRITICAL': logging.CRITICAL,
     'ERROR': logging.ERROR,
@@ -122,10 +124,10 @@
         assert(not running)
 
     def __log_listener(self):
-        logging.debug('Log listener thread waiting for entries')
+        logger.debug('Log listener thread waiting for entries')
         success=True
         for status in iter(self.borg_instance.read_log, None):
-            logging.debug(str(status))
+            logger.debug(str(status))
             t=status['type']
 
             errors_this_message=None
@@ -165,23 +167,23 @@
                 if 'name' not in status:
                     status['name']='borg'
                 lvl=translate_loglevel(status['levelname'])
-                logging.log(lvl, status['name'] + ': ' + status['message'])
+                logger.log(lvl, status['name'] + ': ' + status['message'])
                 if lvl>=logging.WARNING:
                     errors_this_message=status
                     with self.lock:
                         self.current_operation['errors']=True
                         status, callback=self.__status_unlocked()
             else:
-                logging.debug('Unrecognised log entry %s' % str(status))
+                logger.debug('Unrecognised log entry %s' % str(status))
 
             if callback:
                 callback(self, status, errors=errors_this_message)
 
-        logging.debug('Waiting for borg subprocess to terminate in log thread')
+        logger.debug('Waiting for borg subprocess to terminate in log thread')
 
         self.borg_instance.wait()
 
-        logging.debug('Borg subprocess terminated; terminating log listener thread')
+        logger.debug('Borg subprocess terminated; terminating log listener thread')
 
     def __result_listener(self):
         with self.lock:
@@ -189,22 +191,22 @@
             if callback:
                 callback(self, status)
 
-        logging.debug('Result listener thread waiting for result')
+        logger.debug('Result listener thread waiting for result')
 
         res=self.borg_instance.read_result()
         success=True
 
-        logging.debug('Borg result: %s' % str(res))
+        logger.debug('Borg result: %s' % str(res))
 
         if res==None:
             success=False
 
-        logging.debug('Waiting for borg subprocess to terminate in result thread')
+        logger.debug('Waiting for borg subprocess to terminate in result thread')
 
         success=success and self.borg_instance.wait()
 
-        logging.debug('Borg subprocess terminated (success: %s); terminating result listener thread' % str(success))
+        logger.debug('Borg subprocess terminated (success: %s); terminating result listener thread' % str(success))
 
         self.thread_log.join()
@@ -227,6 +229,8 @@
         inst=BorgInstance(op['operation'], archive_or_repository, *args)
         inst.launch(passphrase=self.__passphrase)
 
+        logger.debug('Creating listener threads')
+
         t_log=Thread(target=self.__log_listener)
         t_log.daemon=True
@@ -250,11 +254,11 @@
             return False
         else:
             if self.timer:
-                logging.debug('Unscheduling timed operation due to launch of operation')
+                logger.debug('Unscheduling timed operation due to launch of operation')
                 self.timer=None
                 self.scheduled_operation=None
 
-        logging.debug("Launching '%s' on '%s'" % (op['operation'], self.name))
+        logger.debug("Launching '%s' on '%s'" % (op['operation'], self.name))
 
         if op['operation']=='create':
             archive="%s::%s%s" % (self.repository,
@@ -270,7 +274,7 @@
                                 self.common_parameters +
                                 self.prune_parameters))
         else:
-            logging.error("Invalid operaton '%s'" % op['operation'])
+            logger.error("Invalid operaton '%s'" % op['operation'])
 
         self.__schedule_unlocked()
         return True
@@ -303,7 +307,7 @@
 
     def join(self):
-        logging.debug('Waiting for borg listener threads to terminate')
+        logger.debug('Waiting for borg listener threads to terminate')
         with self.lock:
             thread_log=self.thread_log
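The bulk of the backup.py change sits in __log_listener, which forwards borg's own log records through the new module-level logger. In outline, the translation step works roughly as follows (a sketch reusing the names visible in the diff, not the full listener; the fallback level for unknown names is an assumption):

import logging

logger=logging.getLogger(__name__)

# Sketch only: mirrors the loglevel_translation / translate_loglevel pattern
# visible in the diff; the real listener additionally tracks errors and
# invokes status callbacks.
loglevel_translation={
    'CRITICAL': logging.CRITICAL,
    'ERROR': logging.ERROR,
    'WARNING': logging.WARNING,
    'INFO': logging.INFO,
    'DEBUG': logging.DEBUG,
}

def translate_loglevel(levelname):
    # Unknown level names fall back to WARNING (assumption, not from the diff)
    return loglevel_translation.get(levelname, logging.WARNING)

def forward_borg_log(status):
    # status is one decoded --log-json entry, e.g.
    # {'type': 'log_message', 'levelname': 'WARNING', 'name': 'borg', 'message': '...'}
    name=status.get('name', 'borg')
    lvl=translate_loglevel(status['levelname'])
    logger.log(lvl, name + ': ' + status['message'])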
--- a/borgend.py Sun Jan 21 12:10:57 2018 +0000
+++ b/borgend.py Sun Jan 21 13:34:12 2018 +0000
@@ -4,9 +4,11 @@
 from threading import Thread
 
 logging.basicConfig(#filename='example.log',
-                    format='%(levelname)s:%(message)s',
+                    #format='%(levelname)s:%(message)s',
                     level=logging.DEBUG)
 
+logger=logging.getLogger(__name__)
+
 from backup import Backup
 from config import settings
 from queue import Queue
@@ -17,7 +19,7 @@
 backups=[None]*len(backupconfigs);
 
 for i in range(len(backupconfigs)):
-    logging.info('Setting up backup set %d' % i)
+    logger.info('Setting up backup set %d' % i)
     backups[i]=Backup(i, backupconfigs[i])
     backups[i].schedule()
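The borgend.py hunk also comments out the explicit format string passed to basicConfig. With no format argument, Python's logging falls back to its default '%(levelname)s:%(name)s:%(message)s', so the per-module logger names introduced by this changeset actually appear in the output. A minimal illustration of the effect (not part of the changeset):

import logging

logging.basicConfig(level=logging.DEBUG)
logging.getLogger('config').info('Reading configuration file config.yaml')
# stderr: INFO:config:Reading configuration file config.yaml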
--- a/config.py Sun Jan 21 12:10:57 2018 +0000
+++ b/config.py Sun Jan 21 13:34:12 2018 +0000
@@ -11,6 +11,8 @@
 import logging
 from functools import reduce
 
+logger=logging.getLogger(__name__)
+
 #
 # Defaults
 #
@@ -124,7 +126,7 @@
 
 cfgfile=os.path.join(xdg.XDG_CONFIG_HOME, "borgend", "config.yaml")
 
-logging.info("Reading configuration file %s" % cfgfile)
+logger.info("Reading configuration file %s" % cfgfile)
 
 if not (os.path.exists(cfgfile) and os.path.isfile(cfgfile)):
     raise SystemExit('Configuration file required: {cfgfile}')
--- a/instance.py Sun Jan 21 12:10:57 2018 +0000
+++ b/instance.py Sun Jan 21 13:34:12 2018 +0000
@@ -8,6 +8,8 @@
 from subprocess import Popen, PIPE
 from config import settings, arglistify
 
+logger=logging.getLogger(__name__)
+
 necessary_opts=['--log-json', '--progress']
 
 necessary_opts_for={
@@ -39,7 +41,7 @@
 
     def launch(self, passphrase=None):
         cmd=self.construct_cmdline()
-        logging.info('Launching ' + str(cmd))
+        logger.info('Launching ' + str(cmd))
 
         env=os.environ.copy()
         if passphrase:
@@ -64,13 +66,13 @@
         stream=self.proc.stdout
         line=stream.read(-1)
         if line==b'':
-            logging.debug('Borg stdout pipe EOF?')
+            logger.debug('Borg stdout pipe EOF?')
             return None
 
         try:
             return json.loads(line)
         except:
-            logging.warning('JSON parse failed on: "%s"' % line)
+            logger.warning('JSON parse failed on: "%s"' % line)
             return None
 
     def read_log(self):
@@ -78,7 +80,7 @@
         try:
             line=stream.readline()
         except err:
-            logging.debug('Pipe read failed: %s' % str(err))
+            logger.debug('Pipe read failed: %s' % str(err))
             return {'type': 'log_message',
                     'levelname': 'CRITICAL',
@@ -88,7 +90,7 @@
 
         if line==b'':
-            logging.debug('Borg stderr pipe EOF?')
+            logger.debug('Borg stderr pipe EOF?')
             return None
 
@@ -98,7 +100,7 @@
             res['type']='UNKNOWN'
             return res
         except:
-            logging.debug('JSON parse failed on: "%s"' % str(line))
+            logger.debug('JSON parse failed on: "%s"' % str(line))
             errmsg=line
 
             for line in iter(stream.readline, b''):
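instance.py reads two pipes from the borg subprocess: read_result takes the final JSON document from stdout, while read_log decodes one --log-json entry per stderr line. The decoding boils down to the following (a hedged sketch of the same logic, assuming each stderr line is a standalone JSON object; the real read_log additionally collects trailing non-JSON output into an error message):

import json

def decode_log_line(line):
    # line is one raw bytes line from borg's stderr in --log-json mode
    if line==b'':
        return None            # EOF: borg closed the pipe
    try:
        res=json.loads(line)
        if 'type' not in res:
            res['type']='UNKNOWN'
        return res
    except ValueError:
        # Non-JSON output is reported as a critical log_message instead
        return {'type': 'log_message',
                'levelname': 'CRITICAL',
                'message': str(line)}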
--- a/ui.py Sun Jan 21 12:10:57 2018 +0000
+++ b/ui.py Sun Jan 21 13:34:12 2018 +0000
@@ -10,6 +10,8 @@
 from config import settings
 import objc
 
+logger=logging.getLogger(__name__)
+
 INACTIVE=0
 ACTIVE=1
 OFFLINE=2
@@ -146,14 +148,14 @@
 
     def refresh_ui(self):
         with self.lock:
             self.refresh_timer=None
-            logging.debug('Rebuilding menu')
+            logger.debug('Rebuilding menu')
             menu, active=self.__rebuild_menu()
             self.menu.clear()
             self.menu.update(menu)
             self.title=traynames[active]
 
     def __status_callback(self, obj, index, status, errorlog):
-        logging.debug('Status callbackup %s' % str(status))
+        logger.debug('Status callbackup %s' % str(status))
         with self.lock:
             self.statuses[index]=status
@@ -167,7 +169,7 @@
             else:
                 msgid=errorlog['msgid']
 
-            logging.debug('Opening notification for error')
+            logger.debug('Opening notification for error')
 
             # Workaround to rumps brokenness
             # See https://github.com/jaredks/rumps/issues/59
@@ -181,7 +183,7 @@
 
     def __menu_select_backup(self, sender, b):
         #sender.state=not sender.state
-        logging.debug("Manually backup '%s'", b.name)
+        logger.debug("Manually backup '%s'", b.name)
         b.create(None)
 
 @rumps.notifications
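With every module now obtaining its own logger via logging.getLogger(__name__), verbosity can be tuned per module from one place instead of only globally. A small sketch of that payoff (not part of the changeset; the module names assume the files are imported as top-level modules):

import logging

logging.basicConfig(level=logging.DEBUG)

# Keep full detail from the backup logic but quiet the UI refresh chatter
logging.getLogger('backup').setLevel(logging.DEBUG)
logging.getLogger('ui').setLevel(logging.INFO)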