Handle unreachable repos without exploding

Agatha Lovelace 2021-10-05 21:18:30 +03:00
parent 93a871efa7
commit d4bd1df1a1
No known key found for this signature in database
GPG Key ID: 2DB18BA2E0A80BC3
1 changed file with 55 additions and 24 deletions

main.py

@@ -5,8 +5,9 @@ from rich.console import Group
 from rich.progress import Progress, BarColumn
 from rich.panel import Panel
 from rich.style import Style
+from rich.padding import Padding
 import json
-import os
+import subprocess
 
 # https://stackoverflow.com/questions/1094841/get-human-readable-version-of-file-size
@@ -34,52 +35,82 @@ else:
     passphrase = ''
 
-borg_info = os.popen(passphrase + 'borg info --json ' + config['repo']['path']).read()
-# parse repo info from json
-borg_info = json.loads(borg_info)
-stats = borg_info['cache']['stats']
-csize = int(stats['unique_csize'])
-borg_list = os.popen(passphrase + 'borg list --json ' + config['repo']['path']).read()
-# parse repo info from json
-borg_list = json.loads(borg_list)
-last_archive = borg_list['archives'][-1]
-# datetime format: https://borgbackup.readthedocs.io/en/stable/internals/frontends.html#standard-output
-last_archive_time = datetime.strptime(last_archive['time'], '%Y-%m-%dT%H:%M:%S.%f')
-last_archive_time = last_archive_time.strftime('%d/%m/%Y %H:%M')
+borg_info_raw = subprocess.run(passphrase + 'borg info --json ' + config['repo']['path'], shell=True, capture_output=True)
+borg_list = subprocess.run(passphrase + 'borg list --json ' + config['repo']['path'], shell=True, capture_output=True)
 
 # get free disk space
-df_avail = os.popen(config['disk']['ssh'] + ' "df --block-size=1000 --output=avail ' + config['disk']['partition'] + ' | tail -1"').read().rstrip()
+df_avail_raw = subprocess.run(config['disk']['ssh'] + ' "df --block-size=1000 --output=avail ' + config['disk']['partition'] + ' | tail -1"', shell=True, capture_output=True)
+df_avail = df_avail_raw.stdout.rstrip()
+
+try:
+    # parse repo info from json
+    borg_info = json.loads(borg_info_raw.stdout)
+    # parse repo info from json
+    borg_list = json.loads(borg_list.stdout)
+except json.decoder.JSONDecodeError:
+    print('[bold red]Unable to reach repo!')
+
+# try to get compressed + deduplicated backup size
+try:
+    stats = borg_info['cache']['stats']
+    csize = int(stats['unique_csize'])
+    readable_csize = readable_size(csize)
+    last_archive = borg_list['archives'][-1]
+    archive_num = len(borg_list['archives'])
+except NameError:
+    csize = 0
+    readable_csize = last_archive = archive_num = '??'
+
+# datetime format: https://borgbackup.readthedocs.io/en/stable/internals/frontends.html#standard-output
+try:
+    last_archive_time = datetime.strptime(last_archive['time'], '%Y-%m-%dT%H:%M:%S.%f')
+    last_archive_time = last_archive_time.strftime('%d/%m/%Y %H:%M')
+except TypeError:
+    last_archive_time = 'Unknown'
 
 # actually print the thing
-emphasis = Style(color="#d89961", bold=True)
+emphasis = Style(color='#d89961', bold=True)
+
+if borg_info_raw.returncode == 0:
+    online = Padding('[#82bfe0 bold]◉[/#82bfe0 bold] Host online', (0, 2))
+else:
+    online = Padding('[#34454f bold]◌[/#34454f bold] Host offline', (0, 2))
+
+if df_avail_raw.returncode == 0:
+    df_avail_bytes = int(df_avail) * 1000
+    df_avail_readable = readable_size(df_avail_bytes)
+else:
+    # placeholder to make the bar empty
+    df_avail_bytes = 1000
+    df_avail_readable = '??'
 
 # it's hacky, but should work as expected
 used = Progress(
     '[progress.description]{task.description}',
     # space used
-    f'[{emphasis}]{readable_size(csize)}[/{emphasis}]',
+    f'[{emphasis}]{readable_csize}[/{emphasis}]',
     BarColumn(complete_style=emphasis, finished_style="#d34141"),
     # space available
-    f'[{emphasis}]{readable_size((int(df_avail) * 1000))}[/{emphasis}]',
+    f'[{emphasis}]{df_avail_readable}[/{emphasis}]',
     '[progress.percentage]{task.percentage:>3.0f}%'
 )
-used.add_task("Used:", completed=csize, total=(int(df_avail) * 1000))
-disk_usage = Panel(used, box=box.DOUBLE, border_style=emphasis)
-avail_backups = Panel(f"Available: [{emphasis}]{len(borg_list['archives'])}[/{emphasis}] backups. Last backup from: [{emphasis}]{last_archive_time}[/{emphasis}]", box=box.DOUBLE, border_style=emphasis)
+used.add_task('Used:', completed=csize, total=df_avail_bytes)
+disk_usage = Panel(used, box=box.SQUARE, border_style=emphasis)
+avail_backups = Panel(f"Available: [{emphasis}]{archive_num}[/{emphasis}] backups. Last backup from: [{emphasis}]{last_archive_time}[/{emphasis}]", box=box.SQUARE, border_style=emphasis)
 
 output_group = Group(
+    online,
     disk_usage,
     avail_backups
 )
 
-print(Panel(
+print(Panel.fit(
     output_group,
-    box=box.SQUARE, border_style="#d89961"
+    box=box.DOUBLE, border_style=emphasis
 ))
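
The new code keeps calling the readable_size() helper that main.py defines above these hunks; only the Stack Overflow link hints at it and its body is not part of this diff. A minimal sketch of such a helper, assuming decimal (power-of-1000) units to match the --block-size=1000 passed to df, could look like:

def readable_size(num, suffix='B'):
    # sketch of the helper main.py already defines (exact body not shown in the diff)
    # step through decimal unit prefixes until the value fits
    for unit in ('', 'K', 'M', 'G', 'T', 'P'):
        if abs(num) < 1000.0:
            return f'{num:3.1f} {unit}{suffix}'
        num /= 1000.0
    return f'{num:.1f} E{suffix}'

Worth noting: subprocess.run(..., capture_output=True) returns bytes in .stdout, but json.loads() accepts bytes since Python 3.6 and int() accepts ASCII digit bytes, so the parsing in the new code works without an explicit .decode().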