2018-08-13 12:20:09 +02:00
|
|
|
#!/usr/bin/env python3
|
2019-10-26 17:09:22 +02:00
|
|
|
# pylint: disable=C0103,W0603,W0621,E1101
|
2018-08-13 12:20:09 +02:00
|
|
|
|
|
|
|
"""
|
|
|
|
Meh mail client
|
2018-08-14 17:23:57 +02:00
|
|
|
A dumb Python scripts that leverages notmuch, mbsync, and msmtp
|
|
|
|
to become a fully-functional extremly-opinonated mail client.
|
2018-08-13 12:20:09 +02:00
|
|
|
"""
|
|
|
|
|
2018-08-13 17:59:40 +02:00
|
|
|
# TODO Features
|
2019-10-26 17:09:22 +02:00
|
|
|
# TODO Implement initial command set
|
|
|
|
# TODO Lockfiles for write operations on mail files (mbsync,
|
|
|
|
# tags→maildir operations)
|
|
|
|
# TODO OPTI Lockfile per account and process everything in parallel
|
|
|
|
# (if implemented, this should be optional since while it may speed up
|
|
|
|
# the mail fetching process, its multi-threading nature would cause a
|
|
|
|
# lot of cache flushes and be not very efficient on battery)
|
|
|
|
# TODO Handle true character width
|
|
|
|
# TODO IMAP IDLE watches?
|
|
|
|
# TODO GPG
|
2018-08-13 17:59:40 +02:00
|
|
|
# TODO (only then) Refactor
|
2019-10-26 17:09:22 +02:00
|
|
|
# TODO OOP-based
|
|
|
|
# TODO Merge file with melConf
|
|
|
|
# TODO Un-ignore pylint warnings
|
2018-08-14 17:23:57 +02:00
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
import argparse
|
|
|
|
import configparser
|
|
|
|
import datetime
|
|
|
|
import email.message
|
|
|
|
import email.parser
|
|
|
|
import html
|
|
|
|
import logging
|
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import shutil
|
|
|
|
import subprocess
|
|
|
|
import sys
|
2018-08-14 17:23:57 +02:00
|
|
|
import time
|
2019-10-26 17:09:22 +02:00
|
|
|
import typing
|
2018-08-13 17:59:40 +02:00
|
|
|
|
2018-08-13 12:20:09 +02:00
|
|
|
import colorama
|
2019-10-26 17:09:22 +02:00
|
|
|
import coloredlogs
|
|
|
|
import notmuch
|
2018-08-13 12:20:09 +02:00
|
|
|
import progressbar
|
2018-08-14 10:08:59 +02:00
|
|
|
import xdg.BaseDirectory
|
2018-08-14 17:23:57 +02:00
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
# Timestamp of the previous perfstep() call (see perfstep()).
PERF_LAST = time.perf_counter()
# Cumulative seconds spent in each named step, filled by perfstep().
PERF_DICT: typing.Dict[str, float] = dict()


# Debug escape hatch: some debug commands stash an arbitrary object here
# for interactive inspection.
a: typing.Any = 'DEBUG VARIABLE (empty)'
|
|
|
|
|
2018-08-13 12:20:09 +02:00
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
def perfstep(name: str) -> None:
    """
    DEBUG
    Small step-based profiler.

    Call it with the name of the step you just finished; the time
    elapsed since the previous call is accumulated into PERF_DICT
    under that name.
    """
    global PERF_LAST
    now = time.perf_counter()
    elapsed = now - PERF_LAST
    PERF_DICT[name] = PERF_DICT.get(name, 0.0) + elapsed
    PERF_LAST = time.perf_counter()
|
2018-08-14 17:23:57 +02:00
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
|
|
|
|
# Account name → its configparser section, filled by generate_aliases().
ACCOUNTS: typing.Dict[str, configparser.SectionProxy] = dict()
ALIASES: typing.Set[str] = set()  # All the emails the user is represented as
# TODO If the user send emails to himself, maybe that wont cut it.
# Global notmuch database handle managed by open_database()/close_database().
DB = None
# Global ConfigParser, loaded from the accounts config file in __main__.
CONFIG = None
|
|
|
|
|
|
|
|
|
|
|
|
def notmuch_new() -> None:
    """
    Run `notmuch new` so the notmuch database catches up with the
    on-disk mail folders.
    """
    close_database()
    log.info("Indexing mails")
    configPath = os.path.expanduser(
        "~/.config/notmuch-config")  # TODO Better
    command = ["notmuch", "--config", configPath, "new"]
    log.debug(" ".join(command))
    subprocess.run(command, check=True)
|
2018-08-17 15:08:40 +02:00
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
|
|
|
|
def list_folders() -> typing.List[typing.Tuple[str, ...]]:
    """
    List all the folders of the mail dir.

    Walks every known account's directory under the storage root and
    returns each maildir folder as a tuple of path components relative
    to the storage root, e.g. ('account', 'INBOX', 'Subfolder').
    """
    assert CONFIG
    storagePath = os.path.realpath(
        os.path.expanduser(CONFIG["GENERAL"]["storage"]))
    folders = list()
    for account in ACCOUNTS:
        storagePathAccount = os.path.join(storagePath, account)
        for root, dirs, _ in os.walk(storagePathAccount):
            # A maildir folder is recognised by its cur/new/tmp subdirs.
            if "cur" not in dirs or "new" not in dirs or "tmp" not in dirs:
                continue
            assert root.startswith(storagePath)
            # Keep only the part relative to the storage root.
            path = root[len(storagePath):]
            pathSplit = path.split('/')
            if pathSplit[0] == '':
                # Drop the empty component left by the leading '/'.
                pathSplit = pathSplit[1:]
            folders.append(tuple(pathSplit))
    return folders
|
2018-08-14 17:23:57 +02:00
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
|
|
|
|
def open_database(write: bool = False) -> None:
    """
    Open an access notmuch database in read or read+write mode.
    It is stored in the global DB.
    Be sure to require only in the mode you want to avoid deadlocks.

    No-op when a database is already open in the requested mode;
    otherwise the current handle is closed and a new one opened.
    """
    assert CONFIG
    global DB
    mode = notmuch.Database.MODE.READ_WRITE if write \
        else notmuch.Database.MODE.READ_ONLY
    if DB:
        if DB.mode == mode:
            # Already open with the right access mode: reuse it.
            return
        log.info("Current database not in mode %s, closing", mode)
        close_database()
    log.info("Opening database in mode %s", mode)
    dbPath = os.path.realpath(os.path.expanduser(CONFIG["GENERAL"]["storage"]))
    DB = notmuch.Database(mode=mode, path=dbPath)
|
|
|
|
|
|
|
|
|
|
|
|
def close_database() -> None:
    """
    Close the global notmuch database handle (DB) if one is open,
    then reset it to None.
    """
    global DB
    if not DB:
        return
    log.info("Closing database")
    DB.close()
    DB = None
|
|
|
|
|
2018-08-13 12:20:09 +02:00
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
def generate_aliases() -> None:
    """
    Populate ACCOUNTS and ALIASES from the loaded CONFIG.

    Lower-case config sections are account definitions; each account's
    "from" address and every ";"-separated address in its optional
    "alternatives" key count as aliases of the user.
    """
    assert CONFIG
    for name in CONFIG.sections():
        # Non-lowercase sections (e.g. GENERAL) are not accounts.
        if not name.islower():
            continue
        section = CONFIG[name]
        ALIASES.add(section["from"])
        if "alternatives" in section:
            ALIASES.update(section["alternatives"].split(";"))
        ACCOUNTS[name] = section
|
2018-08-13 12:20:09 +02:00
|
|
|
|
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
# Location of a mail file relative to the mail directory:
# (account name, folder path components, maildir state dir).
# NOTE Fixed the declared middle element: get_location() returns the
# folder as a *tuple* of components, not a single string.
MailLocation = typing.NewType(
    'MailLocation', typing.Tuple[str, typing.Tuple[str, ...], str])
|
|
|
|
|
|
|
|
|
|
|
|
def get_location(msg: notmuch.Message) -> MailLocation:
    """
    Return the filesystem location (relative to the mail directory)
    of the given message.

    Returns a (mailbox, folder, state) triple: `mailbox` is the account
    name, `folder` the tuple of intermediate directory names, `state`
    the maildir subdirectory ('cur', 'new' or 'tmp').
    """
    path = msg.get_filename()
    path = os.path.dirname(path)
    assert DB
    base = DB.get_path()
    assert path.startswith(base)
    # Strip the database root; only the relative path remains.
    path = path[len(base):]
    pathSplit = path.split('/')
    # The relative path starts with '/', so pathSplit[0] is '' and the
    # account name is the second component.
    mailbox = pathSplit[1]
    assert mailbox in ACCOUNTS
    state = pathSplit[-1]
    folder = tuple(pathSplit[2:-1])
    assert state in {'cur', 'tmp', 'new'}
    return (mailbox, folder, state)
|
|
|
|
|
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
# Cache of mailbox name → ANSI escape string (see get_mailbox_color()).
MAILBOX_COLORS: typing.Dict[str, str] = dict()
|
|
|
|
|
|
|
|
|
|
|
|
def get_mailbox_color(mailbox: str) -> str:
    """
    Return the colour of the given mailbox as a ready-to-print string
    of ANSI escape codes (24-bit foreground colour), cached in
    MAILBOX_COLORS.
    """
    # TODO Do not use 256³ colors but 16 colors
    assert CONFIG
    if mailbox not in MAILBOX_COLORS:
        spec = CONFIG[mailbox]["color"]
        # Accept both "#RRGGBB" and bare "RRGGBB".
        spec = spec[1:] if spec[0] == '#' else spec
        red = int(spec[0:2], 16)
        green = int(spec[2:4], 16)
        blue = int(spec[4:6], 16)
        MAILBOX_COLORS[mailbox] = '\x1b[38;2;{};{};{}m'.format(
            red, green, blue)
    return MAILBOX_COLORS[mailbox]
|
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
|
|
|
|
def format_date(date: datetime.datetime) -> str:
    """
    Format the given date as a 9-characters width string.
    Dates since local midnight are shown as a time, older ones as a
    day/month/year date.
    """
    # TODO Do as the description say (24h window, not midnight)
    today = datetime.datetime.now()
    midnight = datetime.datetime(today.year, today.month, today.day)
    recent = date > midnight
    # TODO Use my favourite date system
    return date.strftime('%H:%M:%S' if recent else '%d/%m/%y')
|
|
|
|
|
2018-08-13 12:20:09 +02:00
|
|
|
|
2018-08-14 17:23:57 +02:00
|
|
|
# Total width of the fixed listing columns (UID, date, icons, separators).
WIDTH_FIXED = 31
# Fraction of the remaining width given to the correspondent column.
WIDTH_RATIO_DEST_SUBJECT = 0.3
# Whether stdout is a terminal (colours, clipping) or a pipe (TSV output).
ISATTY = sys.stdout.isatty()
# Flexible column widths, computed lazily by compute_line_format();
# None when not yet computed or when output is not a terminal.
DEST_WIDTH: typing.Optional[int] = None
SUBJECT_WIDTH: typing.Optional[int] = None
|
|
|
|
|
|
|
|
|
|
|
|
def compute_line_format() -> None:
    """
    Assign the widths of the flexible columns (DEST_WIDTH and
    SUBJECT_WIDTH) from the terminal width, or reset them to None when
    stdout is not a terminal.
    """
    global DEST_WIDTH, SUBJECT_WIDTH
    if not ISATTY:
        DEST_WIDTH = None
        SUBJECT_WIDTH = None
        return
    columns, _ = shutil.get_terminal_size((80, 20))
    flexible = columns - WIDTH_FIXED - 1
    DEST_WIDTH = int(flexible * WIDTH_RATIO_DEST_SUBJECT)
    SUBJECT_WIDTH = flexible - DEST_WIDTH
|
2018-08-13 17:59:40 +02:00
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
|
|
|
|
def clip_text(size: typing.Optional[int], text: str) -> str:
    """
    Fit text into the given character size:
    fill with spaces if shorter, clip with … if larger.

    :param size: Target width, or None to return the text unchanged
                 (annotation fixed: the None case was always handled).
    :param text: The text to fit.
    :return: A string of exactly `size` characters when size is given.
    """
    if size is None:
        return text
    length = len(text)
    if length == size:
        return text
    if length > size:
        # Keep size-1 characters and mark the truncation.
        return text[:size-1] + '…'
    return text + ' ' * (size - length)
|
|
|
|
|
|
|
|
|
|
|
|
def isUID(uid: typing.Any) -> bool:
    """
    Tell whether the given value is a valid message UID:
    a 12-character string over [a-zA-Z0-9+/].
    """
    if not isinstance(uid, str):
        return False
    if len(uid) != 12:
        return False
    return bool(re.match('^[a-zA-Z0-9+/]{12}$', uid))
|
2018-08-13 17:59:40 +02:00
|
|
|
|
2018-08-13 12:20:09 +02:00
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
def print_msg(msg: notmuch.Message) -> None:
    """
    Print the given message header on one line.

    Columns: mailbox colour (tty only), UID, date, status icons,
    correspondent, subject.
    """
    if not DEST_WIDTH:
        compute_line_format()
    if ISATTY:
        # Widths are only meaningful for terminal output; in pipe mode
        # they stay None and clip_text() leaves the text unclipped.
        # (Previously this assert ran unconditionally and fired whenever
        # output was piped.)
        assert DEST_WIDTH and SUBJECT_WIDTH

    sep = " " if ISATTY else "\t"
    line = ""
    tags = set(msg.get_tags())
    mailbox, _, _ = get_location(msg)
    if ISATTY:
        line += get_mailbox_color(mailbox)

    # UID (stored as a `tuidXXXXXXXXXXXX` tag on the message)
    uid = None
    for tag in tags:
        if tag.startswith('tuid'):
            uid = tag[4:]
    # FIX: was a plain string with unexpanded {} placeholders referencing
    # the undefined name UID; make it a real f-string on `uid`.
    assert uid and isUID(uid), f"{uid} ({type(uid)}) is not a valid UID."
    line += uid

    # Date
    line += sep
    date = datetime.datetime.fromtimestamp(msg.get_date())
    line += format_date(date)

    # Icons
    line += sep

    def tags2col1(tag1: str, tag2: str,
                  characters: typing.Tuple[str, str, str, str]) -> None:
        """
        Show the presence/absence of two tags with one character.

        `characters` is (both, only tag1, only tag2, neither).
        """
        nonlocal line
        both, first, second, none = characters
        if tag1 in tags:
            if tag2 in tags:
                line += both
            else:
                line += first
        else:
            if tag2 in tags:
                line += second
            else:
                line += none

    tags2col1('spam', 'draft', ('?', 'S', 'D', ' '))
    tags2col1('attachment', 'encrypted', ('E', 'A', 'E', ' '))
    tags2col1('unread', 'flagged', ('!', 'U', 'F', ' '))
    tags2col1('sent', 'replied', ('?', '↑', '↪', ' '))

    # Correspondent: recipient for sent mail, sender otherwise.
    if 'sent' in tags:
        dest = msg.get_header("to")
    else:
        dest = msg.get_header("from")
    line += sep
    line += clip_text(DEST_WIDTH, dest)

    # Subject
    line += sep
    subject = msg.get_header("subject")
    line += clip_text(SUBJECT_WIDTH, subject)

    if ISATTY:
        line += colorama.Style.RESET_ALL
    print(line)
|
|
|
|
|
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
def extract_email(field: str) -> str:
    """
    Extract the email address from a To: or From: field:
    the part between the first < and the first > when both are present,
    the whole field otherwise.
    """
    start = field.find('<')
    stop = field.find('>')
    if start < 0 or stop < 0:
        return field
    return field[start+1:stop]
|
|
|
|
|
|
|
|
|
|
|
|
def retag_msg(msg: notmuch.Message) -> None:
    """
    Update automatic tags for message.

    Derives folder-based tags (inbox, spam, deleted, draft), marks mail
    sent from one of the user's aliases, clears `unprocessed`, and
    ensures the message carries exactly one `tuid*` tag matching its
    X-TUID header.
    """
    _, folder, _ = get_location(msg)

    # Search-friendly folder name
    slugFolderList = list()
    # (idiom fix: was a manual [(f, folder[f]) for f in range(len(folder))])
    for f, fold in enumerate(folder):
        # Skip a leading INBOX component when it is only a prefix.
        if f == 0 and len(folder) > 1 and fold == "INBOX":
            continue
        slugFolderList.append(fold.upper())
    slugFolder = tuple(slugFolderList)

    tags = set(msg.get_tags())

    def tag_if(tag: str, condition: bool) -> None:
        """
        Ensure the presence/absence of tag depending on the condition.
        """
        nonlocal msg
        if condition and tag not in tags:
            msg.add_tag(tag)
        elif not condition and tag in tags:
            msg.remove_tag(tag)
    expeditor = extract_email(msg.get_header('from'))

    tag_if('inbox', slugFolder[0] == 'INBOX')
    tag_if('spam', slugFolder[0] in ('JUNK', 'SPAM'))
    tag_if('deleted', slugFolder[0] == 'TRASH')
    tag_if('draft', slugFolder[0] == 'DRAFTS')
    tag_if('sent', expeditor in ALIASES)
    # Every retagged message is, by definition, processed.
    tag_if('unprocessed', False)

    # UID
    uid = msg.get_header("X-TUID")
    if not isUID(uid):
        # TODO Happens to sent mails but should it?
        print(f"{msg.get_filename()} has no UID!")
        return
    uidtag = 'tuid{}'.format(uid)
    # Remove eventual others UID
    for tag in tags:
        if tag.startswith('tuid') and tag != uidtag:
            msg.remove_tag(tag)
    msg.add_tag(uidtag)
|
2018-08-13 12:20:09 +02:00
|
|
|
|
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
def applyMsgs(queryStr: str, action: typing.Callable, *args: typing.Any,
              showProgress: bool = False, write: bool = False,
              closeDb: bool = True, **kwargs: typing.Any) -> int:
    """
    Run a function on the messages selected by the given query.

    :param queryStr: notmuch query string selecting the messages.
    :param action: Callable invoked as action(msg, *args, **kwargs).
    :param showProgress: Display a progress bar while iterating.
    :param write: Open the database read+write and freeze/thaw each
        message around the action so tag changes apply atomically.
    :param closeDb: Close the global database handle when done.
    :return: The number of messages matched by the query.
    """
    open_database(write=write)

    log.info("Querying %s", queryStr)
    query = notmuch.Query(DB, queryStr)
    query.set_sort(notmuch.Query.SORT.OLDEST_FIRST)

    elements = query.search_messages()
    nbMsgs = query.count_messages()

    iterator = progressbar.progressbar(
        elements, max_value=nbMsgs) if showProgress else elements

    log.info("Executing %s", action)
    for msg in iterator:
        if write:
            # Batch tag operations on this message until thaw().
            msg.freeze()

        action(msg, *args, **kwargs)

        if write:
            msg.thaw()
            # Reflect the new tags in the maildir filename flags.
            msg.tags_to_maildir_flags()

    if closeDb:
        close_database()

    return nbMsgs
|
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
|
|
|
|
def notify_msg(msg: notmuch.Message) -> None:
    """
    Send a desktop notification (via notify-send) for the given message.
    """
    log.info("Sending notification for %s", msg)
    sender = msg.get_header("from")
    subject = msg.get_header("subject")
    account, _, _ = get_location(msg)

    summary = '{} (<i>{}</i>)'.format(html.escape(sender), account)
    body = html.escape(subject)
    command = ["notify-send", "-u", "low", "-i", "mail-message-new",
               summary, body]
    print(' '.join(command))
    subprocess.run(command, check=False)
|
2018-08-14 17:23:57 +02:00
|
|
|
|
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
def notify_all() -> None:
    """
    Send a notification for every unprocessed unread message.

    Each message should only ever be notified once, since it is marked
    as processed right afterwards. Plays a sound when anything was new.
    """
    open_database()
    count = applyMsgs('tag:unread and tag:unprocessed', notify_msg)
    if count:
        log.info("Playing notification sound (%d new message(s))", count)
        soundCmd = ["play", "-n", "synth", "sine", "E4", "sine", "A5",
                    "remix", "1-2", "fade", "0.5", "1.2", "0.5", "2"]
        subprocess.run(soundCmd, check=False)
    close_database()
|
|
|
|
|
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
def chunks(l: typing.Sequence, n: int) -> typing.Iterator[typing.Sequence]:
    """Yield successive n-sized chunks from l.

    Generalized annotation: works on any sliceable sequence
    (str, list, tuple, ...), not only str.
    """
    # From https://stackoverflow.com/a/312464
    for i in range(0, len(l), n):
        yield l[i:i + n]
|
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
|
|
|
|
def apply_msgs_input(argmessages: typing.List[str], action: typing.Callable,
                     write: bool = False) -> None:
    """
    Run a function on the messages given by the user.

    Messages are designated by their 12-character UIDs, either as
    command-line arguments or, when none are given (or a single "-"),
    one per line on stdin.

    :param argmessages: UID arguments from the command line.
    :param action: Callable run on each matching notmuch message.
    :param write: Open the database in read+write mode for the action.
    """
    if argmessages:
        # BUG FIX: previously compared the whole *list* to '-', which is
        # always False; compare its single element so "-" selects stdin.
        fromStdin = len(argmessages) == 1 and argmessages[0] == '-'
    else:
        # No explicit messages: read stdin when something is piped in.
        fromStdin = not sys.stdin.isatty()

    messages = list()
    if fromStdin:
        for line in sys.stdin:
            # The UID is the first 12 characters of each listing line.
            uid = line[:12]
            if not isUID(uid):
                log.error("Not an UID: %s", uid)
                continue
            messages.append(uid)
    else:
        for uids in argmessages:
            if len(uids) > 12:
                log.warning("Might have forgotten some spaces between the " +
                            "UIDs. Don't worry, I'll split them for you")
            for uid in chunks(uids, 12):
                if not isUID(uid):
                    log.error("Not an UID: %s", uid)
                    continue
                messages.append(uid)

    for message in messages:
        queryStr = f'tag:tuid{message}'
        nbMsgs = applyMsgs(queryStr, action, write=write, closeDb=False)
        if nbMsgs < 1:
            log.error("Couldn't execute function for message %s", message)
    close_database()
|
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
|
|
|
|
def format_header_value(val: str) -> str:
    """
    Collapse a possibly folded header value into one contiguous,
    trimmed string (newlines and tabs removed).
    """
    unfolded = val.replace('\n', '')
    unfolded = unfolded.replace('\t', '')
    return unfolded.strip()
|
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
|
|
|
|
def sizeof_fmt(num: int, suffix: str = 'B') -> str:
    """
    Render a byte count in a human-readable form (binary prefixes).
    """
    # From https://stackoverflow.com/a/1094933
    value = float(num)
    for unit in ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi'):
        if abs(value) < 1024.0:
            return "%3.1f %s%s" % (value, unit, suffix)
        value /= 1024.0
    return "%.1f %s%s" % (value, 'Yi', suffix)
|
|
|
|
|
|
|
|
|
|
|
|
# Line template for a multipart node of the MIME parts tree
# ({nb}: part number, {indent}: depth, {typ}: content type).
PART_MULTI_FORMAT = colorama.Fore.BLUE + \
    '{nb} {indent}+ {typ}' + colorama.Style.RESET_ALL
# Line template for a leaf part ({desc}: Content-Description,
# {size}: human-readable payload size).
PART_LEAF_FORMAT = colorama.Fore.BLUE + \
    '{nb} {indent}→ {desc} ({typ}; {size})' + \
    colorama.Style.RESET_ALL
|
|
|
|
|
|
|
|
|
|
|
|
def show_parts_tree(part: email.message.Message,
                    depth: int = 0, nb: int = 1) -> int:
    """
    Show a tree of the parts contained in a message.
    Return the number of parts of the message.

    :param part: The (sub-)message to display.
    :param depth: Current indentation level.
    :param nb: Number printed for this part (parts are numbered in
        depth-first order).
    """
    indent = depth * '\t'
    typ = part.get_content_type()

    if part.is_multipart():
        print(PART_MULTI_FORMAT.format(nb=nb, indent=indent, typ=typ))
        payl = part.get_payload()
        assert isinstance(payl, list)
        # Count this container as one part, then add its children;
        # `nb+size` keeps the depth-first numbering consistent.
        size = 1
        for obj in payl:
            size += show_parts_tree(obj, depth=depth+1, nb=nb+size)
        return size

    # size = len(part.get_payload(decode=True))
    payl = part.get_payload(decode=True)
    assert isinstance(payl, bytes)
    size = len(payl)
    desc = part.get('Content-Description', '<no description>')
    print(PART_LEAF_FORMAT.format(nb=nb, indent=indent, typ=typ,
                                  desc=desc, size=sizeof_fmt(size)))
    return 1
|
|
|
|
|
2018-08-14 19:25:07 +02:00
|
|
|
|
|
|
|
INTERESTING_HEADERS = ["Date", "From", "Subject", "To", "Cc", "Message-Id"]
|
2019-10-26 17:09:22 +02:00
|
|
|
HEADER_FORMAT = colorama.Fore.BLUE + colorama.Style.BRIGHT + \
|
|
|
|
'{}:' + colorama.Style.NORMAL + ' {}' + colorama.Style.RESET_ALL
|
|
|
|
|
|
|
|
|
|
|
|
def read_msg(msg: notmuch.Message) -> None:
    """
    Display the given message: its main headers, the tree of its MIME
    parts, and the content of every text/plain part.
    """
    # Parse
    filename = msg.get_filename()
    parser = email.parser.BytesParser()
    with open(filename, 'rb') as f:
        mail = parser.parse(f)

    # Debug
    global a
    a = mail

    # Defects
    if mail.defects:
        log.warning("Defects found in the mail:")
        for defect in mail.defects:
            log.warning(defect)

    # Headers
    for key in INTERESTING_HEADERS:
        val = mail.get(key)
        if val:
            assert isinstance(val, str)
            val = format_header_value(val)
            print(HEADER_FORMAT.format(key, val))
    # TODO Show all headers
    # TODO BONUS Highlight failed verifications

    show_parts_tree(mail)
    print()

    # Show text/plain
    for part in mail.walk():
        if part.get_content_type() == "text/plain":
            payl = part.get_payload(decode=True)
            assert isinstance(payl, bytes)
            print(payl.decode())
|
|
|
|
|
2018-08-14 17:23:57 +02:00
|
|
|
|
|
|
|
perfstep("definitions")
|
|
|
|
|
2018-08-14 10:08:59 +02:00
|
|
|
if __name__ == "__main__":
|
|
|
|
# Main arguments
|
|
|
|
parser = argparse.ArgumentParser(description="Meh mail client")
|
|
|
|
selectedVerbosityLevels = ["DEBUG", "INFO", "WARNING", "ERROR", "FATAL"]
|
2019-10-26 17:09:22 +02:00
|
|
|
parser.add_argument('-v', '--verbosity', choices=selectedVerbosityLevels,
|
|
|
|
default='WARNING', help="Verbosity of log messages")
|
|
|
|
# parser.add_argument('-n', '--dry-run', action='store_true',
|
|
|
|
# help="Don't do anything") # DEBUG
|
|
|
|
defaultConfigFile = os.path.join(
|
|
|
|
xdg.BaseDirectory.xdg_config_home, 'mel', 'accounts.conf')
|
|
|
|
parser.add_argument('-c', '--config', default=defaultConfigFile,
|
|
|
|
help="Accounts config file")
|
2018-08-14 10:08:59 +02:00
|
|
|
|
2018-08-14 17:23:57 +02:00
|
|
|
subparsers = parser.add_subparsers(help="Action to execute")
|
2018-08-14 10:08:59 +02:00
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
# List messages
|
2018-08-14 10:08:59 +02:00
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
def func_default(_: argparse.Namespace) -> None:
|
|
|
|
"""
|
|
|
|
Default operation: list all message in the inbox
|
|
|
|
"""
|
2018-08-14 17:23:57 +02:00
|
|
|
applyMsgs('tag:inbox', print_msg)
|
|
|
|
parser.set_defaults(func=func_default)
|
2018-08-14 10:08:59 +02:00
|
|
|
|
|
|
|
# inbox (default)
|
2019-10-26 17:09:22 +02:00
|
|
|
def func_inbox(args: argparse.Namespace) -> None:
|
|
|
|
"""
|
|
|
|
Inbox operation: list all message in the inbox,
|
|
|
|
possibly only the unread ones.
|
|
|
|
"""
|
2018-08-14 10:08:59 +02:00
|
|
|
queryStr = 'tag:unread' if args.only_unread else 'tag:inbox'
|
|
|
|
applyMsgs(queryStr, print_msg)
|
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
parserInbox = subparsers.add_parser(
|
|
|
|
"inbox", help="Show unread, unsorted and flagged messages")
|
|
|
|
parserInbox.add_argument('-u', '--only-unread', action='store_true',
|
|
|
|
help="Show unread messages only")
|
2018-08-14 10:08:59 +02:00
|
|
|
# TODO Make this more relevant
|
|
|
|
parserInbox.set_defaults(func=func_inbox)
|
|
|
|
|
|
|
|
# list folder [--recurse]
|
2019-10-26 17:09:22 +02:00
|
|
|
# List actions
|
2018-08-14 17:23:57 +02:00
|
|
|
|
|
|
|
# flag msg...
|
2019-10-26 17:09:22 +02:00
|
|
|
|
|
|
|
def func_flag(args: argparse.Namespace) -> None:
|
|
|
|
"""
|
|
|
|
Flag operation: Flag user selected messages.
|
|
|
|
"""
|
|
|
|
def flag_msg(msg: notmuch.Message) -> None:
|
|
|
|
"""
|
|
|
|
Flag given message.
|
|
|
|
"""
|
2018-08-14 17:23:57 +02:00
|
|
|
msg.add_tag('flagged')
|
2018-08-14 19:25:07 +02:00
|
|
|
apply_msgs_input(args.message, flag_msg, write=True)
|
2018-08-14 17:23:57 +02:00
|
|
|
parserFlag = subparsers.add_parser("flag", help="Mark messages as flagged")
|
|
|
|
parserFlag.add_argument('message', nargs='*', help="Messages")
|
|
|
|
parserFlag.set_defaults(func=func_flag)
|
|
|
|
|
|
|
|
# unflag msg...
|
2019-10-26 17:09:22 +02:00
|
|
|
|
|
|
|
def func_unflag(args: argparse.Namespace) -> None:
|
|
|
|
"""
|
|
|
|
Unflag operation: Flag user selected messages.
|
|
|
|
"""
|
|
|
|
def unflag_msg(msg: notmuch.Message) -> None:
|
|
|
|
"""
|
|
|
|
Unflag given message.
|
|
|
|
"""
|
2018-08-14 17:23:57 +02:00
|
|
|
msg.remove_tag('flagged')
|
2018-08-14 19:25:07 +02:00
|
|
|
apply_msgs_input(args.message, unflag_msg, write=True)
|
2019-10-26 17:09:22 +02:00
|
|
|
parserUnflag = subparsers.add_parser(
|
|
|
|
"unflag", help="Mark messages as not-flagged")
|
2018-08-14 17:23:57 +02:00
|
|
|
parserUnflag.add_argument('message', nargs='*', help="Messages")
|
|
|
|
parserUnflag.set_defaults(func=func_unflag)
|
|
|
|
|
2018-08-14 10:08:59 +02:00
|
|
|
# delete msg...
|
|
|
|
# spam msg...
|
|
|
|
# move dest msg...
|
2019-10-26 17:09:22 +02:00
|
|
|
# Read message
|
2018-08-14 19:25:07 +02:00
|
|
|
|
2018-08-14 10:08:59 +02:00
|
|
|
# read msg [--html] [--plain] [--browser]
|
2019-10-26 17:09:22 +02:00
|
|
|
|
|
|
|
def func_read(args: argparse.Namespace) -> None:
|
|
|
|
"""
|
|
|
|
Read operation: show full content of selected message
|
|
|
|
"""
|
2018-08-14 19:25:07 +02:00
|
|
|
apply_msgs_input(args.message, read_msg)
|
|
|
|
parserRead = subparsers.add_parser("read", help="Read message")
|
|
|
|
parserRead.add_argument('message', nargs=1, help="Messages")
|
|
|
|
parserRead.set_defaults(func=func_read)
|
|
|
|
|
2018-08-14 10:08:59 +02:00
|
|
|
# attach msg [id] [--save] (list if no id, xdg-open else)
|
2019-10-26 17:09:22 +02:00
|
|
|
# Redaction
|
2018-08-14 10:08:59 +02:00
|
|
|
# new account
|
|
|
|
# reply msg [--all]
|
2019-10-26 17:09:22 +02:00
|
|
|
# Folder management
|
2018-08-14 17:23:57 +02:00
|
|
|
# tree [folder]
|
2018-08-14 10:08:59 +02:00
|
|
|
# mkdir folder
|
|
|
|
# rmdir folder (prevent if folder isn't empty (mail/subfolder))
|
|
|
|
# (yeah that should do)
|
2019-10-26 17:09:22 +02:00
|
|
|
# Meta
|
2018-08-14 10:08:59 +02:00
|
|
|
# setup (interactive thing maybe)
|
|
|
|
|
|
|
|
# fetch (mbsync, notmuch new, retag, notify; called by greater gods)
|
2019-10-26 17:09:22 +02:00
|
|
|
|
|
|
|
def func_fetch(args: argparse.Namespace) -> None:
|
|
|
|
"""
|
|
|
|
Fetch operation: Sync remote databases with the local one.
|
|
|
|
"""
|
2018-08-14 10:08:59 +02:00
|
|
|
# Fetch mails
|
|
|
|
log.info("Fetching mails")
|
2019-10-26 17:09:22 +02:00
|
|
|
mbsyncConfigPath = os.path.expanduser(
|
|
|
|
"~/.config/mbsyncrc") # TODO Better
|
2018-08-14 10:08:59 +02:00
|
|
|
cmd = ["mbsync", "--config", mbsyncConfigPath, "--all"]
|
2019-10-26 17:09:22 +02:00
|
|
|
subprocess.run(cmd, check=True)
|
2018-08-14 10:08:59 +02:00
|
|
|
|
|
|
|
# Index new mails
|
2018-08-17 15:08:40 +02:00
|
|
|
notmuch_new()
|
2018-08-14 10:08:59 +02:00
|
|
|
|
|
|
|
# Notify
|
2018-08-14 17:23:57 +02:00
|
|
|
notify_all()
|
|
|
|
|
|
|
|
# Tag new mails
|
|
|
|
applyMsgs('tag:unprocessed', retag_msg, showProgress=True, write=True)
|
2018-08-14 10:08:59 +02:00
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
parserFetch = subparsers.add_parser(
|
|
|
|
"fetch", help="Fetch mail, tag them, and run notifications")
|
2018-08-14 10:08:59 +02:00
|
|
|
parserFetch.set_defaults(func=func_fetch)
|
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
# Debug
|
2018-08-14 17:23:57 +02:00
|
|
|
# debug (various)
|
2019-10-26 17:09:22 +02:00
|
|
|
|
|
|
|
def func_expose(_: argparse.Namespace) -> None:
|
|
|
|
"""
|
|
|
|
DEBUG
|
|
|
|
"""
|
2018-08-14 19:25:07 +02:00
|
|
|
# And leave the door open
|
2019-10-26 17:09:22 +02:00
|
|
|
def expose_object(msg: typing.Any) -> None:
|
|
|
|
"""
|
|
|
|
DEBUG
|
|
|
|
"""
|
|
|
|
global a
|
|
|
|
a = msg
|
|
|
|
applyMsgs('tag:tuidyviU45m6flff', expose_object, closeDb=False)
|
|
|
|
|
|
|
|
def func_debug(_: argparse.Namespace) -> None:
|
|
|
|
"""
|
|
|
|
DEBUG
|
|
|
|
"""
|
2018-08-17 15:08:40 +02:00
|
|
|
from pprint import pprint
|
|
|
|
pprint(list_folders())
|
2019-10-26 17:09:22 +02:00
|
|
|
parserDebug = subparsers.add_parser(
|
|
|
|
"debug", help="Who know what this holds...")
|
2018-08-14 19:25:07 +02:00
|
|
|
parserDebug.set_defaults(verbosity='DEBUG')
|
2018-08-17 15:08:40 +02:00
|
|
|
parserDebug.set_defaults(func=func_debug)
|
2018-08-14 17:23:57 +02:00
|
|
|
|
|
|
|
# retag (all or unprocessed)
|
2019-10-26 17:09:22 +02:00
|
|
|
def func_retag(_: argparse.Namespace) -> None:
|
|
|
|
"""
|
|
|
|
Retag operation: Manually retag all the mails in the database.
|
|
|
|
Mostly debug I suppose.
|
|
|
|
"""
|
2018-08-14 17:23:57 +02:00
|
|
|
applyMsgs('*', retag_msg, showProgress=True, write=True)
|
2019-10-26 17:09:22 +02:00
|
|
|
parserRetag = subparsers.add_parser(
|
|
|
|
"retag", help="Retag all mails (when you changed configuration)")
|
2018-08-14 17:23:57 +02:00
|
|
|
parserRetag.set_defaults(func=func_retag)
|
|
|
|
|
|
|
|
# all
|
2019-10-26 17:09:22 +02:00
|
|
|
def func_all(_: argparse.Namespace) -> None:
|
|
|
|
"""
|
|
|
|
All operation: list every single message.
|
|
|
|
"""
|
2018-08-14 17:23:57 +02:00
|
|
|
applyMsgs('*', print_msg)
|
2018-08-14 10:08:59 +02:00
|
|
|
|
2018-08-14 17:23:57 +02:00
|
|
|
parserAll = subparsers.add_parser("all", help="Show ALL messages")
|
|
|
|
parserAll.set_defaults(func=func_all)
|
|
|
|
|
|
|
|
# Init
|
2018-08-14 10:08:59 +02:00
|
|
|
args = parser.parse_args()
|
2018-08-14 17:23:57 +02:00
|
|
|
perfstep("parse_args")
|
2018-08-14 10:08:59 +02:00
|
|
|
|
|
|
|
colorama.init()
|
|
|
|
coloredlogs.install(level=args.verbosity, fmt='%(levelname)s %(message)s')
|
|
|
|
log = logging.getLogger()
|
|
|
|
|
2019-10-26 17:09:22 +02:00
|
|
|
log.info("Loading config %s", args.config)
|
2018-08-14 10:08:59 +02:00
|
|
|
if not os.path.isfile(args.config):
|
2019-10-26 17:09:22 +02:00
|
|
|
log.fatal("config file not found: %s", args.config)
|
2018-08-14 10:08:59 +02:00
|
|
|
sys.exit(1)
|
|
|
|
# TODO Create it, maybe?
|
2019-10-26 17:09:22 +02:00
|
|
|
CONFIG = configparser.ConfigParser()
|
|
|
|
CONFIG.read(args.config)
|
2018-08-14 10:08:59 +02:00
|
|
|
|
|
|
|
generate_aliases()
|
2018-08-14 17:23:57 +02:00
|
|
|
perfstep("config")
|
2018-08-14 10:08:59 +02:00
|
|
|
|
|
|
|
if args.func:
|
2019-10-26 17:09:22 +02:00
|
|
|
log.info("Executing function %s", args.func)
|
2018-08-14 10:08:59 +02:00
|
|
|
args.func(args)
|
|
|
|
|
2018-08-14 17:23:57 +02:00
|
|
|
perfstep("exec")
|
|
|
|
|
|
|
|
# DEBUG
|
2019-10-26 17:09:22 +02:00
|
|
|
for kv in sorted(PERF_DICT.items(), key=lambda p: p[1]):
|
|
|
|
log.debug("{1:.6f}s {0}".format(*kv))
|
2019-07-08 07:57:29 +02:00
|
|
|
sys.exit(0)
|