Let my HOME alone 1/2
This commit is contained in:
parent
2ae37e902e
commit
a83e45df5e
94 changed files with 328 additions and 58 deletions
2
config/scripts/.gitignore
vendored
Normal file
2
config/scripts/.gitignore
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
sct
|
||||
node_modules
|
3
config/scripts/arch-kexec
Executable file
3
config/scripts/arch-kexec
Executable file
|
@ -0,0 +1,3 @@
|
|||
#!/usr/bin/env bash
# Reboot into the currently-installed kernel via kexec (skips firmware/POST):
# load the kernel + initramfs, reusing the running kernel's command line,
# then ask systemd to perform the kexec reboot.
sudo kexec -l /boot/vmlinuz-linux --initrd=/boot/initramfs-linux.img --reuse-cmdline
sudo systemctl kexec
|
182
config/scripts/archive
Executable file
182
config/scripts/archive
Executable file
|
@ -0,0 +1,182 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import coloredlogs
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
coloredlogs.install(level='DEBUG', fmt='%(levelname)s %(message)s')
|
||||
log = logging.getLogger()
|
||||
|
||||
# Coding conventions:
|
||||
# No leading or trailing slashes. Let os.path.join do its job
|
||||
|
||||
# TODO Config arparse and pass args to the functions. No globals
|
||||
|
||||
# Finding directories
|
||||
assert 'HOME' in os.environ, "Home directory unknown"
|
||||
DOCS = os.path.realpath(os.path.join(os.environ['HOME'], 'Documents'))
|
||||
assert os.path.isdir(DOCS), "Documents folder not found"
|
||||
ARCS = os.path.realpath(os.path.join(os.environ['HOME'], 'Archives'))
|
||||
assert os.path.isdir(ARCS), "Archives folder not found"
|
||||
|
||||
|
||||
def dirRange(relpath):
    """
    Pair every prefix of relpath under DOCS with the same prefix under ARCS.

    For relpath "a/b/c" returns
    [(DOCS/a, ARCS/a), (DOCS/a/b, ARCS/a/b), (DOCS/a/b/c, ARCS/a/b/c)];
    the last tuple corresponds to relpath itself.
    """
    splits = relpath.split(os.path.sep)
    res = list()

    for p in range(len(splits)):
        partPath = os.path.join(*splits[:p + 1])

        # Removed redundant os.path.join(os.path.join(...)) nesting
        arcPath = os.path.join(ARCS, partPath)
        docPath = os.path.join(DOCS, partPath)

        res.append((docPath, arcPath))

    return res
||||
|
||||
def travel(relpath):
    """
    Dunno what this will do, let's write code and see.

    Walks each level of relpath (via dirRange) and reconciles the
    Documents side with the Archives side: traverses matching directories,
    stops early at an existing correct link, prints the shell commands
    that would link/move, and exits the process on inconsistencies.
    """
    wholeRange = dirRange(relpath)
    for tup in wholeRange:
        # Last tuple is the target itself; the others are its parents
        isLast = wholeRange[-1] == tup
        docPath, arcPath = tup
        # Relative link target so the tree stays relocatable
        linkPath = os.path.relpath(arcPath, start=docPath)

        log.debug(f"47 {tup}")

        if not os.path.exists(docPath) and not os.path.exists(arcPath):
            log.error("Not existing")
            sys.exit(1)
        elif os.path.isdir(docPath) and os.path.isdir(arcPath) and not isLast:
            log.debug("Both folder")
            continue
        elif os.path.isdir(docPath) and os.path.isdir(arcPath) and isLast:
            log.error("This should fail for some reason, maybe")
            sys.exit(1)
        elif os.path.islink(docPath) and os.path.exists(arcPath):
            currentLink = os.readlink(docPath)
            if currentLink != linkPath:
                log.warning(f"'{docPath}' is pointing to '{currentLink}' " +
                            f"but should point to '{linkPath}'.")
                # TODO Fixing if asked for
                sys.exit(1)
            # BUG FIX: was a plain string literal, so the placeholders
            # were never interpolated in the log output
            log.debug(f"Early link already exists {docPath} → {arcPath}")
            return
        elif not os.path.exists(docPath) and os.path.exists(arcPath):
            log.debug("Only existing on archive side, linking")
            print(f"ln -s {linkPath} {docPath}")
        elif os.path.exists(docPath) and not os.path.exists(arcPath) \
                and isLast:
            log.debug("Only existing on doc side, moving and linking")
            print(f"mv {docPath} {arcPath}")
            print(f"ln -s {linkPath} {docPath}")
        elif os.path.exists(docPath) and not os.path.exists(arcPath) \
                and not isLast:
            raise NotImplementedError("Here comes the trouble")
        else:
            log.error("Unhandled case")
            sys.exit(1)
|
||||
|
||||
|
||||
def ensureLink(relpath):
    """
    Ensure that ~/Documents/$relpath points to ~/Archives/$relpath

    Walks every prefix of relpath (via dirRange): real directories are
    traversed, a missing or wrong symlink is (re)created, anything else
    aborts with RuntimeError. Honors the global args.dry flag by printing
    equivalent shell commands instead of touching the filesystem.
    """
    arcPath = os.path.join(os.path.join(ARCS, relpath))
    docPath = os.path.join(os.path.join(DOCS, relpath))
    # The archive side must already exist; only the Documents side is touched
    assert os.path.exists(arcPath)

    # For each tree element of the path
    for docPath, arcPath in dirRange(relpath):
        # Relative link target so the tree stays relocatable
        linkPath = os.path.relpath(arcPath, start=docPath)

        # Create the link at the current level (or print the command in dry mode)
        def installLink():
            if args.dry:
                print(f"ln -s {linkPath} {docPath}")
            else:
                os.symlink(linkPath, docPath)

        if os.path.islink(docPath):
            currentLink = os.readlink(docPath)
            if currentLink != linkPath:
                log.warning(f"'{docPath}' is pointing to '{currentLink}' " +
                            f"but should point to '{linkPath}'. Fixing")
                if args.dry:
                    print(f"rm {docPath}")
                else:
                    os.unlink(docPath)
                installLink()
            # A (now correct) link at this level covers everything below it
            return
        elif not os.path.exists(docPath):
            installLink()
            return
        elif os.path.isdir(docPath):
            # Real directory: descend to the next path element
            continue
        else:
            raise RuntimeError(f"'{docPath}' exists and is not a directory " +
                               f"or a link. Unable to link it to '{linkPath}'")
    # Reached only when every element, including the last, was a real
    # directory — the target itself cannot be replaced by a link
    raise RuntimeError(f"'{docPath}' is a directory. Unable to link it to " +
                       f"'{linkPath}'")
|
||||
|
||||
|
||||
def archive(docdir):
    """
    Move docdir (a directory inside ~/Documents) to the mirrored location
    under ~/Archives and leave a relative symlink in its place.

    No-op when the archive-side directory already exists. Honors the
    global args.dry flag by printing the equivalent shell commands.
    """
    # BUG FIX: the parameter was ignored — the original read the global
    # args.dir here, making the argument dead
    docdir = os.path.realpath(docdir)
    assert os.path.isdir(docdir), docdir + " must be a directory"

    assert docdir.startswith(DOCS), "Directory is not in the document folder"
    assert not docdir.startswith(ARCS), "Directory is already in the archive folder"

    reldir = os.path.relpath(docdir, DOCS)
    print("ARC", reldir)

    arcdir = os.path.join(ARCS, reldir)
    parentArcdir = os.path.realpath(os.path.join(arcdir, '..'))
    parentDocdir = os.path.realpath(os.path.join(docdir, '..'))
    # Relative link target so the tree stays relocatable
    linkDest = os.path.relpath(arcdir, parentDocdir)

    # If the directory already exists on the archive side, nothing to do
    if os.path.isdir(arcdir):
        return
        # for f in os.listdir(arcdir):
        #     assert os.path.isdir(f), "Something unknown in Archive dir")
        #     archive(os.path.join(arcdir, f))

    # Otherwise create the parent directories, move the folder over and
    # link back from the Documents side
    else:
        if args.dry:
            print("mkdir -p", parentArcdir)
        else:
            os.makedirs(parentArcdir, exist_ok=True)

        if args.dry:
            print("mv", docdir, arcdir)
        else:
            os.rename(docdir, arcdir)

        if args.dry:
            print("ln -s", linkDest, docdir)
        else:
            os.symlink(linkDest, docdir)
|
||||
|
||||
|
||||
def unarchive(arcdir):
    # TODO Not implemented — presumably the inverse of archive(): move
    # arcdir back under ~/Documents and remove the symlink. Confirm intent.
    pass
|
||||
|
||||
|
||||
if __name__ == "__main__":

    # CLI entry point: archive (currently: just ensure the link for) a
    # directory under ~/Documents
    parser = argparse.ArgumentParser(description="Place a folder in ~/Documents in ~/Documents/Archives and symlink it")
    parser.add_argument('dir', metavar='DIRECTORY', type=str, help="The directory to archive")
    parser.add_argument('-d', '--dry', action='store_true')
    args = parser.parse_args()
    # NOTE(review): debug leftover — this forces dry-run regardless of the
    # -d flag; remove it to let the script actually touch the filesystem
    args.dry = True # DEBUG

    # archive(args.dir)
    ensureLink(args.dir)
|
3
config/scripts/beep
Executable file
3
config/scripts/beep
Executable file
|
@ -0,0 +1,3 @@
|
|||
#!/usr/bin/env sh
# Play a short two-tone notification sound with SoX's `play`
# (stderr silenced, e.g. for when no audio device is available)
play -n synth sine E5 sine A4 remix 1-2 fade 0.5 1.2 0.5 2> /dev/null
# echo
|
49
config/scripts/changeColors
Executable file
49
config/scripts/changeColors
Executable file
|
@ -0,0 +1,49 @@
|
|||
#!/usr/bin/env bash

# Fetchs colors from https://github.com/chriskempson/base16 templates
# and apply them into my configuration

scheme="$1"
if [ -z "$scheme" ]
then
	echo Please specify a scheme
	exit 1
fi

# TODO Verify if scheme is known

# Shell (allows to use all colors in Vim while still having nice colors in the rest of the terminal)
curl "https://raw.githubusercontent.com/chriskempson/base16-shell/master/scripts/base16-${scheme}.sh" > ~/.local/bin/colorSchemeApply
chmod +x ~/.local/bin/colorSchemeApply

# Xressources (I'm not sure if this is really needed with shell overriding these but i3 load those resources)
curl "https://raw.githubusercontent.com/chriskempson/base16-xresources/master/xresources/base16-${scheme}-256.Xresources" > ~/.Xresources.d/theme

# Vim
echo -e "let base16colorspace=256\n\"set termguicolors\ncolorscheme base16-${scheme}" > ~/.vim/colorscheme.vim

# FZF
curl "https://raw.githubusercontent.com/nicodebo/base16-fzf/master/bash/base16-${scheme}.config" > ~/.local/bin/colorSchemeApplyFzf
chmod +x ~/.local/bin/colorSchemeApplyFzf

# rofi
curl "https://raw.githubusercontent.com/0xdec/base16-rofi/master/themes/base16-${scheme}.rasi" > ~/.config/rofi/theme.rasi
curl "https://raw.githubusercontent.com/0xdec/base16-rofi/master/themes/base16-${scheme}.config" > ~/.config/rofi/theme.config

# qutebrowser
# BUG FIX: the theme was downloaded twice — once from a pinned commit and
# once from master, with master overwriting the first. Kept the single
# effective (master) fetch.
curl "https://raw.githubusercontent.com/theova/base16-qutebrowser/master/themes/base16-${scheme}.config.py" > ~/.config/qutebrowser/theme.py

# TODO dunst (template online, but not to my liking)
# TODO bar (might change bar in the future, so...)
# TODO highlight (there IS a template but the colors look different from vim and mostly the same from when there's no config)

# Reload a bunch of things to make changes immediate
~/.local/bin/colorSchemeApply
~/.local/bin/colorSchemeApplyFzf
# Error masking in case X not running
xrdb -load ~/.Xresources 2> /dev/null
i3-msg reload 2> /dev/null
|
||||
|
16
config/scripts/cleandev
Executable file
16
config/scripts/cleandev
Executable file
|
@ -0,0 +1,16 @@
|
|||
#!/usr/bin/env bash

# Removes files that can be regenerated
# from a dev environment

# BUG FIX: the -name alternatives must be parenthesized — find's implicit
# -and binds tighter than -or, so "-type d -name A -or -name B -print0"
# only ever printed the B matches (and matched non-directories for B).
# Also read NUL-delimited entries to match -print0, as the loops below do.
find . -type d \( -name bower_components -or -name node_modules \) -print0 | while IFS= read -r -d '' file; do
	rm -rf "$file"
done
# Run `make clean` next to every Makefile
find . -type f -name Makefile -print0 | while IFS= read -r -d '' file; do
	echo "--> $file"
	(cd "${file//Makefile}"; make clean)
done
# Repack every git repository
find . -type d -name .git -print0 | while IFS= read -r -d '' dir; do
	# BUG FIX: echoed the stale $file from the previous loop instead of $dir
	echo "--> $dir"
	(cd "$dir"; git gc)
done
|
213
config/scripts/compressPictureMovies
Executable file
213
config/scripts/compressPictureMovies
Executable file
|
@ -0,0 +1,213 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import logging
|
||||
import coloredlogs
|
||||
import progressbar
|
||||
import time
|
||||
import hashlib
|
||||
import tempfile
|
||||
import json
|
||||
import statistics
|
||||
import datetime
|
||||
|
||||
coloredlogs.install(level='DEBUG', fmt='%(levelname)s %(message)s')
|
||||
log = logging.getLogger()
|
||||
|
||||
# Constants
|
||||
PICTURES_FOLDER = os.path.join(os.path.expanduser("~"), "Images")
|
||||
ORIGINAL_FOLDER = os.path.join(os.path.expanduser("~"), ".ImagesOriginaux")
|
||||
MOVIE_EXTENSIONS = ["mov", "avi", "mp4", "3gp", "webm", "mkv"]
|
||||
OUTPUT_EXTENSION = "webm"
|
||||
OUTPUT_FFMPEG_PARAMETERS = ["-c:v", "libvpx-vp9", "-crf", "30", "-b:v", "0"]
|
||||
# OUTPUT_FFMPEG_PARAMETERS = ["-c:v", "libaom-av1", "-crf", "30", "-strict", "experimental", "-c:a", "libopus"]
|
||||
DURATION_MAX_DEV = 1
|
||||
|
||||
|
||||
def videoMetadata(filename):
    """
    Read ffmetadata key/value pairs from a video file via ffmpeg.

    Keys are decoded and lowercased, values decoded as-is. Blank lines
    and ';'-comments are skipped; unparseable lines are logged and
    skipped. Raises CalledProcessError if ffmpeg exits non-zero.
    """
    assert os.path.isfile(filename)
    cmd = ["ffmpeg", "-i", filename, "-f", "ffmetadata", "-"]
    proc = subprocess.run(cmd, stdout=subprocess.PIPE,
                          stderr=subprocess.DEVNULL)
    proc.check_returncode()

    data = dict()
    for rawLine in proc.stdout.split(b'\n'):
        # Skip empty lines and ';'-prefixed comments
        if not rawLine or rawLine.startswith(b';'):
            continue
        # Parse key-value
        pair = rawLine.split(b'=')
        if len(pair) != 2:
            log.warning("Unparsed metadata line: `{}`".format(rawLine))
            continue
        rawKey, rawVal = pair
        data[rawKey.decode().lower()] = rawVal.decode()
    return data
|
||||
|
||||
def videoInfos(filename):
    """
    Return the stream information of a video file as a dict, parsed from
    ffprobe's JSON output. Raises CalledProcessError on non-zero exit.
    """
    assert os.path.isfile(filename)
    cmd = ["ffprobe", filename, "-print_format", "json", "-show_streams"]
    proc = subprocess.run(cmd, stdout=subprocess.PIPE,
                          stderr=subprocess.DEVNULL)
    proc.check_returncode()
    return json.loads(proc.stdout)
|
||||
|
||||
from pprint import pprint
|
||||
def streamDuration(stream):
    """
    Extract the duration, in seconds, of one ffprobe stream dict.

    Tries, in order: the explicit "duration" field, frame count divided
    by sample rate, then the "DURATION" tag in H:M:S form.

    Raises KeyError when no duration information is present.
    """
    if "duration" in stream:
        return float(stream["duration"])

    if "sample_rate" in stream and "nb_frames" in stream:
        return int(stream["nb_frames"]) / int(stream["sample_rate"])

    if "tags" in stream and "DURATION" in stream["tags"]:
        parts = stream["tags"]["DURATION"].split(":")
        assert len(parts) == 3
        hours, minutes, seconds = (float(p) for p in parts)
        return (hours * 60 + minutes) * 60 + seconds

    raise KeyError("Can't find duration information in stream")
|
||||
|
||||
def videoDuration(filename):
    """
    Average duration (seconds) across all streams of the file.

    Asserts that per-stream durations agree within DURATION_MAX_DEV
    (standard deviation).
    """
    # TODO Doesn't work with VP8 / webm
    durations = [streamDuration(s) for s in videoInfos(filename)["streams"]]
    dev = statistics.stdev(durations)
    assert dev <= DURATION_MAX_DEV, "Too much deviation ({} s)".format(dev)
    return sum(durations) / len(durations)
|
||||
|
||||
|
||||
# Accumulators filled by the analysis pass below
todos = set()
totalSize = 0
totalDuration = 0

# Walk folders
log.info("Listing files in {}".format(PICTURES_FOLDER))
allVideos = list()
for root, dirs, files in os.walk(PICTURES_FOLDER):
    # If folder is in ORIGINAL_FOLDER, skip it (holds pre-conversion backups)
    if root.startswith(ORIGINAL_FOLDER):
        continue
    # Iterate over files
    for inputName in files:
        # If the file is not a video, skip it
        inputNameBase, inputExt = os.path.splitext(inputName)
        inputExt = inputExt[1:].lower()  # extension without the leading dot
        if inputExt not in MOVIE_EXTENSIONS:
            continue

        allVideos.append((root, inputName))
|
||||
|
||||
# Analysis pass: decide which videos need conversion and collect them,
# with their sizes and durations, into `todos`.
log.info("Analyzing videos")
for root, inputName in progressbar.progressbar(allVideos):
    inputNameBase, inputExt = os.path.splitext(inputName)
    inputExt = inputExt[1:].lower()

    # Generates all needed filepaths
    ## Found file
    inputFull = os.path.join(root, inputName)
    inputRel = os.path.relpath(inputFull, PICTURES_FOLDER)
    ## Original file (where the pre-conversion source will be kept)
    originalFull = os.path.join(ORIGINAL_FOLDER, inputRel)
    originalRel = inputRel
    # BUG FIX: assert message referenced undefined `originalFile`, which
    # raised NameError instead of the intended AssertionError
    assert not os.path.isfile(originalFull), originalFull + " exists"

    ## Compressed file
    outputFull = os.path.join(root, inputNameBase + "." + OUTPUT_EXTENSION)

    # If the extension is the same of the output one
    if inputExt == OUTPUT_EXTENSION:
        # Read the metadata of the video
        meta = videoMetadata(inputFull)

        # If it has the field with the original file, it was already
        # produced by this script
        if 'original' in meta:
            # Skip file
            continue
    else:
        assert not os.path.isfile(outputFull), outputFull + " exists"

    size = os.stat(inputFull).st_size
    try:
        duration = videoDuration(inputFull)
    except Exception as e:
        log.warning("Can't determine duration of {}, skipping".format(inputFull))
        log.debug(e, exc_info=True)
        continue

    todo = (inputFull, originalFull, outputFull, size, duration)

    totalDuration += duration
    totalSize += size
    todos.add(todo)

log.info("Converting {} videos ({})".format(len(todos), datetime.timedelta(seconds=totalDuration)))
|
||||
|
||||
# From https://stackoverflow.com/a/3431838
|
||||
def sha256(fname):
    """Return the hex SHA-256 digest of the file at fname, read in chunks."""
    digest = hashlib.sha256()
    with open(fname, "rb") as handle:
        while True:
            chunk = handle.read(131072)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
|
||||
|
||||
# Progress bar things
# The second DataSize widget is bound to max_value to render "X of Y"
totalDataSize = progressbar.widgets.DataSize()
totalDataSize.variable = 'max_value'
barWidgets = [progressbar.widgets.DataSize(), ' of ', totalDataSize, ' ', progressbar.widgets.Bar(), ' ', progressbar.widgets.FileTransferSpeed(), ' ', progressbar.widgets.AdaptiveETA()]
bar = progressbar.DataTransferBar(max_value=totalSize, widgets=barWidgets)
bar.start()
processedSize = 0


# Conversion pass: re-encode each collected video into a temporary file,
# then atomically swap it in, keeping the source under ORIGINAL_FOLDER.
for inputFull, originalFull, outputFull, size, duration in todos:
    # Temporary output so a failed conversion never clobbers anything
    tmpfile = tempfile.mkstemp(prefix="compressPictureMovies", suffix="."+OUTPUT_EXTENSION)[1]
    try:
        # Calculate the sum of the original file
        checksum = sha256(inputFull)

        # Initiate a conversion in a temporary file
        originalRel = os.path.relpath(originalFull, ORIGINAL_FOLDER)
        # Stored in the output's metadata so already-converted files are
        # recognized by the 'original' check in the analysis pass
        originalContent = "{} {}".format(originalRel, checksum)
        metadataCmd = ["-metadata", 'original="{}"'.format(originalContent)]
        cmd = ["ffmpeg", "-hide_banner", "-y", "-i", inputFull] + OUTPUT_FFMPEG_PARAMETERS + metadataCmd + [tmpfile]
        p = subprocess.run(cmd)
        p.check_returncode()

        # Verify the duration of the new file against the source
        newDuration = videoDuration(tmpfile)
        dev = statistics.stdev((duration, newDuration))
        assert dev < DURATION_MAX_DEV, "Too much deviation in duration"

        # Move the original to the corresponding original folder
        originalDir = os.path.dirname(originalFull)
        os.makedirs(originalDir, exist_ok=True)
        shutil.move(inputFull, originalFull)

        # Move the converted file in place of the original
        shutil.move(tmpfile, outputFull)
    except Exception as e:
        # Best-effort: log the failure, drop the temp file, move on
        log.error("Couldn't process file {}".format(inputFull))
        log.error(e, exc_info=True)
        try:
            os.unlink(tmpfile)
        except Exception:
            pass
    # Progress bar things
    processedSize += size
    bar.update(processedSize)
bar.finish()


# TODO Iterate over the already compressed videos to assert the originals are
# in their correct place, else move them
|
12
config/scripts/cudarun
Executable file
12
config/scripts/cudarun
Executable file
|
@ -0,0 +1,12 @@
|
|||
#!/usr/bin/env bash

# Run a command on the NVIDIA GPU (Optimus laptop).
# Without X: power the card on via bbswitch, run the command, then unload
# the nvidia modules and power the card back off.
# With X: run through optirun with the CUDA paths set.

if [ -z $DISPLAY ]
then
	sudo tee /proc/acpi/bbswitch <<< ON
	"$@"
	# NOTE(review): rmmod usually requires root — confirm these succeed
	# without sudo here (bbswitch writes above/below do use sudo)
	rmmod nvidia_uvm
	rmmod nvidia
	sudo tee /proc/acpi/bbswitch <<< OFF
else
	PATH="/opt/cuda/bin:$PATH" LD_LIBRARY_PATH="/opt/cuda/lib64:$LD_LIBRARY_PATH" VBLANK=0 VGL_READBACK=pbo optirun -c yuv "$@"
fi
|
8
config/scripts/dafont
Executable file
8
config/scripts/dafont
Executable file
|
@ -0,0 +1,8 @@
|
|||
#!/usr/bin/env bash

# Downloads a font from dafont.com and
# extracts it in the user's font dir

wget "http://dl.dafont.com/dl/?f=$1" -O /tmp/dafont.zip
# BUG FIX: quote the exclusion pattern — unquoted, the shell expands
# *.txt against the current directory before unzip ever sees it
unzip /tmp/dafont.zip -d ~/.local/share/fonts -x '*.txt'
rm -rf /tmp/dafont.zip
|
455
config/scripts/debloc
Executable file
455
config/scripts/debloc
Executable file
|
@ -0,0 +1,455 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# Installs Debian packages on a Debian system
|
||||
# with no root access, in the user home
|
||||
|
||||
# CONFIGURATION
|
||||
|
||||
# Verifications
|
||||
|
||||
if [[ -z $DEBIAN_MIRROR && ! -f /etc/apt/sources.list ]]; then
|
||||
echo "Unable to find a mirror. Try setting DEBIAN_MIRROR (see help)."
|
||||
exit 1
|
||||
fi
|
||||
if [[ -z $DEBIAN_DB && ! $(which apt &> /dev/null) ]]; then
|
||||
echo "Unable to find a database for packages to install. Try setting DEBIAN_DB (see help)."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Overrides
|
||||
|
||||
[ -z $DEBLOC_PREFIX ] && DEBLOC_PREFIX=$(dpkg --print-architecture)
|
||||
[ -z $DEBLOC_DB ] && DEBLOC_DB=${XDG_CONFIG_HOME:-$HOME/.config}/debloc/$DEBLOC_PREFIX
|
||||
[ -z $DEBLOC_ROOT ] && DEBLOC_ROOT=$HOME/.debloc/$DEBLOC_PREFIX
|
||||
DEBLOC_LD=$DEBLOC_ROOT/ld
|
||||
|
||||
if [ -z "$DEBIAN_MIRROR" ]; then
|
||||
DEBIAN_MIRROR="$(cat /etc/apt/sources.list | grep '^deb ' | grep main | grep -v backports)"
|
||||
DEBIAN_MIRROR="$(echo -e "$DEBIAN_MIRROR" | cut -d ' ' -f 2 | sed 's/\/$//' | sort | uniq)"
|
||||
fi
|
||||
|
||||
# Preparation
|
||||
|
||||
mkdir -p $DEBLOC_DB &> /dev/null
|
||||
mkdir -p $DEBLOC_ROOT &> /dev/null
|
||||
|
||||
# PRIVATE FUNCTIONS
|
||||
|
||||
# Tell if a package exists
|
||||
function _debloc-exists { # package
|
||||
if [[ -n $DEBIAN_DB && -f $DEBIAN_DB ]]; then
|
||||
grep "^Package: $1\$" $DEBIAN_DB --quiet
|
||||
else
|
||||
LANG=C apt-cache show $1 &> /dev/null
|
||||
fi
|
||||
if [ $? == 0 ]; then
|
||||
return 1
|
||||
else
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# Return the real package associated with a virtual package
|
||||
# If not a virtual package, return the input
|
||||
function _debloc-filterVirtual { # package
|
||||
pkg=$1
|
||||
if [[ -n $DEBIAN_DB && -f $DEBIAN_DB ]]; then
|
||||
echo $pkg
|
||||
else
|
||||
LANG=C apt-cache policy $1 | grep "Candidate" | grep "(none)" > /dev/null
|
||||
if [ $? == 0 ]; then
|
||||
# TODO This is not really accurate
|
||||
LANG=C apt-cache showpkg $pkg | tail -1 | cut -d ' ' -f 1
|
||||
else
|
||||
echo $pkg
|
||||
fi
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
# Tell if a package is installed via debloc
|
||||
function _debloc-locallyInstalled { # package
|
||||
if [ -f $DEBLOC_DB/$1 ]; then
|
||||
return 1
|
||||
else
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# Tell if a package is installed system-wide
|
||||
function _debloc-globallyInstalled { # package
|
||||
STATUS=$(mktemp)
|
||||
LANG=C dpkg --list $1 &> $STATUS
|
||||
if [ $? != 0 ]; then
|
||||
rm -f $STATUS > /dev/null
|
||||
return 0
|
||||
fi
|
||||
cat $STATUS | grep '^Status:' | grep ' installed' --quiet
|
||||
if [ $? != 0 ]; then
|
||||
rm -f $STATUS > /dev/null
|
||||
return 0
|
||||
else
|
||||
rm -f $STATUS > /dev/null
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Get informations about a package
|
||||
function _debloc-packageShow { # package
|
||||
pkg=$1
|
||||
if [[ -n $DEBIAN_DB && -f $DEBIAN_DB ]]; then
|
||||
startline=$(grep "^Package: ${pkg}\$" $DEBIAN_DB --line-number | tail -1 | cut -d ':' -f 1)
|
||||
if [ -z "$startline" ]; then
|
||||
return 0
|
||||
fi
|
||||
sed -n "$startline,$(expr $startline + 100)p" $DEBIAN_DB | while read line; do
|
||||
if [ -z "$line" ]; then
|
||||
return 0
|
||||
fi
|
||||
echo $line
|
||||
done
|
||||
return 1
|
||||
else
|
||||
LANG=C apt-cache show $pkg | while read line; do
|
||||
if [ -z "$line" ]; then
|
||||
return 0
|
||||
fi
|
||||
echo "$line"
|
||||
done
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
# Get the path of a package
|
||||
function _debloc-packagePath { # package
|
||||
_debloc-packageShow $1 | grep "^Filename:" | head -1 | cut -d ':' -f 2 | sed -e 's/^[[:space:]]*//'
|
||||
return 0
|
||||
}
|
||||
|
||||
# Get the md5sum of a package
|
||||
function _debloc-packageMd5sum { # package
|
||||
_debloc-packageShow $1 | grep "^MD5sum:" | cut -d ':' -f 2 | sed -e 's/^[[:space:]]*//'
|
||||
return 0
|
||||
}
|
||||
|
||||
# Update symbolics links in $DEBLOC_ROOT/lib
|
||||
function _debloc-ldconfig {
|
||||
mkdir -p $DEBLOC_LD &> /dev/null
|
||||
rm -f $DEBLOC_LD &> /dev/null
|
||||
find $DEBLOC_ROOT{/usr,}/lib -type f -name "*.so*" | while read lib; do
|
||||
ln --symbolic --force "$lib" "$DEBLOC_LD/$(basename $lib)"
|
||||
done &> /dev/null
|
||||
find $DEBLOC_ROOT{/usr,}/lib -type l -name "*.so*" | while read link; do
|
||||
yes | cp --force --no-dereference --preserve=links "$link" "$DEBLOC_LD" &> /dev/null
|
||||
done &> /dev/null
|
||||
}
|
||||
|
||||
# Fix absolute symbolic links
|
||||
function _debloc-fixRootSymlinks {
|
||||
find $DEBLOC_ROOT -type l | while read src
|
||||
do
|
||||
dst="$(readlink "$src")"
|
||||
if echo "$dst" | grep '^/' | grep -q -v "^$DEBLOC_ROOT"
|
||||
then
|
||||
newDst="$DEBLOC_ROOT$dst"
|
||||
if [ -f "$newDst" ]
|
||||
then
|
||||
echo "$src → $newDst"
|
||||
rm "$src"
|
||||
ln -s "$newDst" "$src"
|
||||
else
|
||||
echo "Ignoring $src pointing to $dst"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
function _debloc-fixPkgconfPrefix {
|
||||
sed "s|^prefix=/usr$|prefix=$DEBLOC_ROOT/usr|" $(find $DEBLOC_ROOT -type f -name "*.pc") -i
|
||||
}
|
||||
|
||||
function debloc_fix {
|
||||
echo "Fixing absolute symbolic links..."
|
||||
_debloc-fixRootSymlinks
|
||||
echo "Linking libraries in /ld"
|
||||
_debloc-ldconfig
|
||||
echo "Fixing prefix in pkg-config files"
|
||||
_debloc-fixPkgconfPrefix
|
||||
}
|
||||
|
||||
# Install debian archive
|
||||
# Install debian archive
# Extracts a .deb into DEBLOC_ROOT and registers its shared libraries in
# DEBLOC_LD. Prints the archive's file list on stdout (callers capture it
# as the package's DB entry).
function _debloc-installDeb { # path
	TMP_DIR=$(mktemp -d) &> /dev/null
	$(cd $TMP_DIR; ar x "$1")
	TAR_FILE=$(find $TMP_DIR -type f -name "data.tar.*" | head -1)
	if [ -e "$TAR_FILE" ]; then
		# Output for DB saving
		tar tf $TAR_FILE
		tar xf $TAR_FILE -C $DEBLOC_ROOT

		# _debloc-ldconfig
		mkdir -p $DEBLOC_LD &> /dev/null
		tar tf $TAR_FILE | grep '^.\(/usr\)\?/lib/' | grep '\.so' | while read file; do
			lib=$(readlink -f $DEBLOC_ROOT/$file)
			if [ -f $lib ]; then
				ln --symbolic --force "$lib" "$DEBLOC_LD/$(basename $file)"
			fi
			# BUG FIX: the original tested and copied an undefined $link
			# (and $lib is fully resolved by readlink -f, so it can never
			# be a symlink). Test the extracted entry itself and copy it
			# preserving the link, mirroring _debloc-ldconfig.
			if [ -h "$DEBLOC_ROOT/$file" ]; then
				yes | cp --force --no-dereference --preserve=links "$DEBLOC_ROOT/$file" "$DEBLOC_LD/" &> /dev/null
			fi
		done
	fi

	rm -rf $TMP_DIR &> /dev/null
	return 0

}
|
||||
|
||||
# Install package
|
||||
function _debloc-install { # package
|
||||
pkg=$1
|
||||
|
||||
DEB_FILE=$(mktemp) &> /dev/null
|
||||
path=$(_debloc-packagePath $pkg)
|
||||
echo -e "${DEBIAN_MIRROR}" | while read mirror; do
|
||||
if [ -z "$mirror" ]; then
|
||||
continue
|
||||
fi
|
||||
url=${mirror}/${path}
|
||||
echo "→ Downloading $url"
|
||||
wget "$url" --quiet -O $DEB_FILE
|
||||
if [ $? == 0 ]; then
|
||||
break
|
||||
fi
|
||||
done
|
||||
if [ ! -s $DEB_FILE ]; then
|
||||
echo "→ Failed (no deb file)!"
|
||||
rm $DEBLOC_DB/$pkg &> /dev/null
|
||||
return 4
|
||||
fi
|
||||
|
||||
echo "→ Verifying sums"
|
||||
theo=$(_debloc-packageMd5sum $pkg)
|
||||
real=$(md5sum $DEB_FILE | cut -d ' ' -f 1)
|
||||
if [ "$theo" != "$real" ]; then
|
||||
rm -f $DEB_FILE &> /dev/null
|
||||
echo "→ Failed (sum doesn't match)!"
|
||||
rm $DEBLOC_DB/$pkg &> /dev/null
|
||||
return 5
|
||||
fi
|
||||
|
||||
echo "→ Installing"
|
||||
_debloc-installDeb $DEB_FILE > $DEBLOC_DB/$pkg
|
||||
|
||||
echo "→ Done!"
|
||||
rm -f $DEB_FILE &> /dev/null
|
||||
return 0
|
||||
}
|
||||
|
||||
# Get the dependencies of a package
|
||||
function _debloc-packageDeps { # package
|
||||
_debloc-packageShow $1 | grep '^Depends:' | sed 's/Depends: //' | sed 's/, /\n/g' | cut -d ' ' -f 1
|
||||
return 0
|
||||
}
|
||||
|
||||
# Install package with dependencies
|
||||
function _debloc-installDeps { # package
|
||||
pkg=$1
|
||||
echo "Installing $pkg"
|
||||
touch $DEBLOC_DB/$pkg # To prevent cyclic deps
|
||||
_debloc-packageDeps $pkg | while read dep; do
|
||||
dep=$(_debloc-filterVirtual $dep)
|
||||
_debloc-locallyInstalled $dep
|
||||
if [ $? == 1 ]; then
|
||||
echo "- Dependency $dep is already installed with Debloc"
|
||||
continue
|
||||
fi
|
||||
_debloc-globallyInstalled $dep
|
||||
if [ $? == 1 ]; then
|
||||
echo "- Dependency $dep is already installed on the system"
|
||||
continue
|
||||
fi
|
||||
_debloc-installDeps $dep | while read line; do echo "- $line"; done
|
||||
done
|
||||
_debloc-install $pkg
|
||||
return 0
|
||||
}
|
||||
|
||||
# PUBLIC FUNCTIONS
|
||||
|
||||
function proxy_set_help {
|
||||
echo "Usage: $0 env"
|
||||
echo
|
||||
echo "Examples:"
|
||||
echo ' eval "$(debloc env)"'
|
||||
return 0
|
||||
}
|
||||
function debloc_env {
|
||||
echo "export PATH=\"$DEBLOC_ROOT/usr/bin:$DEBLOC_ROOT/usr/games/:$DEBLOC_ROOT/usr/lib/git-core:\$PATH\""
|
||||
echo "export LIBRARY_PATH=\"$DEBLOC_LD:\$LIBRARY_PATH\""
|
||||
echo "export C_INCLUDE_PATH=\"$DEBLOC_ROOT/usr/include:\$C_INCLUDE_PATH\""
|
||||
echo "export CPLUS_INCLUDE_PATH=\"$DEBLOC_ROOT/usr/include:$DEBLOC_ROOT/usr/include/python2.7/:$DEBLOC_ROOT/usr/include/x86_64-linux-gnu/python2.7/:\$CPLUS_INCLUDE_PATH\""
|
||||
echo "export LD_LIBRARY_PATH=\"$DEBLOC_LD:\$LD_LIBRARY_PATH\""
|
||||
echo "export PYTHONPATH=\"$DEBLOC_ROOT/usr/lib/python2/dist-packages:$DEBLOC_ROOT/usr/lib/python3/dist-packages:$DEBLOC_ROOT/usr/lib/python2.7/dist-packages:$DEBLOC_ROOT/usr/lib/python3.5/dist-packages:\$PYTHONPATH\""
|
||||
echo "export QT_QPA_PLATFORM_PLUGIN_PATH=\"$DEBLOC_ROOT/usr/lib/x86_64-linux-gnu/qt5/plugins/platforms\""
|
||||
echo "export PKG_CONFIG_PATH=\"$DEBLOC_ROOT/usr/share/pkgconfig/:$DEBLOC_ROOT/usr/lib/x86_64-linux-gnu/pkgconfig/:$DEBLOC_ROOT/usr/lib/pkgconfig/:\$PKG_CONFIG_PATH\""
|
||||
}
|
||||
|
||||
function debloc_info {
|
||||
echo "DEBLOC_PREFIX=$DEBLOC_PREFIX"
|
||||
echo "DEBLOC_ROOT=$DEBLOC_ROOT"
|
||||
echo "DEBLOC_DB=$DEBLOC_DB"
|
||||
echo "DEBLOC_LD=$DEBLOC_LD"
|
||||
echo "DEBIAN_MIRROR='$DEBIAN_MIRROR'"
|
||||
echo "DEBIAN_DB=$DEBIAN_DB"
|
||||
}
|
||||
|
||||
function debloc_install_help {
|
||||
echo "Usage: $0 install PACKAGE"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " PACKAGE Package name"
|
||||
return 0
|
||||
}
|
||||
|
||||
function debloc_install { # package
|
||||
if [ -z $1 ]; then
|
||||
debloc_deb_help
|
||||
fi
|
||||
for pkg in $*
|
||||
do
|
||||
if [ $pkg == '--force' ] || [ $pkg == '-f' ]; then
|
||||
force=0
|
||||
fi
|
||||
done
|
||||
for pkg in $*; do
|
||||
if [ $pkg == '--force' ] || [ $pkg == '-f' ]; then
|
||||
continue
|
||||
fi
|
||||
pkg=$(_debloc-filterVirtual $pkg)
|
||||
_debloc-exists $pkg
|
||||
if [ $? == 0 ]; then
|
||||
echo "Unknown package $pkg"
|
||||
continue
|
||||
fi
|
||||
if [ ! -v force ]; then
|
||||
_debloc-locallyInstalled $pkg
|
||||
if [ $? == 1 ]; then
|
||||
echo "Package $pkg is already installed with Debloc"
|
||||
continue
|
||||
fi
|
||||
_debloc-globallyInstalled $pkg
|
||||
if [ $? == 1 ]; then
|
||||
echo "Package $pkg is already installed on the system"
|
||||
continue
|
||||
fi
|
||||
fi
|
||||
_debloc-installDeps $pkg
|
||||
done
|
||||
return 0
|
||||
|
||||
}
|
||||
|
||||
function debloc_deb_help {
|
||||
echo "Usage: $0 deb PATH"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " PATH Path to the .deb file"
|
||||
return 0
|
||||
}
|
||||
function debloc_deb { # path
|
||||
if [ -z $1 ]; then
|
||||
debloc_deb_help
|
||||
fi
|
||||
for path in $*; do
|
||||
if [ ! -f "$path" ]; then
|
||||
echo "$path is not a file"
|
||||
return 6
|
||||
fi
|
||||
echo "Installing $(basename $path)"
|
||||
_debloc-installDeb "$(readlink -f $path)" > $DEBLOC_DB/$(basename $path)
|
||||
done
|
||||
return 0
|
||||
|
||||
}
|
||||
|
||||
function debloc_altern_help {
|
||||
echo "Usage: $0 altern PROGRAM ALTERNATIVE"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " PROGRAM Program to set the alternative for"
|
||||
echo " ALTERNATIVE Alternative to set"
|
||||
echo
|
||||
echo "Examples:"
|
||||
echo " $0 altern vim nox"
|
||||
echo " $0 altern dmenu xft"
|
||||
return 0
|
||||
}
|
||||
function debloc_altern { # program alternative
|
||||
if [[ -z $1 || -z $2 ]]; then
|
||||
debloc_altern_help
|
||||
exit 1
|
||||
fi
|
||||
if [ -f "$DEBLOC_ROOT/usr/bin/$1.$2" ]; then
|
||||
dest="$DEBLOC_ROOT/usr/bin/$1"
|
||||
alte="$DEBLOC_ROOT/usr/bin/$1.$2"
|
||||
elif [ -f "$DEBLOC_ROOT/bin/$1.$2" ]; then
|
||||
dest="$DEBLOC_ROOT/bin/$1"
|
||||
alte="$DEBLOC_ROOT/bin/$1.$2"
|
||||
else
|
||||
echo "Unknown alternative for $1 : $2"
|
||||
exit 1
|
||||
fi
|
||||
if [ -e "$dest" ]; then
|
||||
rm $dest
|
||||
fi
|
||||
ln -s "$alte" "$dest"
|
||||
}
|
||||
|
||||
function debloc_flush {
	# Remove everything installed in the fake filesystem and
	# reset the package database (errors are deliberately silenced).
	rm -rf $DEBLOC_ROOT/* &> /dev/null
	rm -f $DEBLOC_DB/* &> /dev/null
}
|
||||
|
||||
# TODO Other word for 'fake filesystem' and/or explain what this is
|
||||
function debloc_help {
	# Show the help of a specific subcommand when one is given and a
	# matching debloc_<command>_help function exists; otherwise print
	# the general usage.
	command="$1"
	if [ -n "$command" ]; then
		if type "debloc_${command}_help" &> /dev/null; then
			shift
			"debloc_${command}_help" "$@"
			return $?
		fi
	fi
	echo "Usage: $0 COMMAND"
	echo
	echo "Commands:"
	echo " env Provides the environment variables required to run applications from the fake filesystem"
	echo " info Gives some information about the fake filesystem"
	echo " install Install a debian package in the fake filesystem"
	echo " deb Install from a .deb file in the fake filesystem"
	echo " altern Update alternative"
	echo " fix Apply some fixes in the fake filesystem"
	echo " flush Remove every package installed from the fake filesystem"
	echo " help Get help with commands"
	echo
	echo "Environment variables:"
	echo " DEBLOC_PREFIX Name of the fake filesystem to use (default: uses dpkg architecture)"
	echo " DEBLOC_ROOT Path of the fake filesystem (default: ~/.debloc/\$DEBLOC_PREFIX/)"
	echo " DEBLOC_DB Database of the fake filesystem (default: \$XDG_CONFIG_HOME/debloc/\$DEBLOC_PREFIX)"
	echo " DEBIAN_MIRROR Multiline list of debian mirror (default: uses /etc/apt/sources.list)"
	echo " DEBIAN_DB Path to a file with all packages description (default: uses apt-cache showpkg)"
	# BUGFIX: a stray duplicate of the "help" command line was printed
	# here, under "Environment variables"; removed.
	return 0
}
|
||||
|
||||
# MAIN

# Dispatch: the first argument selects a debloc_<command> function;
# unknown or missing commands fall back to the general help.
command="$1"
shift
if type "debloc_$command" &> /dev/null; then
	"debloc_$command" "$@"
else
	debloc_help
fi
|
||||
|
11
config/scripts/docker-image-childs
Executable file
11
config/scripts/docker-image-childs
Executable file
|
@ -0,0 +1,11 @@
|
|||
#!/usr/bin/env sh

# List the images that were built on top of the given parent image.

parent="$1"

# Based on https://stackoverflow.com/a/41634462
docker images -q | while read -r image
do
	if docker history "$image" | grep -q "$parent"
	then
		echo "$image"
	fi
done | grep -v "$parent" | sort -u
|
5
config/scripts/docker-rm
Executable file
5
config/scripts/docker-rm
Executable file
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env sh
# Force-stop every running container (unpausing first so kill works),
# then prune stopped containers and unused networks.
docker unpause $(docker ps -q)
docker kill $(docker ps -q)
docker container prune -f
docker network prune -f
|
246
config/scripts/dotfiles
Executable file
246
config/scripts/dotfiles
Executable file
|
@ -0,0 +1,246 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# Handles dotfiles
|
||||
# Yes there are tons of similar scipts yet I wanted no more nor less than what I needed
|
||||
|
||||
# Config
|
||||
|
||||
if [ -z "$DOTHOME" ]; then
|
||||
DOTHOME="$HOME"
|
||||
fi
|
||||
if [ -z "$DOTREPO" ]; then
|
||||
DOTREPO="$HOME/.dotfiles"
|
||||
fi
|
||||
|
||||
# Common functions
|
||||
|
||||
# From http://stackoverflow.com/a/12498485
|
||||
function relativePath {
	# both $1 and $2 are absolute paths beginning with /
	# returns relative path to $2/$target from $1/$source
	# NOTE(review): the common-prefix match is purely textual, so both
	# paths are assumed normalized (no trailing slash, no "..").
	source=$1
	target=$2

	common_part=$source # for now
	result="" # for now

	# Shrink the candidate common part one directory at a time until it
	# is a prefix of $target, emitting one ".." per level climbed.
	while [[ "${target#$common_part}" == "${target}" ]]; do
		# no match, means that candidate common part is not correct
		# go up one level (reduce common part)
		common_part="$(dirname $common_part)"
		# and record that we went back, with correct / handling
		if [[ -z $result ]]; then
			result=".."
		else
			result="../$result"
		fi
	done

	if [[ $common_part == "/" ]]; then
		# special case for root (no common path)
		result="$result/"
	fi

	# since we now have identified the common part,
	# compute the non-common part
	forward_part="${target#$common_part}"


	# and now stick all parts together
	if [[ -n $result ]] && [[ -n $forward_part ]]; then
		result="$result$forward_part"
	elif [[ -n $forward_part ]]; then
		# extra slash removal
		# result="${forward_part:1}" # Removes the . in the beginning...
		result="${forward_part#/}"
	fi

	echo "$result"
}
|
||||
|
||||
|
||||
# Script common functions
|
||||
|
||||
function _dotfiles-install-dir { # dir
	# Recursively install the repository directory $DOTREPO/$dir into
	# $DOTHOME, prefixing a dot to each top-level entry.
	# Regular files/dirs become symlinks; entries that are themselves
	# symlinks in the repo are copied as links; directories containing a
	# .dfrecur marker are recursed into instead of being linked whole.
	# Honors $DRY_RUN (print commands instead of executing them).
	local dir
	local absSource
	local absTarget
	local relTarget

	# Normalize: strip one trailing and one leading slash
	dir="${1%/}"
	dir="${dir#/}"

	# NOTE(review): the pipeline runs the loop in a subshell and breaks
	# on filenames containing whitespace.
	ls -A "$DOTREPO/$dir" | while read file; do
		# At the repo root, skip VCS/meta files (.*, LICENSE, README)
		if [[ -z "$dir" && $(echo $file | grep '^\(\.\|LICENSE\|README\)') ]]; then
			continue
		fi
		# The recursion marker itself is never installed
		if [[ $(echo $file | grep '^.dfrecur$') ]]; then
			continue
		fi

		if [ -z "$dir" ]; then
			absSource="$DOTHOME/.$file"
			absTarget="$DOTREPO/$file"
		else
			absSource="$DOTHOME/.$dir/$file"
			absTarget="$DOTREPO/$dir/$file"
		fi
		relTarget="$(relativePath "$DOTHOME/$dir" "$absTarget")"
		recurIndicator="$absTarget/.dfrecur"

		if [[ -h "$absTarget" ]]; then
			# Repo entry is itself a symlink: copy the link as-is
			if [ -e "$absSource" ]; then
				if [ -h "$absSource" ]; then
					cmd="cp --no-dereference --force $absTarget $absSource"
					if [ $DRY_RUN ]; then
						echo $cmd
					else
						yes | $cmd
					fi
				else
					echo "[ERROR] $absSource already exists, but is not a link"
				fi
			else
				cmd="cp --no-dereference --force $absTarget $absSource"
				if [ $DRY_RUN ]; then
					echo $cmd
				else
					yes | $cmd
				fi
			fi
		elif [[ -f "$absTarget" || ( -d $absTarget && ! -f $recurIndicator ) ]]; then
			# Regular file, or directory without recursion marker:
			# link the whole entry into the home directory
			if [ -e "$absSource" ]; then
				if [ -h "$absSource" ]; then
					cmd="ln --symbolic --no-dereference --force $relTarget $absSource"
					if [ $DRY_RUN ]; then
						echo $cmd
					else
						$cmd
					fi
				else
					echo "[ERROR] $absSource already exists, but is not a symbolic link"
				fi
			else
				cmd="ln --symbolic --no-dereference $relTarget $absSource"
				if [ $DRY_RUN ]; then
					echo $cmd
				else
					$cmd
				fi
			fi
		elif [[ -d "$absTarget" && -f $recurIndicator ]]; then
			# Directory marked .dfrecur: create the real directory in
			# the home and recurse into it
			if [ -e "$absSource" ]; then
				if [ -d "$absSource" ]; then
					# echo "Directory $absSource already exists"
					_dotfiles-install-dir $dir/$file
				else
					echo "[ERROR] $absSource already exists, but is not a directory"
				fi
			else
				cmd="mkdir $absSource"
				if [ $DRY_RUN ]; then
					echo $cmd
				else
					$cmd
				fi
				_dotfiles-install-dir $dir/$file
			fi
		else
			echo "[WARNING] Skipped $absTarget"
		fi
	done

}
|
||||
|
||||
# Script functions
|
||||
|
||||
function dotfiles_link_help {
	# Print usage for the `link` subcommand.
	echo "Usage: $0 link DOTFILE"
	echo
	echo "Arguments:"
	echo " DOTFILE Path to the dotfile"
	return 0

}
|
||||
function dotfiles_link { # file
	# Move an existing dotfile into the repository and replace it with a
	# symbolic link to the repository copy. Honors $DRY_RUN.
	if [ -z "$1" ]; then
		dotfiles_link_help
		return 1
	fi
	absSource="$(realpath $1 2> /dev/null)"
	if [[ $? != 0 || ! -e "$absSource" ]]; then
		echo "[ERROR] $1: no such file or directory"
		return 1
	fi
	relSource="$(relativePath $DOTHOME $absSource)"

	absTarget="$DOTREPO/$relSource"
	# BUGFIX: the link lives in the source file's directory, so the
	# relative target must be computed from there ($dir was never set in
	# this function, so links in subdirectories pointed to the wrong place)
	relTarget="$(relativePath "$(dirname "$absSource")" "$absTarget")"

	if [ -f "$absTarget" ]; then
		echo "[ERROR/UNIMPLEMENTED] $relSource is already linked to ... something"
		return 2
	fi

	if [ -f "$absSource" ]; then
		if [ -d "$(dirname "$absTarget")" ]; then
			cmd="mv $absSource $absTarget"
			cmd2="ln --symbolic --no-dereference $relTarget $absSource"
			if [ $DRY_RUN ]; then
				echo $cmd
				echo $cmd2
			else
				$cmd
				$cmd2
			fi

		else
			echo "[UNIMPLEMENTED] Linking a file in a directory that don't already exists"
		fi
	else
		echo "[UNIMPLEMENTED] Linking things other than a file"
		return 12
	fi

}
|
||||
|
||||
function dotfiles_install {
	# Install every dotfile from the repository into $DOTHOME,
	# starting the recursive walk at the repository root.
	_dotfiles-install-dir /
}
|
||||
|
||||
function dotfiles_help {
|
||||
command="$1"
|
||||
if [ -n "$command" ]; then
|
||||
if type "dotfiles_${command}_help" &> /dev/null; then
|
||||
shift
|
||||
"dotfiles_${command}_help" "$@"
|
||||
return $?
|
||||
fi
|
||||
fi
|
||||
echo "Usage: $0 COMMAND"
|
||||
echo
|
||||
echo "Commands:"
|
||||
echo " install Install dotfiles from repository"
|
||||
echo " link Add dotfile to repository"
|
||||
echo " help Get help with commands"
|
||||
echo
|
||||
echo "Environment variables:"
|
||||
echo " DOTHOME Where to install dotfiles"
|
||||
echo " DOTREPO Where do the dotfiles comes from"
|
||||
return 0
|
||||
}
|
||||
|
||||
# MAIN
# Dispatch: the first argument selects a dotfiles_<command> function;
# unknown or missing commands fall back to the general help.
command="$1"
shift
if type "dotfiles_$command" &> /dev/null; then
	"dotfiles_$command" "$@"
else
	dotfiles_help
fi
|
||||
|
||||
# TODO dotfiles-{link,unlink,clean,uninstall}, better handling of DRY_RUN (use functions probably), clarify source/target thingy
|
||||
# Link and Unlink should have a clever behavior regarding
|
||||
# recusive folders
|
||||
# Ex : linking config/i3 should make config recursible
|
||||
# Ex : linking config if some files in it are linked should unlink those
|
14
config/scripts/dummy
Executable file
14
config/scripts/dummy
Executable file
|
@ -0,0 +1,14 @@
|
|||
#!/usr/bin/bash

# Replace the content of a directory with a text file describing its
# tree structure (the original is kept as <dir>.bkp).

# BUGFIX: anchored the pattern ('/\+$' instead of '/\+') — the old one
# removed the FIRST run of slashes anywhere, mangling nested paths like a/b
dir="$(echo "$1" | sed 's|/\+$||')"
if [ -d "$dir" ]; then
	TEMP=$(mktemp)
	tree -a -p -D -n "$dir" > "$TEMP"
	mv "$dir" "$dir.bkp"
	mv "$TEMP" "$dir"
else
	echo "$dir n'est pas un dossier"
fi
|
13
config/scripts/emergency-clean
Executable file
13
config/scripts/emergency-clean
Executable file
|
@ -0,0 +1,13 @@
|
|||
#!/usr/bin/env bash

# Clears everything it can to save space

rm -rf $HOME/.cache
if which pacman &> /dev/null; then
	sudo pacman -Scc
# BUGFIX: redirect was "&> /deb/null" (typo), which failed and leaked
# which's output
elif which apt-get &> /dev/null; then
	sudo apt-get clean
fi
if which journalctl &> /dev/null; then
	sudo journalctl --vacuum-size=100M
fi
|
6
config/scripts/gitCheckoutModes
Executable file
6
config/scripts/gitCheckoutModes
Executable file
|
@ -0,0 +1,6 @@
|
|||
#!/usr/bin/env bash

# Revert executable-bit changes reported by `git diff --summary`,
# restoring the mode recorded in the index for each file.
# From https://stackoverflow.com/a/2083563

git diff --summary | grep --color 'mode change 100755 => 100644' | cut -d' ' -f7- | xargs -d'\n' chmod +x
git diff --summary | grep --color 'mode change 100644 => 100755' | cut -d' ' -f7- | xargs -d'\n' chmod -x
|
70
config/scripts/gitghost
Executable file
70
config/scripts/gitghost
Executable file
|
@ -0,0 +1,70 @@
|
|||
#!/usr/bin/bash

# Replace git folders with a placeholder containing the remote and the commit

# Ask a yes/no question; returns 1 for yes, 0 for no.
function prompt { # text
	while true; do
		read -p "$1 [yn] " yn
		case $yn in
			[Yy]* ) return 1;;
			[Nn]* ) return 0;;
			* ) echo "Please answer yes or no.";;
		esac
	done
}

if [[ "$#" == 0 || "$#" > 2 ]]
then
	echo "Usage: $0 gitfolder [-y]"
	exit 1
fi

# Strip trailing slashes from the folder argument
folder="$(echo "$1" | sed 's/\/*$//')"
if [ "$2" == "-y" ]
then
	donotask=true
fi

if [ ! -d "$folder/.git" ]
then
	echo "$folder is not a git repository"
	exit 1
fi

if [ -n "$(git -C "$folder" diff)" ]
then
	echo "WARNING: There are unstaged change. Those will be discarded if you continue."
fi

echo "Be sure that every commit in the repository is backed up somewhere else, since those will be discarded."

TMPFILE=$(mktemp)

# BUGFIX: `git log` ran in the current directory instead of the target
# repository; it now uses -C "$folder" like every other git call here.
# BUGFIX: `git diff -v` is not a valid invocation (its error was hidden
# by the stderr redirect, leaving the [diff] section empty).
(echo "[gitplaceholder]"
echo "lastcommit=$(git -C "$folder" log --format="%H" -n 1)"
echo
echo "[remote]"
git -C "$folder" remote -v
echo
echo "[branch]"
git -C "$folder" branch -v
echo
echo "[diff]"
git -C "$folder" diff) > $TMPFILE 2> /dev/null

if [ ! $donotask ]
then
	less $TMPFILE
	echo
	echo "This will be written in place of $folder."
	prompt "Do you want to continue ?"
	if [ "$?" == 0 ]
	then
		echo "Canceled"
		rm $TMPFILE
		exit 0
	fi
fi
echo "Dummying..."
rm -rf "$folder"
# BUGFIX: quoted so folder names with spaces survive
mv "$TMPFILE" "$folder"
|
10
config/scripts/heavyPackages
Executable file
10
config/scripts/heavyPackages
Executable file
|
@ -0,0 +1,10 @@
|
|||
#!/usr/bin/env bash

# Return a list of packages sorted by size
# Reads pacman's local database directly: line 2 of each desc file is the
# package name, and the line after %SIZE% is the installed size in bytes.

(echo PACKAGE SIZE; \
for A in /var/lib/pacman/local/*/desc; do
	(sed -n 2p $A; (grep '^%SIZE%$' $A -A1 | tail -1)) | tr '\n' ' '; echo
done \
| sort -nrk2) \
| column -t
|
56
config/scripts/html2pdf
Executable file
56
config/scripts/html2pdf
Executable file
|
@ -0,0 +1,56 @@
|
|||
#!/usr/bin/env node

// Convert an HTML document to PDF with a title/page-number footer.

// Imports
var fs = require('fs');
var pdf = require('html-pdf');
var yargs = require('yargs');


// Command-line parsing
var argv = yargs
    .usage("Usage: $0 -o out.pdf [options]")
    // BUGFIX: the example used "-i doc.pdf" for the input; the input is
    // the HTML document being converted
    .example('$0 -i doc.html -o doc.pdf', 'Convert doc.html to PDF using the default values')
    .help('h')
    .alias('h', 'help')

    .describe('i', 'Input file')
    .alias('i', 'input')
    .default('i', '/dev/stdin')

    .describe('o', 'Output file')
    .alias('o', 'output')

    .describe('t', 'Title of file')
    .alias('t', 'title')
    .default('t', 'Sans titre')

    .describe('b', 'Border')
    .alias('b', 'border')
    .default('b', '2cm')

    .demandOption(['o'])
    .argv;


// Settings (declared with var: they used to leak as implicit globals)
var options = {
    "base": "file://" + process.cwd() + '/',
    "format": "A4",
    "orientation": "portrait",
    "border": argv.border,

    "footer": {
        "height": "10mm",
        "contents": {
            default: '<div style="text-align: left; float: left;">' + argv.title + '</div> <div style="text-align:right; float: right;">{{page}}/{{pages}}</div>',
        }
    },
}

// Reading
var htmlString = fs.readFileSync(argv.i, "utf8");

// Conversion
pdf.create(htmlString, options).toFile(argv.o, function(err, res) {
    if (err) console.error(err);
});
|
105
config/scripts/install-arch
Executable file
105
config/scripts/install-arch
Executable file
|
@ -0,0 +1,105 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# Setups an Arch Linux system the way I like it
|
||||
# (requires sudo)
|
||||
|
||||
if ! which pacman &> /dev/null; then
|
||||
echo "This is not an Arch Linux system (or pacman isn't installed)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Configuration
|
||||
function prompt { # text
|
||||
while true; do
|
||||
read -p "$1 [yn] " yn
|
||||
case $yn in
|
||||
[Yy]* ) return 1;;
|
||||
[Nn]* ) return 0;;
|
||||
* ) echo "Please answer yes or no.";;
|
||||
esac
|
||||
done
|
||||
}
|
||||
|
||||
# Don't ask for things that are already there
|
||||
if which aurman &> /dev/null; then
|
||||
AURMAN=1
|
||||
fi
|
||||
if [ -z $AURMAN ]; then
|
||||
prompt "Do you want aurman on this machine?"
|
||||
AURMAN=$?
|
||||
fi
|
||||
# COMMON
|
||||
|
||||
# Install packages if they aren't installed
|
||||
function inst {
|
||||
# Could also use --needed but, meh
|
||||
for pkg in $*; do
|
||||
pacman -Q $pkg &> /dev/null
|
||||
if [ $? == 1 ]; then
|
||||
sudo pacman -S $pkg --noconfirm
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
# Install package from PKGBUILD file
|
||||
function installPKGBUILD { # url
|
||||
# TODO Download snapshots
|
||||
TMP_DIR="$(mktemp -d /tmp/pkgbuild.XXXXXXXXXX)"
|
||||
cd "$TMP_DIR"
|
||||
wget "$1" -O PKGBUILD
|
||||
makepkg -si --noconfirm
|
||||
cd -
|
||||
rm -rf "$TMP_DIR"
|
||||
}
|
||||
|
||||
# SYSTEM
|
||||
inst wget
|
||||
|
||||
# Aur
|
||||
|
||||
pacman -Q aurman &> /dev/null
|
||||
if [[ $AURMAN == 1 && $? == 1 ]]; then
|
||||
gpg --recv-keys 465022E743D71E39
|
||||
installPKGBUILD "https://aur.archlinux.org/cgit/aur.git/plain/PKGBUILD?h=aurman"
|
||||
fi
|
||||
|
||||
# Git for /etc
|
||||
inst etckeeper
|
||||
sudo etckeeper init
|
||||
(cd /etc/; sudo git config user.name "etckeeper on $(cat /etc/hostname)"; sudo git config user.email "etckeeper@$(cat /etc/hostname)")
|
||||
sudo etckeeper commit "~/.dotfiles/scripts/install-arch commit"
|
||||
|
||||
|
||||
|
||||
# Uninstall Manjaro's pamac
|
||||
if pacman -Q pamac &> /dev/null ; then
|
||||
sudo pacman -Rsc pamac --noconfirm
|
||||
fi
|
||||
|
||||
# Ccache
|
||||
inst ccache
|
||||
sudo sed 's|BUILDENV=\(.\+\)!ccache\(.\+\)|BUILDENV=\1ccache\2|' /etc/makepkg.conf -i
|
||||
|
||||
# TLP
|
||||
inst tlp
|
||||
sudo sed 's|SATA_LINKPWR_ON_BAT=min_power|SATA_LINKPWR_ON_BAT=max_performance|' /etc/default/tlp -i
|
||||
sudo systemctl enable tlp.service tlp-sleep.service
|
||||
sudo systemctl disable systemd-rfkill.service systemd-rfkill.socket
|
||||
sudo tlp start
|
||||
|
||||
# Numlock on boot
|
||||
echo -e "[Service]\nExecStartPre=/bin/sh -c 'setleds +num < /dev/%I'" | sudo systemctl edit getty\@.service
|
||||
|
||||
# Makeflags
|
||||
sudo sed "s|#MAKEFLAGS=\"-j2\"|MAKEFLAGS=\"-j$(nproc)\"|" /etc/makepkg.conf -i
|
||||
|
||||
# Time synchronisation
|
||||
inst ntp
|
||||
sudo systemctl start ntpd
|
||||
sudo systemctl enable ntpd
|
||||
|
||||
# MANUAL
|
||||
|
||||
# Hibernation
|
||||
# Add resume=UUID=<UUID-of-swap-partition> to GRUB_CMDLINE_LINUX_DEFAULT and run `sudo grub-mkconfig -o /boot/grub/grub.cfg`
|
||||
|
308
config/scripts/install-prefs
Executable file
308
config/scripts/install-prefs
Executable file
|
@ -0,0 +1,308 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# Installs user preferences the way I like it
|
||||
|
||||
# Configuration
|
||||
function prompt { # text
|
||||
while true; do
|
||||
read -p "$1 [yn] " yn
|
||||
case $yn in
|
||||
[Yy]* ) return 1;;
|
||||
[Nn]* ) return 0;;
|
||||
* ) echo "Please answer yes or no.";;
|
||||
esac
|
||||
done
|
||||
}
|
||||
|
||||
# Don't ask for things that are already there
|
||||
TERMUX=0
|
||||
if [ -d /data/data/com.termux/files ]; then
|
||||
TERMUX=1
|
||||
GUI=0
|
||||
fi
|
||||
|
||||
if which i3 &> /dev/null; then
|
||||
GUI=1
|
||||
fi
|
||||
|
||||
if [ -z $ADMIN ]; then
|
||||
prompt "Are you a superuser on this machine?"
|
||||
ADMIN=$?
|
||||
fi
|
||||
if [ -z $GUI ]; then
|
||||
prompt "Do you want a X environment on this machine?"
|
||||
GUI=$?
|
||||
fi
|
||||
if [ -z $EXTRA ]; then
|
||||
prompt "Do you want not-so-needed software on this machine?"
|
||||
EXTRA=$?
|
||||
fi
|
||||
|
||||
# TODO Verify if the package exists before installing it
|
||||
|
||||
# System detection
|
||||
if which pacman &> /dev/null; then
|
||||
ARCH=1
|
||||
if [ $ADMIN == 1 ]; then
|
||||
sudo pacman -Sy
|
||||
function installOne { # package
|
||||
pacman -Q $1 &> /dev/null
|
||||
if [ $? == 1 ]; then
|
||||
sudo pacman -S $1 --noconfirm --needed
|
||||
fi
|
||||
}
|
||||
function installFileOne { # file
|
||||
sudo pacman -U "$1"
|
||||
}
|
||||
if which aurman &> /dev/null; then
|
||||
function altInstallOne { # package
|
||||
pacman -Q $1 &> /dev/null
|
||||
if [ $? == 1 ]; then
|
||||
aurman -S "$1" --noconfirm --noedit
|
||||
fi
|
||||
}
|
||||
elif which pacaur &> /dev/null; then
|
||||
function altInstallOne { # package
|
||||
pacman -Q $1 &> /dev/null
|
||||
if [ $? == 1 ]; then
|
||||
pacaur -S "$1" --noconfirm --noedit
|
||||
fi
|
||||
}
|
||||
elif which yaourt &> /dev/null; then
|
||||
function altInstallOne { # package
|
||||
pacman -Q $1 &> /dev/null
|
||||
if [ $? == 1 ]; then
|
||||
yaourt -S "$1" --noconfirm
|
||||
fi
|
||||
}
|
||||
else
|
||||
# Install package from PKGBUILD file
|
||||
function installPKGBUILD { # url
|
||||
TMP_DIR="$(mktemp -d /tmp/pkgbuild.XXXXXXXXXX)"
|
||||
cd "$TMP_DIR"
|
||||
wget "$1" -O PKGBUILD
|
||||
makepkg -si
|
||||
cd -
|
||||
rm -rf "$TMP_DIR"
|
||||
}
|
||||
|
||||
function altInstallOne { # package
|
||||
pacman -Q $1 &> /dev/null
|
||||
if [ $? == 1 ]; then
|
||||
installPKGBUILD "https://aur.archlinux.org/cgit/aur.git/plain/PKGBUILD?h=$1"
|
||||
fi
|
||||
}
|
||||
fi
|
||||
else
|
||||
echo "You're on a Arch System but it's not yours? Did Arch got that popular?"
|
||||
return 42
|
||||
fi
|
||||
|
||||
elif which dpkg &> /dev/null; then
|
||||
DEBIAN=1
|
||||
if [[ $ADMIN == 1 || $TERMUX == 1 ]]; then
|
||||
if [ $TERMUX == 1 ]; then
|
||||
DEBIAN=0
|
||||
apt update -y
|
||||
else
|
||||
sudo apt-get update -y
|
||||
fi
|
||||
function installOne { # package
|
||||
|
||||
# Finding out if it's already installed or not
|
||||
STATUS=$(mktemp)
|
||||
LANG=C dpkg-query --status $1 &> $STATUS
|
||||
installed=0
|
||||
if [ $? == 0 ]; then
|
||||
cat $STATUS | grep '^Status:' | grep ' installed' --quiet
|
||||
if [ $? == 0 ]; then
|
||||
installed=1
|
||||
fi
|
||||
fi
|
||||
rm -f $STATUS > /dev/null
|
||||
|
||||
# Installing if it's not installed
|
||||
if [ $installed == 0 ]; then
|
||||
# TODO noconfirm
|
||||
if [ $TERMUX == 1 ]; then
|
||||
apt install $1 -y
|
||||
else
|
||||
sudo apt-get install $1 -y
|
||||
fi
|
||||
fi
|
||||
}
|
||||
function installFileOne { # file
|
||||
dpkg -i "$1"
|
||||
}
|
||||
else
|
||||
function installOne { # package
|
||||
debloc install $1
|
||||
}
|
||||
function installFileOne { # file
|
||||
debloc deb "$1"
|
||||
}
|
||||
fi
|
||||
function altInstallOne {
|
||||
echo "[ERROR] There's no alternate installer for this distribution. Can't install $1."
|
||||
}
|
||||
else
|
||||
echo "Uuuh, what kind of distribution is this?"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Install package with the standard
|
||||
# package manager for the distribution
|
||||
function inst {
|
||||
for pkg in $*; do
|
||||
installOne $pkg
|
||||
done
|
||||
}
|
||||
|
||||
# Install package FILE with the standard
|
||||
# package manager for the distribution
|
||||
function instFile {
|
||||
for pkg in $*; do
|
||||
installFileOne $pkg
|
||||
done
|
||||
}
|
||||
|
||||
# Install package with the alternate
|
||||
# package manager for the distribution
|
||||
function altInst {
|
||||
for pkg in $*; do
|
||||
altInstallOne $pkg
|
||||
done
|
||||
}
|
||||
|
||||
function systemdUserUnit {
|
||||
systemctl --user enable "$1"
|
||||
systemctl --user start "$1"
|
||||
}
|
||||
|
||||
# Common CLI
|
||||
|
||||
changeColors monokai
|
||||
|
||||
# Utils
|
||||
if [ $TERMUX == 1 ]; then
|
||||
inst coreutils man openssl-tool grep sed sh tar
|
||||
inst termux-api
|
||||
if [ $ADMIN == 1 ]; then
|
||||
inst tsu
|
||||
fi
|
||||
fi
|
||||
inst moreutils screen ncdu lsof htop proxytunnel pv curl wget socat mosh bash-completion rsync pwgen fzf highlight
|
||||
# TODO Test those who are on Debian machines and those who aren't
|
||||
if [ $ARCH == 1 ]; then
|
||||
inst bash-completion
|
||||
altInst gopass
|
||||
else
|
||||
inst pass
|
||||
fi
|
||||
|
||||
# Dev
|
||||
if [ $DEBIAN == 1 ]; then
|
||||
inst build-essential
|
||||
elif [ $ARCH == 1 ]; then
|
||||
inst base-devel
|
||||
else
|
||||
inst make
|
||||
fi
|
||||
inst git
|
||||
|
||||
|
||||
# Text editor
inst neovim
# BUGFIX: "[ $DEBIAN == 1]" (missing space before ]) was a runtime error,
# and python3-neovim was misspelled "pyhon3-neovim"
if [ $DEBIAN == 1 ]; then
	inst python-neovim python3-neovim
elif [ $ARCH == 1 ]; then
	inst python2-neovim python-neovim
fi

if [ $DEBIAN == 1 ]; then
	inst exuberant-ctags
else
	inst ctags
fi
vim +PlugUpgrade +PlugUpdate +PlugInstall +qall
|
||||
|
||||
# Common GUI
|
||||
if [ $GUI == 1 ]; then
|
||||
.Xresources.d/configure
|
||||
|
||||
# Desktop manager
|
||||
inst dunst feh i3-wm i3lock numlockx qutebrowser rofi rxvt-unicode scrot trayer unclutter xautolock xclip
|
||||
if [ $ARCH == 1 ]; then
|
||||
inst xorg-xinit xorg-xbacklight ttf-dejavu autorandr
|
||||
altInst lemonbar-xft-git keynav-enhanced pacmixer rofi-pass
|
||||
elif [ $DEBIAN == 1 ]; then
|
||||
# TODO autorandr pacmixer rofi-pass ttf-dejavu
|
||||
inst lemonbar keynav xbacklight
|
||||
fi
|
||||
|
||||
# Screen filter
|
||||
if [ $ARCH == 1 ]; then
|
||||
altInst sct
|
||||
elif [ $TERMUX != 1 ]; then
|
||||
if [ ! -f $HOME/.local/bin/sct ]; then
|
||||
TMP=$(mktemp /tmp/XXXXXXXXXX.c)
|
||||
wget https://gist.githubusercontent.com/ajnirp/208c03d3aa7f02c743d2/raw/55bf3eed25739173d8be57b5179ed5542cf40ed6/sct.c -O $TMP
|
||||
cc $TMP --std=c99 -lX11 -lXrandr -o $HOME/.local/bin/sct
|
||||
rm $TMP
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
if [ $EXTRA == 1 ]; then
|
||||
# Extra dev (not on mobile though ^^)
|
||||
if [ $TERMUX == 0 ]; then
|
||||
inst cmake clang llvm ccache python-pip gdb
|
||||
fi
|
||||
|
||||
# Extra CLI
|
||||
inst ffmpeg optipng syncthing mutt msmtp notmuch mbsync jq lynx strace
|
||||
inst unzip unrar jdupes bedup p7zip
|
||||
inst youtube-dl megatools speedtest-cli
|
||||
systemdUserUnit syncthing
|
||||
if [ $ARCH == 1 ]; then
	# BUGFIX: "insta" was a typo for "inst"
	inst pandoc youtube-dl translate-shell imagemagick
	altInst insect pdftk visidata

	# Orga
	# TODO For others
	inst vdirsyncer khard todoman offlineimap khal
	systemdUserUnit vdirsyncer.timer
# BUGFIX: missing space before ] was a runtime error
elif [ $DEBIAN == 1 ]; then
	inst pandoc pdftk visidata translate-shell youtube-dl
else
	# translate-shell
	curl -L git.io/trans > ~/.local/bin/trans
	chmod +x ~/.local/bin/trans

	# TODO Others
fi
|
||||
|
||||
# FPGA goodness
|
||||
if [ $ARCH == 1 ]; then
|
||||
inst iverilog
|
||||
altInst ghdl
|
||||
fi
|
||||
|
||||
# Extra GUI
|
||||
if [ $GUI == 1 ]; then
|
||||
inst vlc gimp inkscape mpd thunar musescore llpp pdfpc texlive-{most,lang}
|
||||
|
||||
if [ $ARCH == 1 ]; then
|
||||
inst simplescreenrecorder mpc
|
||||
altInst vimpc-git ashuffle-git ttf-emojione-color puddletag
|
||||
|
||||
# FPGA goodness
|
||||
inst gtkwave
|
||||
fi
|
||||
|
||||
# TODO Others
|
||||
|
||||
fi
|
||||
fi
|
||||
|
43
config/scripts/install-termux
Executable file
43
config/scripts/install-termux
Executable file
|
@ -0,0 +1,43 @@
|
|||
#!/data/data/com.termux/files/usr/bin/env bash

# Setups a Termux system the way I like it

if [ ! -d /data/data/com.termux/files ]; then
	# BUGFIX: the message mentioned pacman (copied from the Arch script)
	echo "This is not a Termux system"
	# BUGFIX: `return` is only valid in a function or a sourced script;
	# use exit so direct execution stops here too
	exit 1
fi

# Configuration

# Ask a yes/no question; returns 1 for yes, 0 for no.
function prompt { # text
	while true; do
		read -p "$1 [yn] " yn
		case $yn in
			[Yy]* ) return 1;;
			[Nn]* ) return 0;;
			* ) echo "Please answer yes or no.";;
		esac
	done
}

if [ -z $ROOT ]; then
	prompt "Is this device rooted and BusyBox installed in /system/xbin/?"
	ROOT=$?
fi

# Update
apt update
apt upgrade

# (needed for install-prefs)
apt install coreutils
apt install grep
# Used by some of my termux scripts
apt install jq

# Config
touch ~/.hushlogin

if [ $ROOT == 1 ]; then
	apt install tsu
	# Expose the Termux prefix as /usr so standard shebangs work
	echo '/system/xbin/mount -o remount,rw /; ln -s /data/data/com.termux/files/usr /usr; /system/xbin/mount -o remount,ro /' | tsu
fi
|
11
config/scripts/install-wsl
Executable file
11
config/scripts/install-wsl
Executable file
|
@ -0,0 +1,11 @@
|
|||
#!/usr/bin/env bash

# Setups a WSL system the way I like it

# Remember to use the alwsl script on GitHub to install Arch Linux on WSL
# Use github:cbucher/console as a console and github:mintty/wsltty as a shell
# (%LOCALAPPDATA%/wsltty/bin/wsl-bridge) to avoid arrow keys bypassing

# Generate the French UTF-8 locale (the UTF-8 charmap ships gzipped)
(cd /usr/share/i18n/charmaps/; sudo gunzip -k UTF8.gz)
echo "fr_FR.UTF-8 UTF-8" | sudo tee -a /etc/locale.gen
sudo locale-gen
|
6
config/scripts/logstasync
Executable file
6
config/scripts/logstasync
Executable file
|
@ -0,0 +1,6 @@
|
|||
#!/usr/bin/env bash

# Stream a remote server's Apache/httpd access logs into logstalgia
# for a live visualisation.
# Usage: logstasync SERVER [logstalgia options...]

serv="$1"
shift

ssh "$serv" 'sudo tail --follow=name --retry --quiet $(sudo find $(echo /var/log/$([ -d /var/log/httpd/ ] && echo httpd || echo apache2)) -type f -name *access.log)' | logstalgia --sync "$@"
|
22
config/scripts/lorem
Executable file
22
config/scripts/lorem
Executable file
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# Generates Lorem Ipsum
|
||||
|
||||
original="Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed non risus. Suspendisse lectus tortor, dignissim sit amet, adipiscing nec, ultricies sed, dolor. Cras elementum ultrices diam. Maecenas ligula massa, varius a, semper congue, euismod non, mi. Proin porttitor, orci nec nonummy molestie, enim est eleifend mi, non fermentum diam nisl sit amet erat. Duis semper. Duis arcu massa, scelerisque vitae, consequat in, pretium a, enim. Pellentesque congue. Ut in risus volutpat libero pharetra tempor. Cras vestibulum bibendum augue. Praesent egestas leo in pede. Praesent blandit odio eu enim. Pellentesque sed dui ut augue blandit sodales. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Aliquam nibh. Mauris ac mauris sed pede pellentesque fermentum. Maecenas adipiscing ante non diam sodales hendrerit.
|
||||
|
||||
Ut velit mauris, egestas sed, gravida nec, ornare ut, mi. Aenean ut orci vel massa suscipit pulvinar. Nulla sollicitudin. Fusce varius, ligula non tempus aliquam, nunc turpis ullamcorper nibh, in tempus sapien eros vitae ligula. Pellentesque rhoncus nunc et augue. Integer id felis. Curabitur aliquet pellentesque diam. Integer quis metus vitae elit lobortis egestas. Lorem ipsum dolor sit amet, consectetuer adipiscing elit. Morbi vel erat non mauris convallis vehicula. Nulla et sapien. Integer tortor tellus, aliquam faucibus, convallis id, congue eu, quam. Mauris ullamcorper felis vitae erat. Proin feugiat, augue non elementum posuere, metus purus iaculis lectus, et tristique ligula justo vitae magna.
|
||||
|
||||
Aliquam convallis sollicitudin purus. Praesent aliquam, enim at fermentum mollis, ligula massa adipiscing nisl, ac euismod nibh nisl eu lectus. Fusce vulputate sem at sapien. Vivamus leo. Aliquam euismod libero eu enim. Nulla nec felis sed leo placerat imperdiet. Aenean suscipit nulla in justo. Suspendisse cursus rutrum augue. Nulla tincidunt tincidunt mi. Curabitur iaculis, lorem vel rhoncus faucibus, felis magna fermentum augue, et ultricies lacus lorem varius purus. Curabitur eu amet.
|
||||
"
|
||||
|
||||
repet=$1
|
||||
|
||||
if [ -z $repet ]; then
|
||||
repet=1
|
||||
fi
|
||||
|
||||
for i in $(seq 1 $repet)
|
||||
do
|
||||
echo -e "$original"
|
||||
done
|
||||
|
517
config/scripts/machines
Executable file
517
config/scripts/machines
Executable file
|
@ -0,0 +1,517 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# Handles indexing and SSH keys of machines I
|
||||
# have access on
|
||||
|
||||
MACHINES_HOME=$HOME
|
||||
MACHINES_CONFIG=$HOME/.config/machines
|
||||
MACHINES_API=https://machines.frogeye.fr
|
||||
|
||||
mkdir -p "$MACHINES_HOME" &> /dev/null
|
||||
mkdir -p "$MACHINES_CONFIG" &> /dev/null
|
||||
|
||||
# COMMON
|
||||
|
||||
function prompt { # text
	# Ask a yes/no question until answered; returns 1 for yes, 0 for no.
	while true
	do
		read -r -p "$1 [yn] " yn
		case $yn in
			[Yy]* ) return 1;;
			[Nn]* ) return 0;;
			* ) echo "Please answer y or n.";;
		esac
	done
}
|
||||
|
||||
# From https://gist.github.com/cdown/1163649
|
||||
|
||||
urlencode() { # string
	# Percent-encode $1; RFC 3986 unreserved characters pass through.
	old_lc_collate=$LC_COLLATE
	# C collation so the a-zA-Z0-9 ranges match ASCII only
	LC_COLLATE=C
	local length="${#1}"
	for (( i = 0; i < length; i++ )); do
		local c="${1:i:1}"
		case $c in
			[a-zA-Z0-9.~_-]) printf "%s" "$c" ;;
			*) printf '%%%02X' "'$c" ;;
		esac
	done
	LC_COLLATE=$old_lc_collate
}
|
||||
|
||||
urldecode() { # string
	# Reverse of urlencode: '+' becomes a space, then every %XX escape
	# is expanded through printf's \x interpretation.
	local with_spaces="${1//+/ }"
	printf '%b' "${with_spaces//%/\\x}"
}
|
||||
|
||||
# API ACCESS
|
||||
|
||||
function _machines-api { # route [wget options...]
	# Call the machines API at $MACHINES_API/<route> and print the
	# response body on stdout. Extra arguments are passed to wget.
	# Exits the whole script (status 2) when wget reports any failure.
	route=$1
	shift
	wget "$MACHINES_API/$route" --content-on-error --quiet --output-document=- "$@"
	result=$?
	if [ $result != 0 ]; then
		echo "[ERROR] wget returned $result for route $route" 1>&2;
		exit 2
	fi
}
|
||||
|
||||
function _machines-apiToken {
|
||||
read -r -p 'TOTP token: ' token
|
||||
_machines-api "$@" --header="X-TOTP: $token"
|
||||
}
|
||||
|
||||
function _machines-apiSigned {
|
||||
_machines-ensureAdmin
|
||||
_machines-api "$@" --certificate="$MACHINES_CONFIG/machines.crt" --private-key="$MACHINES_CONFIG/machines.key"
|
||||
}
|
||||
|
||||
|
||||
# APPLICATION KEYS & CERTIFICATE
|
||||
|
||||
function _machines-pubFromCrt {
|
||||
openssl x509 -in "$MACHINES_CONFIG/machines.crt" -pubkey -noout > "$MACHINES_CONFIG/machines.pub"
|
||||
}
|
||||
|
||||
function _machines-verifyCertificate {
	# NOTE(review): the unconditional `return` below short-circuits the
	# function, so certificate verification is currently a NO-OP.
	# The dead code is kept as-is; confirm whether this is intentional.
	return
	if openssl verify "$MACHINES_CONFIG/machines.crt" | grep -v 'error 18' | grep 'error' --quiet; then
		echo "[ERROR] Invalid certificate" 1>&2;
		exit 1
	fi
}
|
||||
|
||||
function _machines-ensurePub {
|
||||
if [ ! -f "$MACHINES_CONFIG/machines.crt" ]; then
|
||||
CERT_FILE=$(mktemp)
|
||||
echo "[INFO] Downloading certificate..."
|
||||
_machines-api cert > "$CERT_FILE"
|
||||
openssl x509 -fingerprint -in "$CERT_FILE" | grep Fingerprint --color=never
|
||||
prompt "Is this correct ?"
|
||||
if [ $? == 1 ]; then
|
||||
mv "$CERT_FILE" "$MACHINES_CONFIG/machines.crt" &> /dev/null
|
||||
else
|
||||
echo "[ERROR] Certificate rejected." 1>&2;
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
_machines-verifyCertificate
|
||||
if [ ! -f "$MACHINES_CONFIG/machines.pub" ]; then
|
||||
_machines-pubFromCrt
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
function _machines-ensureAdmin {
|
||||
if [ ! -f "$MACHINES_CONFIG/machines.key" ]; then
|
||||
echo "[ERROR] You need have to have the private key to do that" 1>&2;
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# SSH ACCESS KEYS
|
||||
|
||||
function _machines-signAkey { # network
|
||||
KEY_FILE=$(mktemp)
|
||||
SIGN_FILE=$(mktemp)
|
||||
_machines-apiSigned "akey/$1?unsigned" > "$KEY_FILE"
|
||||
openssl dgst -sha256 -sign "$MACHINES_CONFIG/machines.key" -out "$SIGN_FILE" "$KEY_FILE"
|
||||
_machines-apiSigned "akey/$1" --method=PUT --body-file="$SIGN_FILE"
|
||||
rm "$KEY_FILE" "$SIGN_FILE" &> /dev/null
|
||||
}
|
||||
|
||||
function _machines-getAkey { # network
|
||||
_machines-ensurePub
|
||||
KEY_FILE=$(mktemp)
|
||||
SIGN_FILE=$(mktemp)
|
||||
_machines-api "akey/$1" > "$KEY_FILE"
|
||||
_machines-api "akey/$1?signature" > "$SIGN_FILE"
|
||||
md5sum "$KEY_FILE" 1>&2;
|
||||
md5sum "$SIGN_FILE" 1>&2;
|
||||
md5sum "$MACHINES_CONFIG/machines.pub" 1>&2;
|
||||
|
||||
if openssl dgst -sha256 -verify "$MACHINES_CONFIG/machines.pub" -signature "$SIGN_FILE" "$KEY_FILE" &> /dev/null
|
||||
then
|
||||
cat "$KEY_FILE"
|
||||
\rm "$KEY_FILE" "$SIGN_FILE"
|
||||
return 0
|
||||
else
|
||||
\rm "$KEY_FILE" "$SIGN_FILE"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function _machines-updateAkey {
|
||||
MYKEY_FILE=$(mktemp)
|
||||
network=$(grep '^network=' "$MACHINES_CONFIG/this" | cut -d '=' -f 2)
|
||||
if _machines-getAkey "$network" > "$MYKEY_FILE"
|
||||
then
|
||||
\mv -f "$MYKEY_FILE" "$MACHINES_HOME/.ssh/authorized_keys"
|
||||
return 0
|
||||
else
|
||||
cat "$MYKEY_FILE"
|
||||
echo "[ERROR] Authorized keys are not properly signed" 1>&2;
|
||||
\rm "$MYKEY_FILE"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function _machines-postFile { # filename
	# Convert a key=value file (one pair per line) into URL-encoded POST
	# data of the form "&key=value&key2=value2".
	# Fix: read straight from the file with a quoted redirection instead of
	# `cat $1 |` — the unquoted variable word-split on paths containing
	# spaces, and the pipe was a useless use of cat.
	while read -r line; do
		parameter=$(echo "$line" | cut -d '=' -f 1)
		# Strip "name=" or "name[]=" to keep only the value, which may
		# itself contain '=' characters.
		value="$(echo "$line" | sed 's/^[a-zA-Z0-9]\+\(\[\]\)\?=//')"
		echo -n "&$parameter=$(urlencode "$value")"
	done < "$1"
}
|
||||
|
||||
|
||||
function _machines-addElement { # element elementType default
|
||||
FILE=$(mktemp)
|
||||
echo -e "$3" > "$FILE"
|
||||
$EDITOR "$FILE"
|
||||
data=$(_machines-postFile "$FILE")
|
||||
\rm "$FILE"
|
||||
_machines-apiSigned "$2" --post-data "name=$1$data"
|
||||
}
|
||||
|
||||
function _machines-viewElement { # element elementType
|
||||
_machines-apiSigned "$2/$1"
|
||||
}
|
||||
|
||||
function _machines-editElement { # element elementType
|
||||
FILE=$(mktemp)
|
||||
_machines-apiSigned "$2/$1" > "$FILE"
|
||||
$EDITOR "$FILE"
|
||||
data=$(_machines-postFile "$FILE")
|
||||
rm "$FILE" &> /dev/null
|
||||
err=$(_machines-apiSigned "$2/$1" --post-data "$data")
|
||||
}
|
||||
|
||||
function _machines-deleteElement { # element elementType
|
||||
err=$(_machines-apiSigned "$2/$1" --method=DELETE)
|
||||
}
|
||||
|
||||
|
||||
# USER ADMIN FUNCTIONS
|
||||
|
||||
function machines_history {
|
||||
if [ -f "$MACHINES_CONFIG/lastVerifiedLog" ]; then
|
||||
from=$(<"$MACHINES_CONFIG/lastVerifiedLog")
|
||||
else
|
||||
from=0
|
||||
fi
|
||||
d=$(date +%s)
|
||||
_machines-apiSigned log?from=$from | less
|
||||
if prompt "Is this OK?"
|
||||
then
|
||||
exit 1
|
||||
else
|
||||
echo "$d" > "$MACHINES_CONFIG/lastVerifiedLog"
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
function machines_sign {
|
||||
machines_history
|
||||
echo "Signing default network authorized_keys..."
|
||||
_machines-signAkey
|
||||
_machines-apiSigned network | while read -r network; do
|
||||
echo "Signing network $network authorized_keys..."
|
||||
_machines-signAkey $network
|
||||
done
|
||||
}
|
||||
|
||||
function machines_machine_list {
|
||||
_machines-apiSigned machine
|
||||
}
|
||||
|
||||
function machines_network_list {
|
||||
_machines-apiSigned network
|
||||
}
|
||||
|
||||
function machines_machine_add_help {
|
||||
echo "Usage: $0 machine|mac|m add MACHINE"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " MACHINE machine to add"
|
||||
return 0
|
||||
}
|
||||
function machines_machine_add { # machine
|
||||
if [ -z "$1" ]; then
|
||||
machines_machine_add_help
|
||||
exit 1
|
||||
fi
|
||||
_machines-addElement "$1" machine "host[]=\nnetwork=\nuserkey=\nhostkey=\nuser="
|
||||
}
|
||||
|
||||
function machines_network_add_help {
|
||||
echo "Usage: $0 network|net|n add NETWORK"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " NETWORK Network to add"
|
||||
return 0
|
||||
}
|
||||
|
||||
function machines_network_add { # network
|
||||
if [ -z "$1" ]; then
|
||||
machines_network_add_help
|
||||
exit 1
|
||||
fi
|
||||
_machines-addElement "$1" network "allowed[]=\nsecure=false"
|
||||
}
|
||||
|
||||
function machines_machine_view_help {
|
||||
echo "Usage: $0 machine|mac|m view MACHINE"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " MACHINE machine to view"
|
||||
return 0
|
||||
}
|
||||
|
||||
function machines_machine_view { # machine
|
||||
if [ -z "$1" ]; then
|
||||
machines_machine_view_help
|
||||
exit 1
|
||||
fi
|
||||
_machines-viewElement "$1" machine
|
||||
}
|
||||
|
||||
function machines_network_view_help {
|
||||
echo "Usage: $0 network|net|n view NETWORK"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " NETWORK Network to view"
|
||||
return 0
|
||||
}
|
||||
function machines_network_view { # network
|
||||
if [ -z "$1" ]; then
|
||||
machines_network_view_help
|
||||
exit 1
|
||||
fi
|
||||
_machines-viewElement "$1" network
|
||||
}
|
||||
|
||||
function machines_machine_edit_help {
|
||||
echo "Usage: $0 machine|mac|m edit MACHINE"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " MACHINE machine to edit"
|
||||
return 0
|
||||
}
|
||||
|
||||
function machines_machine_edit { # machine
|
||||
if [ -z "$1" ]; then
|
||||
machines_machine_edit_help
|
||||
exit 1
|
||||
fi
|
||||
_machines-editElement "$1" machine
|
||||
}
|
||||
|
||||
function machines_network_edit_help {
|
||||
echo "Usage: $0 network|net|n edit NETWORK"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " NETWORK Network to edit"
|
||||
return 0
|
||||
}
|
||||
function machines_network_edit { # network
|
||||
if [ -z "$1" ]; then
|
||||
machines_network_edit_help
|
||||
exit 1
|
||||
fi
|
||||
_machines-editElement "$1" network
|
||||
}
|
||||
|
||||
function machines_machine_delete_help {
|
||||
echo "Usage: $0 machine|mac|m delete machine"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " MACHINE machine to remove"
|
||||
return 0
|
||||
}
|
||||
function machines_machine_delete { # machine
|
||||
if [ -z "$1" ]; then
|
||||
machines_machine_delete_help
|
||||
exit 1
|
||||
fi
|
||||
_machines-deleteElement "$1" machine
|
||||
}
|
||||
|
||||
function machines_network_delete_help {
|
||||
echo "Usage: $0 network|net|n delete NETWORK"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " NETWORK Network to remove"
|
||||
return 0
|
||||
}
|
||||
function machines_network_delete { # network
|
||||
if [ -z "$1" ]; then
|
||||
machines_network_delete_help
|
||||
exit 1
|
||||
fi
|
||||
_machines-deleteElement "$1" network
|
||||
}
|
||||
|
||||
function machines_machine_help {
|
||||
echo "Usage: $0 machine|mac|m COMMAND"
|
||||
echo
|
||||
echo "Commands:"
|
||||
echo " list List all machines"
|
||||
echo " add Interactively add a machine"
|
||||
echo " view Display a machine"
|
||||
echo " edit Interactively edit a specified machine"
|
||||
echo " delete Remove a specified machine"
|
||||
echo " help Get help with commands"
|
||||
return 0
|
||||
}
|
||||
function machines_machine {
|
||||
command="$1"
|
||||
shift
|
||||
if type "machines_machine_$command" &> /dev/null; then
|
||||
"machines_machine_$command" "$@"
|
||||
else
|
||||
machines_machine_help
|
||||
fi
|
||||
}
|
||||
|
||||
function machines_network_help {
|
||||
echo "Usage: $0 network|net|n COMMAND"
|
||||
echo
|
||||
echo "Commands:"
|
||||
echo " list List all networks"
|
||||
echo " add Interactively add a network"
|
||||
echo " view Display a network"
|
||||
echo " edit Interactively edit a specified network"
|
||||
echo " delete Remove a specified network"
|
||||
echo " help Get help with commands"
|
||||
return 0
|
||||
}
|
||||
function machines_network {
|
||||
command="$1"
|
||||
shift
|
||||
if type "machines_network_$command" &> /dev/null; then
|
||||
"machines_network_$command" "$@"
|
||||
else
|
||||
machines_network_help
|
||||
fi
|
||||
}
|
||||
|
||||
machines_mac() { machines_machine "$@"; }
|
||||
machines_m() { machines_machine "$@"; }
|
||||
machines_net() { machines_network "$@"; }
|
||||
machines_n() { machines_network "$@"; }
|
||||
machines_mac_help() { machines_machine_help "$@"; }
|
||||
machines_m_help() { machines_machine_help "$@"; }
|
||||
machines_net_help() { machines_network_help "$@"; }
|
||||
machines_n_help() { machines_network_help "$@"; }
|
||||
|
||||
function machines_update-all {
|
||||
machines_machine_list | while read -r machine; do
|
||||
echo "Updating $machine..."
|
||||
ssh "$machine" 'cd .dotfiles && git pull; machines update' &
|
||||
done
|
||||
}
|
||||
|
||||
function machines_regen-keys {
|
||||
if [[ -e $MACHINES_CONFIG/machines.key || -e $MACHINES_CONFIG/machines.pub || -e $MACHINES_CONFIG/machines.crt ]]; then
|
||||
echo "[ERROR] Please delete the pem files manually to prove you know what you're doing." 1>&2;
|
||||
exit 1
|
||||
else
|
||||
openssl genrsa -out "$MACHINES_CONFIG/machines.key" 4096
|
||||
chmod 600 "$MACHINES_CONFIG/machines.key"
|
||||
openssl req -key "$MACHINES_CONFIG/machines.key" -new -out "$MACHINES_CONFIG/machines.csr"
|
||||
openssl x509 -req -days 1826 -in "$MACHINES_CONFIG/machines.csr" -signkey "$MACHINES_CONFIG/machines.key" -out "$MACHINES_CONFIG/machines.crt"
|
||||
_machines-pubFromCrt
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
# USER FUNCTIONS
|
||||
|
||||
function machines_setup {
|
||||
if [ -e "$MACHINES_CONFIG/this.name" ]; then
|
||||
echo "[ERROR] This machine is already set up" 1>&2;
|
||||
exit 1
|
||||
fi
|
||||
|
||||
_machines-ensurePub
|
||||
|
||||
# Variables
|
||||
read -r -p 'Machine name? ' name
|
||||
read -r -p 'Hosts (separated by spaces)? ' hosts
|
||||
|
||||
# User key
|
||||
mkdir -p "$MACHINES_HOME/.ssh" &> /dev/null
|
||||
if [[ ! -f $MACHINES_HOME/.ssh/id_rsa || ! -f $MACHINES_HOME/.ssh/id_rsa.pub ]]; then
|
||||
ssh-keygen -b 4096 -C "$name@machines.frogeye.fr" -f "$MACHINES_HOME/.ssh/id_rsa" -t rsa
|
||||
fi
|
||||
userkey=$(<"$MACHINES_HOME/.ssh/id_rsa.pub")
|
||||
|
||||
# Host key
|
||||
for type in ecdsa ed25519 rsa dsa; do
|
||||
if [ -f "/etc/ssh/ssh_host_${type}_key.pub" ]; then
|
||||
hostkey=$(<"/etc/ssh/ssh_host_${type}_key.pub")
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
# Subscription
|
||||
data="name=$(urlencode "$name")&userkey=$(urlencode "$userkey")&hostkey=$(urlencode "$hostkey")&user=$(urlencode "$USER")"
|
||||
for host in $hosts; do
|
||||
data="$data&host[]=$(urlencode "$host")"
|
||||
done
|
||||
|
||||
_machines-apiToken machine --post-data "$data"
|
||||
|
||||
echo "$name" > "$MACHINES_CONFIG/this.name"
|
||||
machines_update
|
||||
}
|
||||
|
||||
function machines_update {
|
||||
_machines-api "machine/$(cat "$MACHINES_CONFIG/this.name")" > "$MACHINES_CONFIG/this"
|
||||
_machines-updateAkey
|
||||
}
|
||||
|
||||
function machines_totp {
|
||||
url=$(_machines-apiSigned totp)
|
||||
echo "URL : $url"
|
||||
echo "$url" | qrencode -o - | feh -
|
||||
}
|
||||
|
||||
|
||||
function machines_help {
|
||||
command="$1"
|
||||
if [ -n "$command" ]; then
|
||||
if type "machines_${command}_help" &> /dev/null; then
|
||||
shift
|
||||
"machines_${command}_help" "$@"
|
||||
return $?
|
||||
fi
|
||||
fi
|
||||
echo "Usage: $0 COMMAND"
|
||||
echo
|
||||
echo "User commands:"
|
||||
echo " setup Interactive initial setup for new machine"
|
||||
echo " update Update this machine"
|
||||
echo " help Get help with commands"
|
||||
echo
|
||||
echo "Admin commands:"
|
||||
echo " machine|mac|m Modify machines"
|
||||
echo " network|net|n Modify networks"
|
||||
echo " update-all Update all machines available via SSH"
|
||||
echo " regen-keys Regenerate system keys"
|
||||
echo " sign Sign recent transactions for propagation"
|
||||
echo " totp Get TOTP generating QR code / URL"
|
||||
return 0
|
||||
}
|
||||
|
||||
# MAIN
|
||||
command="$1"
|
||||
shift
|
||||
if type "machines_$command" &> /dev/null; then
|
||||
"machines_$command" "$@"
|
||||
else
|
||||
machines_help "$@"
|
||||
fi
|
128
config/scripts/md2html
Executable file
128
config/scripts/md2html
Executable file
|
@ -0,0 +1,128 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
// Imports
|
||||
var fs = require('fs');
|
||||
var marked = require('marked');
|
||||
var highlight = require('highlight.js');
|
||||
var katex = require('katex');
|
||||
var yargs = require('yargs');
|
||||
var extend = require('util')._extend;
|
||||
|
||||
|
||||
// Constants
|
||||
var template = '<!DOCTYPE html> <html lang="fr"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <meta charset="UTF-8"> <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/9.11.0/styles/xcode.min.css" integrity="sha256-OED7Gmqde0cMVVeo1zVd+3fBD4EST32D4h9YT7KY0aY=" crossorigin="anonymous" /> <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/KaTeX/0.7.1/katex.min.css" integrity="sha384-wITovz90syo1dJWVh32uuETPVEtGigN07tkttEqPv+uR2SE/mbQcG7ATL28aI9H0" crossorigin="anonymous"> <style type="text/css"> image { max-width: 100 % } </style> <title>%TITLE%</title> </head> <body> <main class="page-content" aria-label="Content"> %BODY% </main> </body> </html>'
|
||||
|
||||
|
||||
// Understanding
|
||||
var argv = yargs
|
||||
.usage("Usage: $0 [options]")
|
||||
.example('$0 -i doc.md -o doc.html', 'Convert doc.md to HTML using the default template')
|
||||
.help('h')
|
||||
.alias('h', 'help')
|
||||
|
||||
.describe('i', 'Input file')
|
||||
.alias('i', 'input')
|
||||
.default('i', '/dev/stdin')
|
||||
|
||||
.describe('o', 'Output file')
|
||||
.alias('o', 'output')
|
||||
.default('o', '/dev/stdout')
|
||||
|
||||
.describe('t', 'Template file (%BODY% is replaced by the text)')
|
||||
.alias('t', 'template')
|
||||
|
||||
.argv;
|
||||
|
||||
if (argv.t) {
|
||||
template = fs.readFileSync(argv.t, "utf8");
|
||||
}
|
||||
|
||||
var latex = true;
|
||||
// TODO Arg
|
||||
|
||||
// Settings
|
||||
|
||||
var extraLangages = {
|
||||
avrpseudo: function(hljs) {
|
||||
lang = extend({}, highlight.getLanguage('avrasm'));
|
||||
lang.keywords.keyword += ' Si Alors Sinon FinSi TantQue FinTantQue Pour FinPour allant de à ←';
|
||||
lang.keywords.keyword += ' Lire Sortir sur Appeler Retourner';
|
||||
lang.keywords.keyword += ' DecalerDroite DecalerGauche';
|
||||
lang.keywords.keyword += ' Incrementer Decrementer';
|
||||
lang.keywords.built_in += ' vrai faux';
|
||||
lang.contains.push({
|
||||
className: 'meta',
|
||||
begin: /Configurer.+/,
|
||||
end: /\n/,
|
||||
});
|
||||
return lang;
|
||||
},
|
||||
avrasmplus: function(hljs) {
|
||||
lang = extend({}, highlight.getLanguage('avrasm'));
|
||||
lang.keywords.keyword += ' si saut alors et ou if then goto && || <-';
|
||||
lang.contains.push({
|
||||
className: 'meta',
|
||||
begin: /@\w+/,
|
||||
});
|
||||
return lang;
|
||||
},
|
||||
};
|
||||
|
||||
for (lang in extraLangages) {
|
||||
// This must be done before any call to highlight.highlight :/
|
||||
highlight.registerLanguage(lang, extraLangages[lang]);
|
||||
}
|
||||
|
||||
var renderer = new marked.Renderer();
|
||||
marked.setOptions({
|
||||
highlight: function(code, lang) {
|
||||
if (lang == 'raw') {
|
||||
return code;
|
||||
} else if (highlight.getLanguage(lang)) {
|
||||
return highlight.highlight(lang, code).value;
|
||||
} else {
|
||||
// if (extraLangages[lang]) {
|
||||
// highlight.registerLanguage(lang, extraLangages[lang]);
|
||||
// return highlight.highlight(lang, code).value;
|
||||
// } else {
|
||||
// }
|
||||
console.warn("Unknown language: " + lang);
|
||||
return highlight.highlightAuto(code).value;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
// Processing
|
||||
markdownString = fs.readFileSync(argv.i, "utf8");
|
||||
|
||||
// TeX
|
||||
if (latex) {
|
||||
markdownString = markdownString.replace(/\\\$/g, '$')
|
||||
markdownString = markdownString.replace(/\$\$([\s\S]+)\$\$/gm, function(glob, formula) {
|
||||
return katex.renderToString(formula, {
|
||||
displayMode: true
|
||||
});
|
||||
});
|
||||
markdownString = markdownString.replace(/\$([^$]+)\$/g, function(glob, formula) {
|
||||
return katex.renderToString(formula, {
|
||||
displayMode: false
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Conversion
|
||||
htmlString = marked(markdownString, {
|
||||
renderer: renderer,
|
||||
breaks: false
|
||||
});
|
||||
// fullHtmlString = htmlString;
|
||||
fullHtmlString = template.replace('%BODY%', () => { return htmlString });
|
||||
|
||||
|
||||
// Saving
|
||||
if (argv.o == '/dev/stdout') {
|
||||
console.log(fullHtmlString);
|
||||
} else {
|
||||
fs.writeFileSync(argv.o, fullHtmlString);
|
||||
}
|
608
config/scripts/mel
Executable file
608
config/scripts/mel
Executable file
|
@ -0,0 +1,608 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
Meh mail client
|
||||
A dumb Python scripts that leverages notmuch, mbsync, and msmtp
|
||||
to become a fully-functional extremly-opinonated mail client.
|
||||
"""
|
||||
|
||||
# TODO Features
|
||||
# TODO Implement initial command set
|
||||
# TODO Lockfiles for write operations on mail files (mbsync, tags→maildir operations)
|
||||
# TODO OPTI Lockfile per account and process everything in parallel (if implemented, this
|
||||
# should be optional since while it may speed up the mail fetching process, its multi-threading
|
||||
# nature would cause a lot of cache flushes and be not very efficient on battery)
|
||||
# TODO Handle true character width
|
||||
# TODO IMAP IDLE watches?
|
||||
# TODO GPG
|
||||
# TODO (only then) Refactor
|
||||
# TODO OOP-based
|
||||
# TODO Merge file with melConf
|
||||
|
||||
# DEBUG Small perf profiler
|
||||
import time
|
||||
perf_dict = dict()
|
||||
perf_last = time.perf_counter()
|
||||
def perfstep(name):
    """Accumulate the time elapsed since the previous call under *name*.

    Debug-only micro profiler: buckets wall-clock deltas into the
    module-level ``perf_dict`` and resets ``perf_last``.
    """
    t = time.perf_counter()
    global perf_last
    global perf_dict
    diff = t - perf_last
    if name not in perf_dict:
        perf_dict[name] = 0
    perf_dict[name] += diff
    # Re-read the clock so the time spent in this function is not counted.
    perf_last = time.perf_counter()
|
||||
|
||||
|
||||
import notmuch
|
||||
import logging
|
||||
import coloredlogs
|
||||
import colorama
|
||||
import datetime
|
||||
import os
|
||||
import progressbar
|
||||
import argparse
|
||||
import configparser
|
||||
import base64
|
||||
import shutil
|
||||
import argparse
|
||||
import xdg.BaseDirectory
|
||||
import sys
|
||||
import subprocess
|
||||
import html
|
||||
import re
|
||||
import email.parser
|
||||
|
||||
perfstep("import")
|
||||
|
||||
ACCOUNTS = dict()
|
||||
ALIASES = set()
|
||||
db = None
|
||||
config = None
|
||||
|
||||
def notmuch_new():
    """Run ``notmuch new`` to (re)index the mail store.

    The database handle is closed first so the subprocess can take the
    notmuch lock.
    """
    close_database()
    log.info("Indexing mails")
    notmuchConfigPath = os.path.expanduser("~/.config/notmuch-config") # TODO Better
    cmd = ["notmuch", "--config", notmuchConfigPath, "new"]
    log.debug(" ".join(cmd))
    subprocess.run(cmd)
|
||||
|
||||
def list_folders():
    """Return every maildir folder as a tuple of path components.

    A directory counts as a maildir folder when it contains the three
    cur/new/tmp subdirectories. The first tuple component is the account
    name, the rest the folder path inside that account.
    """
    storagePath = os.path.realpath(os.path.expanduser(config["GENERAL"]["storage"]))
    folders = list()
    for account in ACCOUNTS.keys():
        storagePathAccount = os.path.join(storagePath, account)
        for root, dirs, files in os.walk(storagePathAccount):
            if "cur" not in dirs or "new" not in dirs or "tmp" not in dirs:
                continue
            assert root.startswith(storagePath)
            # Path relative to the storage root, e.g. "/account/INBOX/sub".
            path = root[len(storagePath):]
            pathSplit = path.split('/')
            if pathSplit[0] == '':
                pathSplit = pathSplit[1:]
            folders.append(tuple(pathSplit))
    return folders
|
||||
|
||||
def open_database(write=False):
    """Open the global notmuch database, re-opening if the mode differs.

    Args:
        write: open READ_WRITE when True, READ_ONLY otherwise. An already
            open handle in the wrong mode is closed and reopened.
    """
    global db
    mode = notmuch.Database.MODE.READ_WRITE if write else notmuch.Database.MODE.READ_ONLY
    if db:
        if db.mode == mode:
            # Already open in the requested mode: nothing to do.
            return
        else:
            log.info("Current database not in required mode, closing")
            close_database()
    log.info("Opening database in mode {}".format(mode))
    dbPath = os.path.realpath(os.path.expanduser(config["GENERAL"]["storage"]))
    db = notmuch.Database(mode=mode, path=dbPath)
|
||||
|
||||
def close_database():
    """Close the global notmuch database handle, if any, and reset it."""
    global db
    if db:
        log.info("Closing database")
        db.close()
        db = None
|
||||
|
||||
def generate_aliases():
    """Populate ACCOUNTS and ALIASES from the lowercase config sections.

    Each lowercase section of the accounts config is an account; its
    ``from`` address and any semicolon-separated ``alternatives`` are
    registered as known sender aliases (used to tag mail as 'sent').
    """
    for name in config.sections():
        if not name.islower():
            # Uppercase sections (e.g. GENERAL) are settings, not accounts.
            continue
        section = config[name]
        ALIASES.add(section["from"])
        if "alternatives" in section:
            for alt in section["alternatives"].split(";"):
                ALIASES.add(alt)
        ACCOUNTS[name] = section
|
||||
|
||||
|
||||
def get_location(msg):
    """Return ``(mailbox, folder, state)`` derived from a message's path.

    The maildir layout is ``<db root>/<mailbox>/<folder...>/<state>/<file>``
    where *state* is one of cur/tmp/new and *folder* is a (possibly empty)
    tuple of directory names.
    """
    path = msg.get_filename()
    path = os.path.dirname(path)
    base = db.get_path()
    assert path.startswith(base)
    # Path relative to the database root, split into components; the first
    # component is empty because the relative path starts with '/'.
    path = path[len(base):]
    pathSplit = path.split('/')
    mailbox = pathSplit[1]
    assert mailbox in ACCOUNTS
    state = pathSplit[-1]
    folder = tuple(pathSplit[2:-1])
    assert state in {'cur', 'tmp', 'new'}
    return (mailbox, folder, state)
|
||||
|
||||
# Cache: mailbox name → ANSI truecolor escape sequence
MAILBOX_COLORS = dict()


def get_mailbox_color(mailbox):
    """Return the ANSI escape for the account's configured ``#RRGGBB`` color.

    The computed escape is cached in MAILBOX_COLORS; the leading '#' in the
    config value is optional.
    """
    if mailbox not in MAILBOX_COLORS:
        colorStr = config[mailbox]["color"]
        colorStr = colorStr[1:] if colorStr[0] == '#' else colorStr
        R = int(colorStr[0:2], 16)
        G = int(colorStr[2:4], 16)
        B = int(colorStr[4:6], 16)
        # 38;2;R;G;B is the 24-bit foreground color escape.
        MAILBOX_COLORS[mailbox] = '\x1b[38;2;{};{};{}m'.format(R, G, B)
    return MAILBOX_COLORS[mailbox]
|
||||
|
||||
def format_date(date):
    """Render *date* compactly: time of day if today (or later), date otherwise."""
    current = datetime.datetime.now()
    day_start = current.replace(hour=0, minute=0, second=0, microsecond=0)
    fmt = '%H:%M:%S' if date > day_start else '%d/%m/%y'
    return date.strftime(fmt)
|
||||
|
||||
# Width of the fixed columns (UID + date + icons + separators)
WIDTH_FIXED = 31
# Share of the remaining width given to the sender/recipient column
WIDTH_RATIO_DEST_SUBJECT = 0.3
ISATTY = sys.stdout.isatty()
# Computed column widths; None means "no clipping" (non-tty output)
destWidth = None
subjectWidth = None
def compute_line_format():
    """Compute destWidth/subjectWidth from the terminal size (tty only).

    On a non-tty both widths stay None so output is tab-separated and
    unclipped (machine-readable).
    """
    if ISATTY:
        columns, rows = shutil.get_terminal_size((80, 20))
        remain = columns - WIDTH_FIXED - 1
        global destWidth, subjectWidth
        destWidth = int(remain * WIDTH_RATIO_DEST_SUBJECT)
        subjectWidth = remain - destWidth
    else:
        destWidth = None
        subjectWidth = None
|
||||
|
||||
def clip_text(size, text):
    """Fit *text* into exactly *size* columns.

    Shorter text is right-padded with spaces; longer text is truncated with
    a trailing ellipsis. A *size* of None leaves the text untouched.
    """
    if size is None:
        return text
    length = len(text)
    if length > size:
        return text[:size - 1] + '…'
    if length < size:
        return text.ljust(size)
    return text
|
||||
|
||||
|
||||
def print_msg(msg):
    """Print a one-line summary of *msg*: UID, date, status icons, peer, subject.

    On a tty the line is colored per account and clipped to the terminal
    width; otherwise fields are tab-separated and unclipped.
    """
    if not destWidth:
        # Lazily compute column widths on first use.
        compute_line_format()

    sep = " " if ISATTY else "\t"
    line = ""
    tags = set(msg.get_tags())
    mailbox, folder, state = get_location(msg)
    if ISATTY:
        line += get_mailbox_color(mailbox)

    # UID (stored as a 'tuid...' tag on the message)
    uid = None
    for tag in tags:
        if tag.startswith('tuid'):
            uid = tag[4:]
    assert isUID(uid), uid
    line += uid

    # Date
    line += sep
    date = datetime.datetime.fromtimestamp(msg.get_date())
    line += format_date(date)

    # Icons: four one-character columns, each encoding a pair of tags.
    line += sep
    def tags2col1(tag1, tag2, both, first, second, none):
        # Append one character depending on which of the two tags are set.
        nonlocal line
        if tag1 in tags:
            if tag2 in tags:
                line += both
            else:
                line += first
        else:
            if tag2 in tags:
                line += second
            else:
                line += none

    tags2col1('spam', 'draft', '?', 'S', 'D', ' ')
    tags2col1('attachment', 'encrypted', 'E', 'A', 'E', ' ')
    tags2col1('unread', 'flagged', '!', 'U', 'F', ' ')
    tags2col1('sent', 'replied', '?', '↑', '↪', ' ')

    # Peer: recipient for sent mail, sender otherwise.
    if 'sent' in tags:
        dest = msg.get_header("to")
    else:
        dest = msg.get_header("from")
    line += sep
    line += clip_text(destWidth, dest)

    # Subject
    line += sep
    subject = msg.get_header("subject")
    line += clip_text(subjectWidth, subject)

    if ISATTY:
        line += colorama.Style.RESET_ALL
    print(line)
|
||||
|
||||
|
||||
def retag_msg(msg):
    """Synchronize a message's notmuch tags with its maildir location.

    Sets/clears the inbox/spam/deleted/draft/sent/unprocessed tags based on
    the folder the message lives in, and maintains the unique 'tuid<UID>'
    tag derived from the X-TUID header.
    """
    mailbox, folder, state = get_location(msg)

    # Search-friendly folder name: uppercase, dropping a leading INBOX when
    # the message is in a subfolder of it.
    slugFolderList = list()
    for f, fold in [(f, folder[f]) for f in range(len(folder))]:
        if f == 0 and len(folder) > 1 and fold == "INBOX":
            continue
        slugFolderList.append(fold.upper())
    slugFolder = tuple(slugFolderList)

    tags = set(msg.get_tags())

    def tag_if(tag, condition):
        # Add or remove *tag* so its presence matches *condition*.
        if condition and tag not in tags:
            msg.add_tag(tag)
        elif not condition and tag in tags:
            msg.remove_tag(tag)
    expeditor = extract_email(msg.get_header('from'))

    tag_if('inbox', slugFolder[0] == 'INBOX')
    tag_if('spam', slugFolder[0] == 'JUNK' or slugFolder[0] == 'SPAM')
    tag_if('deleted', slugFolder[0] == 'TRASH')
    tag_if('draft', slugFolder[0] == 'DRAFTS')
    # Mail from one of our own aliases counts as sent.
    tag_if('sent', expeditor in ALIASES)
    tag_if('unprocessed', False)

    # UID
    uid = msg.get_header("X-TUID")
    if not isUID(uid):
        # TODO Happens to sent mails but should it?
        print(f"{msg.get_filename()} has no UID!")
        return
    uidtag = 'tuid{}'.format(uid)
    # Remove any other, stale UID tags before adding the current one.
    for tag in tags:
        if tag.startswith('tuid') and tag != uidtag:
            msg.remove_tag(tag)
    msg.add_tag(uidtag)
|
||||
|
||||
|
||||
|
||||
def extract_email(field):
    """Return the address between angle brackets in a From/To header field.

    Falls back to the whole field when no '<...>' pair is present.
    """
    start = field.find('<')
    stop = field.find('>')
    if start < 0 or stop < 0:
        return field
    return field[start + 1:stop]
|
||||
|
||||
def applyMsgs(queryStr, action, *args, showProgress=False, write=False, closeDb=True, **kwargs):
    """Run *action* on every message matching *queryStr*; return the count.

    Args:
        queryStr: notmuch query string.
        action: callable invoked as ``action(msg, *args, **kwargs)``.
        showProgress: wrap the iteration in a progress bar.
        write: open the database read-write and freeze/thaw each message.
        closeDb: close the database afterwards (pass False when batching
            several calls).

    Returns:
        The number of matching messages.
    """
    open_database(write=write)

    log.info("Querying {}".format(queryStr))
    query = notmuch.Query(db, queryStr)
    query.set_sort(notmuch.Query.SORT.OLDEST_FIRST)

    elements = query.search_messages()
    nbMsgs = query.count_messages()

    iterator = progressbar.progressbar(elements, max_value=nbMsgs) if showProgress else elements

    log.info("Executing {}".format(action))
    for msg in iterator:
        if write:
            # freeze() batches tag changes so they commit atomically on thaw.
            msg.freeze()

        action(msg, *args, **kwargs)

        if write:
            msg.thaw()
            # Keep maildir flags (S, R, F, ...) in sync with notmuch tags.
            msg.tags_to_maildir_flags()

    if closeDb:
        close_database()

    return nbMsgs
|
||||
|
||||
def notify_msg(msg):
    """Show a desktop notification (via notify-send) for one message."""
    log.info("Sending notification for {}".format(msg))
    subject = msg.get_header("subject")
    expd = msg.get_header("from")
    account, _, _ = get_location(msg)

    # Sender in the summary with the account name in italics; subject as body.
    summary = '{} (<i>{}</i>)'.format(html.escape(expd), account)
    body = html.escape(subject)
    cmd = ["notify-send", "-u", "low", "-i", "mail-message-new", summary, body]
    print(' '.join(cmd))
    subprocess.run(cmd)
|
||||
|
||||
|
||||
def notify_all(*args, **kwargs):
    """Notify for every unread & unprocessed message; chime if any were found."""
    open_database()
    nbMsgs = applyMsgs('tag:unread and tag:unprocessed', notify_msg)
    if nbMsgs > 0:
        log.info("Playing notification sound ({} new message(s))".format(nbMsgs))
        # Short two-tone chime synthesized with SoX's `play`.
        cmd = ["play", "-n", "synth", "sine", "E4", "sine", "A5", "remix", "1-2", "fade", "0.5", "1.2", "0.5", "2"]
        subprocess.run(cmd)
    close_database()
|
||||
|
||||
def isUID(uid):
    """Return True when *uid* is a valid 12-character message UID.

    UIDs appear in the ``X-TUID`` header and in ``tuid<UID>`` notmuch tags.

    Fix: the original returned the ``re.Match`` object (or False), not a
    bool. ``re.fullmatch`` anchors the pattern, which also makes the
    explicit length check redundant.
    """
    return isinstance(uid, str) and bool(re.fullmatch(r'[a-zA-Z0-9+/]{12}', uid))
|
||||
|
||||
# From https://stackoverflow.com/a/312464
def chunks(l, n):
    """Yield successive n-sized chunks from l."""
    start = 0
    total = len(l)
    while start < total:
        yield l[start:start + n]
        start += n
|
||||
|
||||
def apply_msgs_input(argmessages, action, write=False):
    """Run *action* on each message designated by a 12-character UID.

    UIDs come from *argmessages* (CLI arguments, possibly concatenated) or,
    when no arguments are given or a single ``-`` is passed, from standard
    input, one per line.

    Args:
        argmessages: list of UID strings from the command line.
        action: callable passed through to applyMsgs.
        write: open the database read-write.
    """
    if not len(argmessages):
        # No explicit UIDs: fall back to stdin when something is piped in.
        fromStdin = not sys.stdin.isatty()
    else:
        # Fix: compare the first element, not the list itself — the original
        # `argmessages == '-'` compared a list to a string and was always
        # False, so the documented lone-"-" stdin trigger never fired.
        fromStdin = len(argmessages) == 1 and argmessages[0] == '-'

    messages = list()
    if fromStdin:
        for line in sys.stdin:
            # The UID is the first 12 characters of each line (as printed
            # by print_msg).
            uid = line[:12]
            if not isUID(uid):
                log.error("Not an UID: {}".format(uid))
                continue
            messages.append(uid)
    else:
        for uids in argmessages:
            if len(uids) > 12:
                log.warn("Might have forgotten some spaces between the UIDs. Don't worry, I'll split them for you")
            for uid in chunks(uids, 12):
                if not isUID(uid):
                    log.error("Not an UID: {}".format(uid))
                    continue
                messages.append(uid)

    for message in messages:
        queryStr = 'tag:tuid{}'.format(message)
        # Keep the database open across iterations; close once at the end.
        nbMsgs = applyMsgs(queryStr, action, write=write, closeDb=False)
        if nbMsgs < 1:
            log.error("Couldn't execute function for message {}".format(message))
    close_database()
|
||||
|
||||
def format_header_value(val):
    """Collapse a raw header value onto one trimmed line.

    Removes the newlines and tabs of folded RFC 2822 headers, then strips
    surrounding whitespace.
    """
    cleaned = val.translate(str.maketrans('', '', '\n\t'))
    return cleaned.strip()
|
||||
|
||||
# From https://stackoverflow.com/a/1094933
def sizeof_fmt(num, suffix='B'):
    """Format a byte count as a human-readable, binary-prefixed string."""
    value = num
    for unit in ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi'):
        if abs(value) < 1024.0:
            return "%3.1f %s%s" % (value, unit, suffix)
        value /= 1024.0
    # Anything past zebi is reported in yobi.
    return "%.1f %s%s" % (value, 'Yi', suffix)
|
||||
|
||||
# Line templates for the MIME tree: '+' marks a container, '→' a leaf part.
PART_MULTI_FORMAT = colorama.Fore.BLUE + '{nb} {indent}+ {typ}' + colorama.Style.RESET_ALL
PART_LEAF_FORMAT = colorama.Fore.BLUE + '{nb} {indent}→ {desc} ({typ}; {size})' + colorama.Style.RESET_ALL
def show_parts_tree(part, lvl=0, nb=1):
    """Print the MIME tree of *part*; return the number of parts printed.

    Args:
        part: an email.message.Message (possibly multipart).
        lvl: indentation depth (recursion level).
        nb: running depth-first part number assigned to this part.
    """
    indent = lvl * '\t'
    typ = part.get_content_type()
    if part.is_multipart():
        print(PART_MULTI_FORMAT.format(nb=nb, indent=indent, typ=typ))
        payl = part.get_payload()
        size = 1
        for obj in payl:
            # Children continue the depth-first numbering from this part.
            size += show_parts_tree(obj, lvl=lvl+1, nb=nb+size)
        return size
    else:
        size = len(part.get_payload(decode=True))
        desc = part.get('Content-Description', '<no description>')
        print(PART_LEAF_FORMAT.format(nb=nb, indent=indent, typ=typ, desc=desc, size=sizeof_fmt(size)))
        return 1
|
||||
|
||||
# Headers displayed when reading a message, in this order.
INTERESTING_HEADERS = ["Date", "From", "Subject", "To", "Cc", "Message-Id"]
HEADER_FORMAT = colorama.Fore.BLUE + colorama.Style.BRIGHT + '{}:' + colorama.Style.NORMAL + ' {}' + colorama.Style.RESET_ALL
def read_msg(msg):
    """Display a message: selected headers, MIME tree, then text/plain parts."""
    # Parse the message file from disk.
    filename = msg.get_filename()
    parser = email.parser.BytesParser()
    with open(filename, 'rb') as f:
        mail = parser.parse(f)

    # Debug
    # NOTE(review): exposes the parsed mail as module global `a` for
    # interactive inspection; consider removing.
    global a
    a = mail

    # Report parser-detected defects (non-fatal).
    if len(mail.defects):
        log.warn("Defects found in the mail:")
        for defect in mail.defects:
            log.warn(mail.defects)


    # Headers
    for key in INTERESTING_HEADERS:
        val = mail.get(key)
        if val:
            val = format_header_value(val)
            print(HEADER_FORMAT.format(key, val))
    # TODO Show all headers
    # TODO BONUS Highlight failed verifications

    show_parts_tree(mail)
    print()

    # Show text/plain
    for part in mail.walk():
        if part.get_content_type() == "text/plain":
            payl = part.get_payload(decode=True)
            print(payl.decode())
|
||||
|
||||
|
||||
perfstep("definitions")


if __name__ == "__main__":
    # Command-line interface
    parser = argparse.ArgumentParser(description="Meh mail client")
    selectedVerbosityLevels = ["DEBUG", "INFO", "WARNING", "ERROR", "FATAL"]
    parser.add_argument('-v', '--verbosity', choices=selectedVerbosityLevels, default='WARNING', help="Verbosity of log messages")
    # parser.add_argument('-n', '--dry-run', action='store_true', help="Don't do anything") # DEBUG
    defaultConfigFile = os.path.join(xdg.BaseDirectory.xdg_config_home, 'mel', 'accounts.conf')
    parser.add_argument('-c', '--config', default=defaultConfigFile, help="Accounts config file")

    subparsers = parser.add_subparsers(help="Action to execute")

    ## List messages

    # Default action when no sub-command is given: show the inbox
    def func_default(args):
        applyMsgs('tag:inbox', print_msg)
    parser.set_defaults(func=func_default)

    # inbox (default)
    def func_inbox(args):
        queryStr = 'tag:unread' if args.only_unread else 'tag:inbox'
        applyMsgs(queryStr, print_msg)

    parserInbox = subparsers.add_parser("inbox", help="Show unread, unsorted and flagged messages")
    parserInbox.add_argument('-u', '--only-unread', action='store_true', help="Show unread messages only")
    # TODO Make this more relevant
    parserInbox.set_defaults(func=func_inbox)

    # list folder [--recurse]
    ## List actions

    # flag msg...
    def func_flag(args):
        def flag_msg(msg):
            msg.add_tag('flagged')
        apply_msgs_input(args.message, flag_msg, write=True)
    parserFlag = subparsers.add_parser("flag", help="Mark messages as flagged")
    parserFlag.add_argument('message', nargs='*', help="Messages")
    parserFlag.set_defaults(func=func_flag)

    # unflag msg...
    def func_unflag(args):
        def unflag_msg(msg):
            msg.remove_tag('flagged')
        apply_msgs_input(args.message, unflag_msg, write=True)
    parserUnflag = subparsers.add_parser("unflag", help="Mark messages as not-flagged")
    parserUnflag.add_argument('message', nargs='*', help="Messages")
    parserUnflag.set_defaults(func=func_unflag)

    # delete msg...
    # spam msg...
    # move dest msg...
    ## Read message

    # read msg [--html] [--plain] [--browser]
    def func_read(args):
        apply_msgs_input(args.message, read_msg)
    parserRead = subparsers.add_parser("read", help="Read message")
    parserRead.add_argument('message', nargs=1, help="Messages")
    parserRead.set_defaults(func=func_read)

    # attach msg [id] [--save] (list if no id, xdg-open else)
    ## Redaction
    # new account
    # reply msg [--all]
    ## Folder management
    # tree [folder]
    # mkdir folder
    # rmdir folder (prevent if folder isn't empty (mail/subfolder))
    # (yeah that should do)
    ## Meta
    # setup (interactive thing maybe)

    # fetch (mbsync, notmuch new, retag, notify; called by greater gods)
    def func_fetch(args):
        # Fetch mails
        log.info("Fetching mails")
        mbsyncConfigPath = os.path.expanduser("~/.config/mbsyncrc")  # TODO Better
        cmd = ["mbsync", "--config", mbsyncConfigPath, "--all"]
        subprocess.run(cmd)

        # Index new mails
        notmuch_new()

        # Notify
        notify_all()

        # Tag new mails
        applyMsgs('tag:unprocessed', retag_msg, showProgress=True, write=True)

    parserFetch = subparsers.add_parser("fetch", help="Fetch mail, tag them, and run notifications")
    parserFetch.set_defaults(func=func_fetch)

    ## Debug
    # debug (various)
    # NOTE(review): func_expose is defined but never wired to a sub-parser.
    def func_expose(args):
        # And leave the door open
        def expose_msg(a):
            global msg
            msg = a
        applyMsgs('tag:tuidyviU45m6flff', expose_msg, closeDb=False)

    def func_debug(args):
        from pprint import pprint
        pprint(list_folders())
    parserDebug = subparsers.add_parser("debug", help="Who know what this holds...")
    parserDebug.set_defaults(verbosity='DEBUG')
    parserDebug.set_defaults(func=func_debug)

    # retag (all or unprocessed)
    def func_retag(args):
        applyMsgs('*', retag_msg, showProgress=True, write=True)
    parserRetag = subparsers.add_parser("retag", help="Retag all mails (when you changed configuration)")
    parserRetag.set_defaults(func=func_retag)

    # all
    def func_all(args):
        applyMsgs('*', print_msg)

    parserAll = subparsers.add_parser("all", help="Show ALL messages")
    parserAll.set_defaults(func=func_all)

    # Init
    args = parser.parse_args()
    perfstep("parse_args")

    colorama.init()
    coloredlogs.install(level=args.verbosity, fmt='%(levelname)s %(message)s')
    log = logging.getLogger()

    log.info("Loading config {}".format(args.config))
    if not os.path.isfile(args.config):
        log.fatal("Config file not found: {}".format(args.config))
        sys.exit(1)
        # TODO Create it, maybe?
    config = configparser.ConfigParser()
    config.read(args.config)

    generate_aliases()
    perfstep("config")

    if args.func:
        log.info("Executing function {}".format(args.func))
        args.func(args)

    perfstep("exec")

    # DEBUG
    sys.exit(0)
    for kv in sorted(perf_dict.items(), key=lambda p: p[1]):
        log.debug("{1:.6f} {0}".format(*kv))
|
336
config/scripts/melConf
Executable file
336
config/scripts/melConf
Executable file
|
@ -0,0 +1,336 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
Meh mail client conf generator for other things
|
||||
"""
|
||||
|
||||
import configparser
|
||||
import os
|
||||
import sys
|
||||
|
||||
# TODO Find config file from XDG
|
||||
# TODO Signature file
|
||||
# TODO Write ~/.mail/[mailbox]/color file if required by sth?
|
||||
# TODO Write in .config or .cache /mel
|
||||
# TODO Fix IMAPS with mbsync
|
||||
|
||||
configPath = os.path.join(os.path.expanduser('~'), '.config', 'mel', 'accounts.conf')
|
||||
|
||||
config = configparser.ConfigParser()
|
||||
config.read(configPath)
|
||||
|
||||
storageFull = os.path.realpath(os.path.expanduser(config["GENERAL"]["storage"]))
|
||||
config["GENERAL"]["storage"] = storageFull
|
||||
|
||||
SERVER_DEFAULTS = {
|
||||
"imap": {"port": 143, "starttls": True},
|
||||
"smtp": {"port": 587, "starttls": True},
|
||||
}
|
||||
SERVER_ITEMS = {"host", "port", "user", "pass", "starttls"}
|
||||
ACCOUNT_DEFAULTS = {
|
||||
"color": "#FFFFFF",
|
||||
"color16": "0",
|
||||
# "colormutt": "white",
|
||||
"inboxfolder": "INBOX",
|
||||
"archivefolder": "Archive",
|
||||
"draftsfolder": "Drafts",
|
||||
"sentfolder": "Sent",
|
||||
"spamfolder": "Spam",
|
||||
"trashfolder": "Trash",
|
||||
}
|
||||
|
||||
# Reading sections
|
||||
accounts = dict()
|
||||
mails = set()
|
||||
|
||||
for name in config.sections():
|
||||
if not name.islower():
|
||||
continue
|
||||
section = config[name]
|
||||
|
||||
data = dict()
|
||||
for server in SERVER_DEFAULTS.keys():
|
||||
for item in SERVER_ITEMS:
|
||||
key = server + item
|
||||
try:
|
||||
val = section.get(key) or section.get(item) or SERVER_DEFAULTS[server][item]
|
||||
except KeyError:
|
||||
raise KeyError("{}.{}".format(name, key))
|
||||
|
||||
if isinstance(val, str):
|
||||
if val == "True":
|
||||
val = True
|
||||
elif val == "False":
|
||||
val = False
|
||||
elif val.isnumeric():
|
||||
val = int(val)
|
||||
data[key] = val
|
||||
|
||||
for key in section.keys():
|
||||
if key in SERVER_ITEMS:
|
||||
continue
|
||||
data[key] = section[key]
|
||||
|
||||
for k, v in config['DEFAULT'].items():
|
||||
if k not in data:
|
||||
data[k] = v
|
||||
|
||||
for k, v in ACCOUNT_DEFAULTS.items():
|
||||
if k not in data:
|
||||
data[k] = v
|
||||
|
||||
mails.add(section["from"])
|
||||
if "alternatives" in section:
|
||||
for alt in section["alternatives"].split(";"):
|
||||
mails.add(alt)
|
||||
|
||||
data["account"] = name
|
||||
data["storage"] = os.path.join(config['GENERAL']['storage'], name)
|
||||
data["storageInbox"] = os.path.join(data["storage"], "INBOX")
|
||||
accounts[name] = data
|
||||
|
||||
general = dict()
|
||||
section = config["GENERAL"]
|
||||
for key in section.keys():
|
||||
general[key] = section[key]
|
||||
general["main"] = accounts[general["main"]]
|
||||
|
||||
|
||||
# OfflineIMAP
|
||||
|
||||
OFFLINEIMAP_BEGIN = """[general]
|
||||
# List of accounts to be synced, separated by a comma.
|
||||
accounts = {}
|
||||
maxsyncaccounts = {}
|
||||
stocktimeout = 60
|
||||
pythonfile = ~/.config/offlineimap.py
|
||||
|
||||
[mbnames]
|
||||
enabled = yes
|
||||
filename = ~/.mutt/mailboxes
|
||||
header = "mailboxes "
|
||||
peritem = "+%(accountname)s/%(foldername)s"
|
||||
sep = " "
|
||||
footer = "\\n"
|
||||
|
||||
"""
|
||||
|
||||
OFFLINEIMAP_ACCOUNT = """[Account {account}]
|
||||
localrepository = {account}-local
|
||||
remoterepository = {account}-remote
|
||||
autorefresh = 0.5
|
||||
quick = 10
|
||||
utf8foldernames = yes
|
||||
postsynchook = ~/.mutt/postsync
|
||||
|
||||
[Repository {account}-local]
|
||||
type = Maildir
|
||||
localfolders = {storage}
|
||||
|
||||
[Repository {account}-remote]
|
||||
type = IMAP
|
||||
{secconf}
|
||||
keepalive = 60
|
||||
holdconnectionopen = yes
|
||||
remotehost = {imaphost}
|
||||
remoteport = {imapport}
|
||||
remoteuser = {imapuser}
|
||||
remotepass = {imappass}
|
||||
|
||||
"""
|
||||
|
||||
offlineIMAPstr = OFFLINEIMAP_BEGIN.format(','.join(accounts), len(accounts))
|
||||
for name, account in accounts.items():
|
||||
if account["imapstarttls"]:
|
||||
secconf = "ssl = no"
|
||||
else:
|
||||
secconf = "sslcacertfile = /etc/ssl/certs/ca-certificates.crt"
|
||||
offlineIMAPstr += OFFLINEIMAP_ACCOUNT.format(**account, secconf=secconf)
|
||||
# TODO Write
|
||||
|
||||
# mbsync
|
||||
MBSYNC_ACCOUNT = """IMAPAccount {account}
|
||||
Host {imaphost}
|
||||
Port {imapport}
|
||||
User {imapuser}
|
||||
Pass "{imappassEscaped}"
|
||||
{secconf}
|
||||
|
||||
IMAPStore {account}-remote
|
||||
Account {account}
|
||||
|
||||
MaildirStore {account}-local
|
||||
Subfolders Verbatim
|
||||
Path {storage}/
|
||||
Inbox {storageInbox}/
|
||||
|
||||
Channel {account}
|
||||
Master :{account}-remote:
|
||||
Slave :{account}-local:
|
||||
Patterns *
|
||||
Create Both
|
||||
SyncState *
|
||||
|
||||
"""
|
||||
|
||||
mbsyncStr = ""
|
||||
for name, account in accounts.items():
|
||||
if account["imapstarttls"]:
|
||||
secconf = "SSLType STARTTLS"
|
||||
else:
|
||||
secconf = "SSLType IMAPS"
|
||||
if "certificate" in account:
|
||||
secconf += "\nCertificateFile {certificate}".format(**account)
|
||||
imappassEscaped = account["imappass"].replace("\\", "\\\\")
|
||||
mbsyncStr += MBSYNC_ACCOUNT.format(**account, secconf=secconf, imappassEscaped=imappassEscaped)
|
||||
mbsyncFilepath = os.path.join(os.path.expanduser('~'), '.config/mel/mbsyncrc')
|
||||
with open(mbsyncFilepath, 'w') as f:
|
||||
f.write(mbsyncStr)
|
||||
|
||||
# msmtp
|
||||
MSMTP_BEGIN = """defaults
|
||||
protocol smtp
|
||||
auth on
|
||||
tls_trust_file /etc/ssl/certs/ca-certificates.crt
|
||||
|
||||
"""
|
||||
|
||||
MSMTP_ACCOUNT = """account {account}
|
||||
from {from}
|
||||
user {smtpuser}
|
||||
password {smtppass}
|
||||
host {smtphost}
|
||||
port {smtpport}
|
||||
tls on
|
||||
|
||||
"""
|
||||
|
||||
msmtpStr = MSMTP_BEGIN
|
||||
for name, account in accounts.items():
|
||||
msmtpStr += MSMTP_ACCOUNT.format(**account)
|
||||
mbsyncFilepath = os.path.join(os.path.expanduser('~'), '.config/msmtp/config')
|
||||
with open(mbsyncFilepath, 'w') as f:
|
||||
f.write(msmtpStr)
|
||||
|
||||
|
||||
# notmuch
|
||||
NOTMUCH_BEGIN = """[database]
|
||||
path={storage}
|
||||
|
||||
[user]
|
||||
name={main[name]}
|
||||
primary_email={main[from]}
|
||||
other_email={other_email}
|
||||
|
||||
[new]
|
||||
tags=unprocessed;unread;
|
||||
ignore=
|
||||
|
||||
[search]
|
||||
exclude_tags=deleted;spam;
|
||||
|
||||
[maildir]
|
||||
synchronize_flags=true
|
||||
|
||||
[crypto]
|
||||
gpg_path=gpg
|
||||
|
||||
"""
|
||||
|
||||
other_email = mails.copy()
|
||||
other_email.remove(general["main"]["from"])
|
||||
other_email = ";".join(other_email)
|
||||
notmuchStr = NOTMUCH_BEGIN.format(**general, other_email=other_email)
|
||||
mbsyncFilepath = os.path.join(os.path.expanduser('~'), '.config/notmuch-config')
|
||||
with open(mbsyncFilepath, 'w') as f:
|
||||
f.write(notmuchStr)
|
||||
|
||||
# mutt (temp)
|
||||
|
||||
## mailboxes
|
||||
MAILBOXES_BEGIN = "mailboxes"
|
||||
|
||||
mailboxesStr = MAILBOXES_BEGIN
|
||||
for name, account in accounts.items():
|
||||
lines = "-" * (20 - len(name))
|
||||
mailboxesStr += f' "+{name}{lines}"'
|
||||
for root, dirs, files in os.walk(account['storage']):
|
||||
if "cur" not in dirs or "new" not in dirs or "tmp" not in dirs:
|
||||
continue
|
||||
assert root.startswith(storageFull)
|
||||
path = root[len(storageFull)+1:]
|
||||
mailboxesStr += f' "+{path}"'
|
||||
mailboxesStr += "\n"
|
||||
mailboxesFilepath = os.path.join(os.path.expanduser('~'), '.mutt/mailboxes')
|
||||
with open(mailboxesFilepath, 'w') as f:
|
||||
f.write(mailboxesStr)
|
||||
|
||||
## accounts
|
||||
# TODO html mails
|
||||
|
||||
MUTT_ACCOUNT = """set from = "{from}"
|
||||
set sendmail = "/usr/bin/msmtp -a {account}"
|
||||
set realname = "{name}"
|
||||
set spoolfile = "+{account}/{inboxfolder}"
|
||||
set mbox = "+{account}/{archivefolder}"
|
||||
set postponed = "+{account}/{draftsfolder}"
|
||||
set record = "+{account}/{sentfolder}"
|
||||
set trash = "+{account}/{trashfolder}"
|
||||
set signature = "~/.mutt/accounts/{account}.sig"
|
||||
set content_type = "text/plain"
|
||||
set sig_dashes = yes
|
||||
|
||||
color status {colormutt} default
|
||||
|
||||
macro index D \\
|
||||
"<clear-flag>N<save-message>+{account}/{trashfolder}<enter>" \\
|
||||
"move message to the trash"
|
||||
|
||||
macro index S \\
|
||||
"<clear-flag>N<save-message>+{account}/{spamfolder}<enter>" \\
|
||||
"mark message as spam"
|
||||
# vim: syntax=muttrc
|
||||
"""
|
||||
|
||||
for name, account in accounts.items():
|
||||
muttStr = MUTT_ACCOUNT.format(**account)
|
||||
|
||||
# Config
|
||||
muttFilepath = os.path.join(os.path.expanduser('~'), f'.mutt/accounts/{name}')
|
||||
with open(muttFilepath, 'w') as f:
|
||||
f.write(muttStr)
|
||||
|
||||
# Signature
|
||||
sigStr = account.get("sig", account.get("name", ""))
|
||||
sigFilepath = os.path.join(os.path.expanduser('~'), f'.mutt/accounts/{name}.sig')
|
||||
with open(sigFilepath, 'w') as f:
|
||||
f.write(sigStr)
|
||||
|
||||
MUTT_SELECTOR = """
|
||||
set folder = "{storage}"
|
||||
source ~/.mutt/mailboxes
|
||||
|
||||
source ~/.mutt/accounts/{main[account]}
|
||||
|
||||
{hooks}
|
||||
|
||||
source ~/.mutt/custom
|
||||
|
||||
# vim: syntax=muttrc
|
||||
"""
|
||||
|
||||
selectStr = ""
|
||||
hooks = ""
|
||||
for name, account in accounts.items():
|
||||
hooks += f"folder-hook {name}/* source ~/.mutt/accounts/{name}\n"
|
||||
selectStr += MUTT_SELECTOR.format(**general, hooks=hooks)
|
||||
selectFilepath = os.path.join(os.path.expanduser('~'), '.mutt/muttrc')
|
||||
with open(selectFilepath, 'w') as f:
|
||||
f.write(selectStr)
|
||||
|
||||
## Color
|
||||
for name, account in accounts.items():
|
||||
# Config
|
||||
colorFilepath = os.path.join(os.path.expanduser('~'), f'{general["storage"]}/{name}/color')
|
||||
with open(colorFilepath, 'w') as f:
|
||||
f.write(account['color'])
|
39
config/scripts/musiqueBof
Executable file
39
config/scripts/musiqueBof
Executable file
|
@ -0,0 +1,39 @@
|
|||
#!/usr/bin/env python3

"""Toggle music files between the main library and the "meh" library.

Each path given on the command line is moved from ~/Musique to
~/.MusiqueBof (or back), preserving its path relative to the library root.
"""

import sys
import os
import shutil
import logging
import coloredlogs

coloredlogs.install(level='DEBUG', fmt='%(levelname)s %(message)s')
log = logging.getLogger()

MUSICS_FOLDER = os.path.join(os.path.expanduser("~"), "Musique")
BOF_FOLDER = os.path.join(os.path.expanduser("~"), ".MusiqueBof")

for f in sys.argv[1:]:
    src = os.path.realpath(f)
    if not os.path.isfile(src):
        log.error("{} does not exists".format(src))
        continue

    # Decide which library the file currently lives in, and send it to the other
    srcBase = None
    if src.startswith(MUSICS_FOLDER):
        srcBase = MUSICS_FOLDER
        dstBase = BOF_FOLDER
    elif src.startswith(BOF_FOLDER):
        srcBase = BOF_FOLDER
        # BUG FIX: was `MUSIC_FOLDER` (undefined) — moving a file back out
        # of the Bof folder raised NameError.
        dstBase = MUSICS_FOLDER
    else:
        log.error("{} not in any music folder".format(src))
        continue

    # Re-root the relative path under the destination library
    common = os.path.relpath(src, srcBase)
    dst = os.path.join(dstBase, common)
    dstFolder = os.path.dirname(dst)

    log.info("{} → {}".format(src, dst))
    os.makedirs(dstFolder, exist_ok=True)
    shutil.move(src, dst)
|
||||
|
2
config/scripts/newestFile
Executable file
2
config/scripts/newestFile
Executable file
|
@ -0,0 +1,2 @@
|
|||
#!/usr/bin/env bash
# Print the most recently modified files under the current directory.
# Extra arguments are forwarded to tail (e.g. -n 20).
# Explicit "." start path: POSIX find requires one (output is identical
# on GNU find, which defaulted to "." already).
find . -type f -printf '%T+ %p\n' | sort | tail "$@"
|
72
config/scripts/o
Executable file
72
config/scripts/o
Executable file
|
@ -0,0 +1,72 @@
|
|||
#!/usr/bin/env python3

"""Open a file or URL with a program suited to its MIME type."""

import os
import sys
import magic
import subprocess
import urllib.request
import tempfile

# Getting what's needed
path = sys.argv[1]

# Getting the MIME type (sniffed from content, not from the extension)
ishttp = path.startswith('http')

buf = None
if ishttp:
    buf = urllib.request.urlopen(path)
    chunk = buf.read(1024)
    mime = magic.from_buffer(chunk, mime=True)
else:
    assert os.path.isfile(path), f"Not a file: {path}"
    path = os.path.realpath(path)
    mime = magic.from_file(path, mime=True)
mime = tuple(mime.split('/'))
assert len(mime) == 2

graphical = not not os.environ.get('DISPLAY')

# Some energumens
if mime[0] == "application" and mime[1] in ("json", "javascript"):
    mime = ("text", mime[1])

# Determine stuff
ex = None  # Executable needed to open the file
forcelocal = False  # If we need to copy the file locally before opening it
isterm = False  # Executable should run in a terminal

if mime[0] == "text":
    if not ishttp:
        ex = os.environ.get('VISUAL' if graphical else 'EDITOR', None)
        isterm = True
elif mime[0] == "image":
    ex = "feh"
elif mime[0] in ("audio", "video"):
    ex = "mpv"
    isterm = True
elif mime == ("application", "pdf"):
    ex = "llpp.inotify"
    forcelocal = True

# Open stuff
tmp = None
if ex:
    if forcelocal and ishttp:
        # Download to a local temp file for viewers that need a real path
        tmp = tempfile.NamedTemporaryFile(prefix='o')
        tmp.write(chunk)
        tmp.write(buf.read())
        # BUG FIX: flush buffered writes so the viewer below does not
        # read a truncated file.
        tmp.flush()
        path = tmp.name
else:
    ex = 'xdg-open'
    if ishttp:
        ex = os.environ.get('BROWSER', ex)
if buf:
    buf.close()

# TODO Launch a new terminal window for some

p = subprocess.run([ex, path])
if tmp:
    tmp.close()
sys.exit(p.returncode)
|
2
config/scripts/oldestFile
Executable file
2
config/scripts/oldestFile
Executable file
|
@ -0,0 +1,2 @@
|
|||
#!/usr/bin/env bash
# Print the least recently modified files under the current directory.
# Extra arguments are forwarded to head (e.g. -n 20).
# Explicit "." start path: POSIX find requires one (output is identical
# on GNU find, which defaulted to "." already).
find . -type f -printf '%T+ %p\n' | sort | head "$@"
|
172
config/scripts/optimize
Executable file
172
config/scripts/optimize
Executable file
|
@ -0,0 +1,172 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# Optimizes everything the script can find in a folder,
|
||||
# meaning it will compress files as much as possible,
|
||||
# without losing any data (verification will be done
|
||||
# in order to verify that no data has been lost)
|
||||
# (executable)
|
||||
|
||||
# TODO Run in parallel
|
||||
# TODO Lots of dupplicated code there
|
||||
|
||||
dir=${1:-$PWD}
|
||||
total=$(mktemp)
|
||||
echo -n 0 > $total
|
||||
|
||||
function showtotal {
|
||||
echo "Total saved: $(cat "$total") bytes"
|
||||
rm $total
|
||||
exit
|
||||
}
|
||||
|
||||
trap showtotal SIGTERM SIGINT SIGFPE
|
||||
|
||||
function doReplace { # candidate original
    # Promote the candidate file over the original and tally the bytes saved.
    # Expects $c, $o, $cs, $os to be set by the caller, and $total to point
    # at the temp file holding the running byte count.
    mv "$c" "$o"
    saved=$((os - cs))
    perc=$((100 * saved / os))
    echo "→ $os ⇒ $cs (saved $saved bytes, or ${perc}%)"
    newtotal=$(($(cat $total) + saved))
    echo -n $newtotal > $total
}
|
||||
|
||||
function replace { # candidate original
|
||||
c="$1"
|
||||
o="$2"
|
||||
|
||||
# File verifications
|
||||
if [ ! -f "$o" ]; then
|
||||
echo "→ Original is inexistant, skipping!"
|
||||
return
|
||||
fi
|
||||
if [ ! -f "$c" ]; then
|
||||
echo "→ Candidate is inexistant, skipping!"
|
||||
return
|
||||
fi
|
||||
|
||||
# Size verifications
|
||||
cs=$(wc -c "$c" | cut -d' ' -f1)
|
||||
os=$(wc -c "$o" | cut -d' ' -f1)
|
||||
if [ $cs -le 0 ]; then
|
||||
echo "→ Candidate is empty, skipping!"
|
||||
rm "$c"
|
||||
return
|
||||
fi
|
||||
if [ $cs -eq $os ]; then
|
||||
echo "→ Candidate weight the same, skipping."
|
||||
rm "$c"
|
||||
return
|
||||
fi
|
||||
if [ $cs -gt $os ]; then
|
||||
echo "→ Candidate is larger, skipping."
|
||||
rm "$c"
|
||||
return
|
||||
fi
|
||||
|
||||
doReplace "$c" "$o"
|
||||
}
|
||||
|
||||
function replaceImg { # candidate original
|
||||
# With bitmap verification
|
||||
|
||||
c="$1"
|
||||
o="$2"
|
||||
|
||||
# File verifications
|
||||
if [ ! -f "$o" ]; then
|
||||
echo "→ Original is inexistant, skipping!"
|
||||
return
|
||||
fi
|
||||
if [ ! -f "$c" ]; then
|
||||
echo "→ Candidate is inexistant, skipping!"
|
||||
return
|
||||
fi
|
||||
|
||||
# Size verifications
|
||||
cs=$(wc -c "$c" | cut -d' ' -f1)
|
||||
os=$(wc -c "$o" | cut -d' ' -f1)
|
||||
if [ $cs -le 0 ]; then
|
||||
echo "→ Candidate is empty, skipping!"
|
||||
rm "$c"
|
||||
return
|
||||
fi
|
||||
if [ $cs -eq $os ]; then
|
||||
echo "→ Candidate weight the same, skipping."
|
||||
rm "$c"
|
||||
return
|
||||
fi
|
||||
if [ $cs -gt $os ]; then
|
||||
echo "→ Candidate is larger, skipping."
|
||||
rm "$c"
|
||||
return
|
||||
fi
|
||||
|
||||
# Bitmap verification
|
||||
ppmc="$(mktemp --suffix .ppm)"
|
||||
ppmo="$(mktemp --suffix .ppm)"
|
||||
convert "$c" "$ppmc"
|
||||
convert "$o" "$ppmo"
|
||||
|
||||
if cmp --silent "$ppmo" "$ppmc"; then
|
||||
doReplace "$c" "$o"
|
||||
else
|
||||
echo "→ Candidate don't have the same bit map as original, skipping!"
|
||||
fi
|
||||
rm -f "$ppmc" "$ppmo" "$c"
|
||||
|
||||
}
|
||||
|
||||
# JPEG (requires jpegtran)
|
||||
while read image
|
||||
do
|
||||
if [ -z "$image" ]; then continue; fi
|
||||
echo Processing $image
|
||||
|
||||
prog=$(mktemp --suffix .jpg)
|
||||
jpegtran -copy all -progressive "$image" > "$prog"
|
||||
echo "→ Progressive done"
|
||||
progs=$(wc -c "$prog" | cut -d' ' -f1)
|
||||
replace "$prog" "$image"
|
||||
|
||||
|
||||
done <<< "$(find "$dir" -type f -iregex ".+.jpe?g$")"
|
||||
|
||||
# PNG (requires optipng)
|
||||
while read image
|
||||
do
|
||||
if [ -z "$image" ]; then continue; fi
|
||||
echo Processing $image
|
||||
|
||||
temp=$(mktemp --suffix .png)
|
||||
cp "$image" "$temp"
|
||||
optipng -quiet "$temp"
|
||||
echo "→ Optimize done"
|
||||
|
||||
replace "$temp" "$image"
|
||||
|
||||
done <<< "$(find "$dir" -type f -iname "*.png")"
|
||||
|
||||
# # SVG (requires scour)
|
||||
# while read image
|
||||
# do
|
||||
# if [ -z "$image" ]; then continue; fi
|
||||
# echo Processing $image
|
||||
#
|
||||
# temp=$(mktemp --suffix .svg)
|
||||
# scour --quiet "$image" "$temp" --no-line-breaks
|
||||
# echo "→ Optimize done"
|
||||
#
|
||||
# replaceImg "$temp" "$image"
|
||||
#
|
||||
# done <<< "$(find "$dir" -type f -iname "*.svg")"
|
||||
|
||||
# NOTE Explicitely disabled since:
|
||||
# - I only have ~50 MiB of SVG in TOTAL
|
||||
# - Most conversions are not image losseless
|
||||
# - Even when they are losseless, they are mostly worthless
|
||||
# - I might want to keep editor data and/or ids for some of them
|
||||
# So rather use scour explicitely when needed
|
||||
|
||||
cleandev
|
||||
|
||||
showtotal
|
1043
config/scripts/package-lock.json
generated
Normal file
1043
config/scripts/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load diff
22
config/scripts/package.json
Normal file
22
config/scripts/package.json
Normal file
|
@ -0,0 +1,22 @@
|
|||
{
|
||||
"name": "geoffreyfrogeye-dotfiles-scripts",
|
||||
"version": "1.0.0",
|
||||
"description": "Stores dependencies used for GeoffreyFrogeye's dotfiles scripts.",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.frogeye.fr/geoffrey/dotfiles"
|
||||
},
|
||||
"author": "GeoffreyFrogeye",
|
||||
"license": "GPL-3.0",
|
||||
"dependencies": {
|
||||
"highlight.js": "^9.11.0",
|
||||
"html-pdf": "^2.1.0",
|
||||
"katex": "^0.7.1",
|
||||
"marked": "^0.3.6",
|
||||
"yargs": "^8.0.1"
|
||||
}
|
||||
}
|
8
config/scripts/pdfpages
Executable file
8
config/scripts/pdfpages
Executable file
|
@ -0,0 +1,8 @@
|
|||
#!/usr/bin/bash

# Print the number of pages of each PDF file given as argument.
# From https://stackoverflow.com/a/14736593
for FILE in "$@"
do
    # BUG FIX: was `printf "$FILE: "` — a filename containing % or \
    # was interpreted as a printf format string.
    printf '%s: ' "$FILE"
    pdftk "$FILE" dump_data | grep NumberOfPages | awk '{print $2}'
done
|
49
config/scripts/pdfrename
Executable file
49
config/scripts/pdfrename
Executable file
|
@ -0,0 +1,49 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# Change the title of a PDF file
|
||||
|
||||
if [[ -z "$1" || -z "$2" ]]; then
|
||||
echo "Usage: $0 FILE TITLE [CREATOR [PRODUCER]]"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " FILE Path to the PDF document"
|
||||
echo " TITLE Content of the Title tag"
|
||||
echo " CREATOR Content of the Creator tag"
|
||||
echo " PRODUCER Title to give"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
file="$1"
|
||||
title="$2"
|
||||
creator="$3"
|
||||
producer="$4"
|
||||
|
||||
if [ ! -f "$1" ]; then
|
||||
echo "No such file or directory: $1" >> /dev/stderr
|
||||
exit 2
|
||||
fi
|
||||
|
||||
instructions=$(mktemp)
|
||||
|
||||
echo "InfoBegin" >> "$instructions"
|
||||
echo "InfoKey: Title" >> "$instructions"
|
||||
echo "InfoValue: $title" >> "$instructions"
|
||||
if [ -n "$creator" ]; then
|
||||
echo "InfoBegin" >> "$instructions"
|
||||
echo "InfoKey: Creator" >> "$instructions"
|
||||
echo "InfoValue: $creator" >> "$instructions"
|
||||
fi
|
||||
if [ -n "$producer" ]; then
|
||||
echo "InfoBegin" >> "$instructions"
|
||||
echo "InfoKey: Producer" >> "$instructions"
|
||||
echo "InfoValue: $producer" >> "$instructions"
|
||||
fi
|
||||
|
||||
copy=$(mktemp)
|
||||
cp "$file" "$copy"
|
||||
|
||||
pdftk "$copy" update_info "$instructions" output "$file"
|
||||
|
||||
rm "$instructions" "$copy"
|
||||
|
||||
|
103
config/scripts/proxy
Executable file
103
config/scripts/proxy
Executable file
|
@ -0,0 +1,103 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# Allows easy manipulation of the proxy variables
|
||||
|
||||
function proxy_set_help {
|
||||
echo "Usage: $0 set ADDRESS"
|
||||
echo
|
||||
echo "Arguments:"
|
||||
echo " ADDRESS Address of the proxy"
|
||||
echo
|
||||
echo "Examples:"
|
||||
echo ' eval "$(proxy set http://proxy.mycompany.com:3128/)"'
|
||||
return 0
|
||||
}
|
||||
|
||||
function proxy_set {
    # Emit `export` statements for the given proxy address.
    # Meant to be eval'ed by the calling shell:
    #   eval "$(proxy set http://proxy:3128/)"
    # BUG FIX: quote "$1" — the unquoted test broke on arguments with spaces.
    if [ -z "$1" ]; then
        proxy_set_help
        return 1
    fi
    echo "export http_proxy='$1'"
    echo "export https_proxy='$1'"
    echo "export ftp_proxy='$1'"
    echo "export rsync_proxy='$1'"
    # was `exit 0`: use `return` for consistency with the other proxy_*
    # functions (proxy_setup calls this and expects control back)
    return 0
}
|
||||
|
||||
function proxy_setup_help {
|
||||
echo "Usage: $0 setup"
|
||||
echo
|
||||
echo "Examples:"
|
||||
echo " proxy_set # Then eval the output"
|
||||
return 0
|
||||
}
|
||||
function proxy_setup {
|
||||
export no_proxy="localhost,127.0.0.1,localaddress,.localdomain.com"
|
||||
|
||||
if (( $# > 0 )); then
|
||||
valid=$(echo $@ | sed -n 's/\([0-9]\{1,3\}.\)\{4\}:\([0-9]\+\)/&/p')
|
||||
if [[ $valid != $@ ]]; then
|
||||
>&2 echo "Invalid address"
|
||||
return 1
|
||||
fi
|
||||
proxy_set "http://$1/"
|
||||
return 0
|
||||
fi
|
||||
|
||||
echo -n "User: "; read username
|
||||
if [[ $username != "" ]]; then
|
||||
echo -n "Password: "
|
||||
read -es password
|
||||
local pre="$username:$password@"
|
||||
fi
|
||||
|
||||
echo -n "Server: "; read server
|
||||
echo -n "Port: "; read port
|
||||
proxy_set "http://$pre$server:$port/"
|
||||
return 0
|
||||
}
|
||||
|
||||
function proxy_off_help {
|
||||
echo "Usage: $0 off"
|
||||
echo
|
||||
echo "Examples:"
|
||||
echo ' eval $(proxy off)'
|
||||
return 0
|
||||
}
|
||||
function proxy_off {
|
||||
echo 'unset http_proxy'
|
||||
echo 'unset https_proxy'
|
||||
echo 'unset ftp_proxy'
|
||||
echo 'unset rsync_proxy'
|
||||
return 0
|
||||
}
|
||||
|
||||
function proxy_help {
|
||||
command="$1"
|
||||
if [ -n "$command" ]; then
|
||||
if type "proxy_${command}_help" &> /dev/null; then
|
||||
shift
|
||||
"proxy_${command}_help" "$@"
|
||||
return $?
|
||||
fi
|
||||
fi
|
||||
echo "Usage: $0 COMMAND"
|
||||
echo
|
||||
echo "Commands:"
|
||||
echo " setup Interactively setup proxy"
|
||||
echo " set Set proxy from address"
|
||||
echo " off Turn off proxy"
|
||||
echo " help Get help with commands"
|
||||
return 0
|
||||
}
|
||||
|
||||
# MAIN
|
||||
command="$1"
|
||||
shift
|
||||
if type "proxy_$command" &> /dev/null; then
|
||||
"proxy_$command" "$@"
|
||||
else
|
||||
proxy_help
|
||||
fi
|
||||
|
80
config/scripts/pushToTalk
Executable file
80
config/scripts/pushToTalk
Executable file
|
@ -0,0 +1,80 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import pulsectl
|
||||
|
||||
from Xlib import X, XK, display
|
||||
from Xlib.ext import record
|
||||
from Xlib.protocol import rq
|
||||
|
||||
KEY = XK.XK_F7
|
||||
|
||||
def mute(state):
    """Mute (state=True) or unmute (state=False) every active audio source."""
    with pulsectl.Pulse('list-source') as pulse:
        for src in pulse.source_list():
            if not src.port_active:
                continue
            if src.mute == state:
                # already in the requested state; nothing to do
                continue
            pulse.mute(src, state)
            print(f"{src.name} {'un' if not state else ''}muted")
|
||||
|
||||
mute(True)
|
||||
|
||||
local_dpy = display.Display()
|
||||
record_dpy = display.Display()
|
||||
|
||||
|
||||
def record_callback(reply):
|
||||
if reply.category != record.FromServer:
|
||||
return
|
||||
if reply.client_swapped:
|
||||
print("* received swapped protocol data, cowardly ignored")
|
||||
return
|
||||
if not len(reply.data) or reply.data[0] < 2:
|
||||
# not an event
|
||||
return
|
||||
|
||||
data = reply.data
|
||||
while len(data):
|
||||
event, data = rq.EventField(None).parse_binary_value(
|
||||
data, record_dpy.display, None, None)
|
||||
|
||||
if event.type in [X.KeyPress, X.KeyRelease]:
|
||||
keysym = local_dpy.keycode_to_keysym(event.detail, 0)
|
||||
|
||||
if keysym == KEY:
|
||||
mute(event.type == X.KeyRelease)
|
||||
|
||||
# Check if the extension is present
|
||||
if not record_dpy.has_extension("RECORD"):
|
||||
print("RECORD extension not found")
|
||||
sys.exit(1)
|
||||
r = record_dpy.record_get_version(0, 0)
|
||||
print("RECORD extension version %d.%d" %
|
||||
(r.major_version, r.minor_version))
|
||||
|
||||
# Create a recording context; we only want key and mouse events
|
||||
ctx = record_dpy.record_create_context(
|
||||
0,
|
||||
[record.AllClients],
|
||||
[{
|
||||
'core_requests': (0, 0),
|
||||
'core_replies': (0, 0),
|
||||
'ext_requests': (0, 0, 0, 0),
|
||||
'ext_replies': (0, 0, 0, 0),
|
||||
'delivered_events': (0, 0),
|
||||
'device_events': (X.KeyPress, X.MotionNotify),
|
||||
'errors': (0, 0),
|
||||
'client_started': False,
|
||||
'client_died': False,
|
||||
}])
|
||||
|
||||
# Enable the context; this only returns after a call to record_disable_context,
|
||||
# while calling the callback function in the meantime
|
||||
try:
|
||||
record_dpy.record_enable_context(ctx, record_callback)
|
||||
except KeyboardInterrupt:
|
||||
local_dpy.record_disable_context(ctx)
|
||||
local_dpy.flush()
|
||||
|
||||
# Finally free the context
|
||||
record_dpy.record_free_context(ctx)
|
9
config/scripts/pw
Executable file
9
config/scripts/pw
Executable file
|
@ -0,0 +1,9 @@
|
|||
#!/usr/bin/env bash

# Generate strong enough password(s)

# This generates a password with ln((26*2+10)**32)/ln(2) ≅ 190 bits of entropy,
# which is a bit above the recommended standards (128 bits) while still having
# a 0 probability that the service will break because of incompatible characters
# (only alphanumeric characters are used).

# -s: fully random ("secure") passwords; 32: password length in characters.
pwgen 32 -s
|
2
config/scripts/rankmirrors
Executable file
2
config/scripts/rankmirrors
Executable file
|
@ -0,0 +1,2 @@
|
|||
#!/usr/bin/env bash

# Rank the pacman mirrors of the freshly shipped mirrorlist (.pacnew) by
# speed and install the 6 fastest ones as the active mirrorlist.
# Must run as root (writes to /etc/pacman.d/mirrorlist); the absolute path
# to rankmirrors avoids recursing into this very script.
# `egrep` is deprecated in GNU grep — use `grep -E` instead.
grep -E -o 'Server = .+' /etc/pacman.d/mirrorlist.pacnew | /usr/bin/rankmirrors -n 6 - > /etc/pacman.d/mirrorlist
|
12
config/scripts/remcrlf
Executable file
12
config/scripts/remcrlf
Executable file
|
@ -0,0 +1,12 @@
|
|||
#!/usr/bin/env bash

# Strip carriage returns (CRLF / ^M / \r) from the file given as $1, in place.

scratch=$(mktemp)

# tr cannot edit in place: work from a copy of the original file.
cp "$1" "$scratch"
tr -d '\r' < "$scratch" > "$1"
rm "$scratch"
|
||||
|
25
config/scripts/rep
Executable file
25
config/scripts/rep
Executable file
|
@ -0,0 +1,25 @@
|
|||
#!/usr/bin/env bash

# Moves a file to another place and put a symbolic link in place

function rep_help {
	echo "Usage: $0 SOURCE DEST"
	echo
	echo "Arguments:"
	echo " SOURCE File to be moved"
	echo " DEST Where to be moved"
	return 0
}

# MAIN
# Dispatch rep_* subcommands (e.g. `rep help`) first; otherwise treat the
# two arguments as SOURCE and DEST.  The original ran the mv/ln
# unconditionally before dispatching, so even `rep help` moved a file.
command="$1"
if type "rep_$command" &> /dev/null; then
	shift
	"rep_$command" "$@"
elif [ $# -eq 2 ]; then
	# Only create the link if the move actually succeeded.
	mv "$1" "$2" && ln -s "$2" "$1"
else
	rep_help
fi
|
||||
|
68
config/scripts/replayGain
Executable file
68
config/scripts/replayGain
Executable file
|
@ -0,0 +1,68 @@
|
|||
#!/usr/bin/env python3

# Tag a music library laid out as Artist/Album/track with ReplayGain.
# Normalisation is done at the default of each program,
# which is usually -89.0 dB

import os
import coloredlogs
import logging
import r128gain
import sys

coloredlogs.install(level='DEBUG', fmt='%(levelname)s %(message)s')
log = logging.getLogger()

# TODO Remove debug

# Constants
# -f: force retagging files that already carry ReplayGain tags.
FORCE = '-f' in sys.argv
if FORCE:
    sys.argv.remove('-f')
# Optional positional argument: library root (defaults to ~/Musiques).
SOURCE_FOLDER = os.path.realpath(sys.argv[1]) if len(sys.argv) >= 2 else os.path.join(os.path.expanduser("~"), "Musiques")


def isMusic(f):
    # A file is music if r128gain knows its extension.
    ext = os.path.splitext(f)[1][1:].lower()
    return ext in r128gain.AUDIO_EXTENSIONS


# Get album paths
log.info("Listing albums and tracks")
albums = set()
singleFiles = set()
for root, dirs, files in os.walk(SOURCE_FOLDER):

    relRoot = os.path.relpath(root, SOURCE_FOLDER)

    head, tail = os.path.split(relRoot)
    # 1 component in the path: save files path as single
    if not len(head):
        for f in files:
            if isMusic(f):
                fullPath = os.path.join(root, f)
                singleFiles.add(fullPath)
        # FIX: without this `continue`, 1-component folders (and the library
        # root itself) were also registered as albums below, so a whole
        # artist folder could get a single album gain.
        continue
    head, tail = os.path.split(head)
    if len(head):
        # 3+ components: covered when their 2-component album is processed.
        continue
    # 2 components in the path: save album path
    albums.add(root)

log.info("Processing single files")
# r128gain.process(list(singleFiles), album_gain=False, skip_tagged=not FORCE, report=True)
for album in albums:
    albumName = os.path.relpath(album, SOURCE_FOLDER)
    log.info("Processing album {}".format(albumName))

    # Collect every track below the album folder (any depth).
    musicFiles = set()
    for root, dirs, files in os.walk(album):
        for f in files:
            if isMusic(f):
                fullPath = os.path.join(root, f)
                musicFiles.add(fullPath)

    # print(musicFiles)
    if not len(musicFiles):
        continue
    r128gain.process(list(musicFiles), album_gain=True, skip_tagged=not FORCE, report=True)
    print("==============================")
|
||||
|
||||
|
||||
|
23
config/scripts/rmf
Executable file
23
config/scripts/rmf
Executable file
|
@ -0,0 +1,23 @@
|
|||
#!/usr/bin/env python3

# Rename Syncthing sync-conflict files back to their normal names.

# WARNING Does not check for conflicts

import os
import re

# Suffix Syncthing appends, e.g. ".sync-conflict-20190304-123456-ABCDEFG".
# Raw string + escaped dot: the original pattern let the leading "."
# match any character and relied on "\d" in a non-raw string
# (a DeprecationWarning on recent Pythons).
CONFLICT_RE = re.compile(r'\.sync-conflict-\d{8}-\d{6}-\w{7}')

for root, dirs, files in os.walk('.'):
    for f in files:
        if '.sync-conflict' not in f:
            continue
        nf = CONFLICT_RE.sub('', f)
        F = os.path.join(root, f)
        NF = os.path.join(root, nf)
        if os.path.exists(NF):
            # Never overwrite: the restored name already exists.
            print(f"'{F}' → '{NF}': file already exists")
        else:
            print(f"'{F}' → '{NF}': done")
            os.rename(F, NF)
|
||||
|
||||
|
2
config/scripts/rms
Executable file
2
config/scripts/rms
Executable file
|
@ -0,0 +1,2 @@
|
|||
#!/usr/bin/env bash
# Delete every Syncthing sync-conflict file under the current directory.
find . -name "*.sync-conflict-*" -delete
|
47
config/scripts/sedrename
Executable file
47
config/scripts/sedrename
Executable file
|
@ -0,0 +1,47 @@
|
|||
#!/usr/bin/env bash

# Rename a list of files (one per line on stdin) with a sed pattern

usage() {
	echo "Usage: $0 PATTERN [-d] < filelist"
	echo
	echo "Arguments:"
	echo " PATTERN Sed pattern to apply"
	echo
	echo "Options:"
	echo " -d Dry run"
	exit 1
}

if [[ -z "$1" ]]; then
	usage
fi

pattern="$1"

# dry=0 → dry run (only print the renames); dry=1 → actually rename.
dry=1
if [[ -n "$2" ]]; then
	if [[ "$2" = '-d' ]]; then
		dry=0
	else
		usage
	fi
fi

# IFS= and -r keep leading/trailing whitespace and backslashes in
# filenames intact (plain `read` mangles both).
while IFS= read -r src
do
	dst="$(echo "$src" | sed "$pattern")"
	if [[ $? != 0 ]]; then
		echo "ERREUR Invalid sed pattern"
		exit 2
	fi
	if [[ $dry == 0 ]]; then
		echo "$src" → "$dst"
	else
		mv -- "$src" "$dst"
	fi
done
|
||||
|
||||
|
||||
|
4
config/scripts/showKeyboardLayout
Executable file
4
config/scripts/showKeyboardLayout
Executable file
|
@ -0,0 +1,4 @@
|
|||
#!/usr/bin/bash

# Display a picture of the current X keyboard layout (layout + variant).

# Extract one field (e.g. "layout", "variant") from `setxkbmap -query`.
query_field() {
	setxkbmap -query | grep "$1" | tr -s ' ' | cut -d ' ' -f2
}

layout=$(query_field layout)
variant=$(query_field variant)

gkbd-keyboard-display -l ${layout}$'\t'${variant}
|
20
config/scripts/syncthingRestore
Executable file
20
config/scripts/syncthingRestore
Executable file
|
@ -0,0 +1,20 @@
|
|||
#!/usr/bin/env python3
"""Restore files from a Syncthing .stversions folder.

Run from inside a .stversions directory tree: every file below the
current directory is copied back to the matching path in the real
folder (the same path with the ".stversions/" component removed).
"""

import os
import shutil

curDir = os.path.realpath('.')
# NOTE(review): assumes the cwd is strictly *below* .stversions — running
# in .stversions itself yields a path without the trailing slash and the
# assertion fails. Confirm whether that is intended.
assert '.stversions/' in curDir
tgDir = curDir.replace('.stversions/', '')


for root, dirs, files in os.walk(curDir):
    # Mirror the directory layout on the destination side.
    dstRoot = root.replace(curDir, tgDir)
    os.makedirs(dstRoot, exist_ok=True)
    for f in files:
        srcPath = os.path.join(root, f)
        # NOTE(review): Syncthing-versioned names normally carry a
        # ~YYYYMMDD-HHMMSS suffix which is not stripped here — verify.
        dstF = f
        dstPath = os.path.join(dstRoot, dstF)
        print(f"{srcPath} → {dstPath}")
        # copy2 preserves metadata (timestamps, permissions).
        shutil.copy2(srcPath, dstPath)
|
||||
|
17
config/scripts/tagCreatorPhotos
Executable file
17
config/scripts/tagCreatorPhotos
Executable file
|
@ -0,0 +1,17 @@
|
|||
#!/usr/bin/env python3
"""Write a creator name into the EXIF Copyright tag of each given photo."""

import sys
import os
import piexif

assert len(sys.argv) >= 3, "Usage {} CREATOR FILENAMES...".format(sys.argv[0])
_, creator, *filenames = sys.argv

for path in filenames:
    assert os.path.isfile(path)
    # Load the existing EXIF data, patch the Copyright field, write it back.
    exif_data = piexif.load(path)
    exif_data['0th'][piexif.ImageIFD.Copyright] = creator.encode()
    piexif.insert(piexif.dump(exif_data), path)
|
||||
|
65
config/scripts/ter
Executable file
65
config/scripts/ter
Executable file
|
@ -0,0 +1,65 @@
|
|||
#!/usr/bin/env python3
"""Find the cheapest and priciest way to chain train tickets.

Given a list of station names (in travel order) on the command line,
interactively asks the price of every direct ticket between each ordered
pair of stations, enumerates every path from the first to the last
station, and reports the minimum and maximum total price.
"""

import sys
from math import inf

# Station names, in travel order.
gares = sys.argv[1:]

N = len(gares)

if N < 2:
    print("Ben reste chez toi alors.")
    sys.exit(1)

def trajet_str(a, b):
    # Human-readable label of the leg from station index a to station index b.
    return f"{gares[a]} → {gares[b]}"

def chemin_str(stack):
    # Human-readable label of a whole path (a list of station indices).
    return ", ".join([trajet_str(stack[i], stack[i+1]) for i in range(len(stack)-1)])

# Ask the price of every possible leg (i < j), re-prompting until a
# valid number is entered (comma accepted as decimal separator).

prices = dict()

for i in range(N):
    for j in range(N-1, i, -1):
        p = None
        while not isinstance(p, float):
            try:
                p = float(input(f"Prix du trajet {trajet_str(i, j)} ? ").replace(',', '.'))
            except ValueError:
                print("C'est pas un prix ça !")
        if i not in prices:
            prices[i] = dict()
        prices[i][j] = float(p)

# Compute the total price of every path, keeping the min and the max.

miniPrice = +inf
miniStack = None
maxiPrice = -inf
maxiStack = None

def register_path(stack):
    # Sum the leg prices along the path and update the running min/max.
    price = sum([prices[stack[i]][stack[i+1]]for i in range(len(stack)-1)])

    global miniPrice, maxiPrice, miniStack, maxiStack
    if price < miniPrice:
        miniPrice = price
        miniStack = stack.copy()
    if price > maxiPrice:
        maxiPrice = price
        maxiStack = stack.copy()
    print(f"{chemin_str(stack)} = {price:.2f} €")

# Iterative depth-first enumeration of all strictly-increasing index
# sequences starting at 0 and ending at N-1.
stack = [0]
while stack[0] == 0:
    if stack[-1] >= N - 1:
        # Reached the last station: record the path, then backtrack.
        register_path(stack)
        stack.pop()
        stack[-1] += 1
    else:
        stack.append(stack[-1]+1)

print(f"Prix minimum: {chemin_str(miniStack)} = {miniPrice:.2f} €")
print(f"Prix maximum: {chemin_str(maxiStack)} = {maxiPrice:.2f} €")
|
21
config/scripts/totask
Executable file
21
config/scripts/totask
Executable file
|
@ -0,0 +1,21 @@
|
|||
#!/usr/bin/env python3
"""Print the total urgency (Σ) and count (#) of actionable Taskwarrior tasks.

Extra command-line arguments are passed through to Taskwarrior as filters.
"""

import taskw
import sys

tw = taskw.TaskWarrior()

total = 0
number = 0
# NOTE(review): `statuses` is collected but never used — debugging leftover?
statuses = set()
# NOTE(review): _get_task_objects is a private taskw API; may break on upgrade.
for task in tw._get_task_objects(*sys.argv[1:], 'export'):
    statuses.add(task['status'])
    # Only actionable tasks count.
    if task['status'] not in {'pending', 'waiting'}:
        continue
    urgency = task['urgency']
    # Zero or negative urgency does not contribute.
    if urgency <= 0:
        continue
    total += urgency
    number += 1

print(f"Σ{total:.3f} #{number}")
|
2
config/scripts/tracefiles
Executable file
2
config/scripts/tracefiles
Executable file
|
@ -0,0 +1,2 @@
|
|||
#!/usr/bin/env sh
# Trace all file-related syscalls of the given command.
# Usage: tracefiles PROG [ARGS...]
# -f: follow forks; -t: timestamp each line; -e trace=file: file syscalls only.
# FIX: pass the command through — strace errors out without PROG or -p.
strace -f -t -e trace=file "$@"
|
20
config/scripts/transfer
Executable file
20
config/scripts/transfer
Executable file
|
@ -0,0 +1,20 @@
|
|||
#!/usr/bin/env sh

# Upload a file (or stdin) to transfer.sh and print the resulting URL.

if [ $# -eq 0 ]
then
    # printf instead of the non-portable `echo -e`, and `exit` instead of
    # `return`, which is invalid outside a function in POSIX sh.
    printf 'No arguments specified. Usage:\necho transfer /tmp/test.md\ncat /tmp/test.md | transfer test.md\n'
    exit 1
fi

tmpfile=$( mktemp -t transferXXX )

if tty -s
then
    # Interactive: upload the named file, sanitizing its basename for URLs.
    basefile=$(basename "$1" | sed -e 's/[^a-zA-Z0-9._-]/-/g')
    curl --progress-bar --upload-file "$1" "https://transfer.sh/$basefile" >> "$tmpfile"
else
    # Input piped in: upload stdin under the given name.
    curl --progress-bar --upload-file "-" "https://transfer.sh/$1" >> "$tmpfile"
fi

cat "$tmpfile"
rm -f "$tmpfile"
|
24
config/scripts/tunnel
Executable file
24
config/scripts/tunnel
Executable file
|
@ -0,0 +1,24 @@
|
|||
#!/usr/bin/env bash

# Dynamically determines if the ssh connection
# is to be proxied through `proxytunnel`
# To be used with ssh_config ProxyCommand

host="$1"
port="$2"

if [ -z "$http_proxy" ]; then
	# No proxy configured: plain TCP passthrough.
	socat "TCP:$host:$port" -
else
	# Strip scheme and trailing slash: [user:pass@]host:port remains.
	proxy=$(echo "$http_proxy" | sed 's/^https\?:\/\///' | sed 's/\/$//')
	port=443 # Most won't want this
	# -q: the original `grep '@'` printed the match on stdout, which is the
	# ssh data channel when used as a ProxyCommand.
	if echo "$proxy" | grep -q '@'; then
		# Credentials come before the '@', the proxy address after it.
		# FIX: the original passed the credentials to -p (proxy host)
		# and the address to -P (proxy auth) — swapped.
		auth=$(echo $proxy | cut -d '@' -f 1)
		proxy=$(echo $proxy | cut -d '@' -f 2)
		proxytunnel -p $proxy -P $auth -d $host:$port
	else
		proxytunnel -p $proxy -d $host:$port
	fi
fi
|
||||
|
34
config/scripts/unziptree
Executable file
34
config/scripts/unziptree
Executable file
|
@ -0,0 +1,34 @@
|
|||
#!/usr/bin/env python

# Walk the current tree and unpack every archive found, each into a
# directory named after the archive (suffix stripped). The archive is
# deleted only after a successful extraction.

import os
import subprocess

# Archive suffix → extraction command (the archive path is appended).
# Compound suffixes come before their ".tar" prefix so they match first.
EXTRACTORS = [
    (".zip", ["unzip"]),
    (".7z", ["7z", "e"]),
    (".rar", ["unrar", "x"]),
    (".tar.gz", ["tar", "xzf"]),
    (".tar.xz", ["tar", "xJf"]),
    (".tar", ["tar", "xf"]),
]

for root, dirs, files in os.walk("."):
    for name in files:
        for suffix, base_cmd in EXTRACTORS:
            if name.endswith(suffix):
                # Strip the whole suffix, so "a.tar.gz" extracts into "a"
                # (os.path.splitext only stripped ".gz", leaving "a.tar").
                base = name[:-len(suffix)]
                cmd = list(base_cmd)
                break
        else:
            # Not a recognized archive.
            continue

        filepath = os.path.join(root, name)
        dirpath = os.path.join(root, base)
        print(filepath)

        os.mkdir(dirpath)

        cmd.append(os.path.realpath(filepath))
        # check=True raises on failure, so a broken archive is never
        # deleted below.
        subprocess.run(cmd, check=True, cwd=dirpath)

        os.unlink(filepath)
|
121
config/scripts/updateCompressedMusic
Executable file
121
config/scripts/updateCompressedMusic
Executable file
|
@ -0,0 +1,121 @@
|
|||
#!/usr/bin/env python3
"""Maintain a compressed mirror of ~/Musiques in ~/.musicCompressed.

Lossless files (flac) are converted to opus, other allowed files are
hard-linked, and mirror files whose source disappeared are deleted.
"""

import os
import subprocess
import progressbar
import logging
import coloredlogs

coloredlogs.install(level='DEBUG', fmt='%(levelname)s %(message)s')
log = logging.getLogger()

# Constants
SOURCE_FOLDER = os.path.join(os.path.expanduser("~"), "Musiques")
OUTPUT_FOLDER = os.path.join(os.path.expanduser("~"), ".musicCompressed")
# Source extension → mirror extension for files that need converting.
CONVERSIONS = {"flac": "opus"}
# Extensions never mirrored…
FORBIDDEN_EXTENSIONS = ["jpg", "pdf", "ffs_db"]
# …except for these exact basenames (album covers).
FORGIVEN_FILENAMES = ["cover.jpg", "front.jpg"]
# Folders allowed to remain empty in the mirror.
IGNORED_EMPTY_FOLDER = [".stfolder"]

# TODO FEAT Make the directory structure the same as the base one and
# remove IGNORED_EMPTY_FOLDER variable

# Listing files
# Both maps go relative path → timestamp.
# NOTE(review): getctime (inode change time) is used for freshness;
# getmtime would be the more usual choice — confirm this is intended.
log.info("Listing files")
sourceFiles = dict()
for root, dirs, files in os.walk(SOURCE_FOLDER):
    for f in files:
        fullPath = os.path.join(root, f)
        path = os.path.relpath(fullPath, SOURCE_FOLDER)
        sourceFiles[path] = os.path.getctime(fullPath)

outputFiles = dict()
for root, dirs, files in os.walk(OUTPUT_FOLDER):
    for f in files:
        fullPath = os.path.join(root, f)
        path = os.path.relpath(fullPath, OUTPUT_FOLDER)
        outputFiles[path] = os.path.getctime(fullPath)

# Sorting files
# remainingConversions: source path → mirror path still to produce.
remainingConversions = dict()
# extraFiles: mirror files with no remaining source (to delete).
extraFiles = set(outputFiles.keys())


def convertPath(path):
    """Return the mirror path for *path*, or False if it must be skipped."""
    filename, extension = os.path.splitext(path)
    extension = extension[1:].lower()
    # If the extension isn't allowed
    if extension in FORBIDDEN_EXTENSIONS:
        basename = os.path.basename(path)
        # And the filename is not an exception
        if basename not in FORGIVEN_FILENAMES:
            # This file shouldn't be copied nor converted
            return False
    # If this needs a conversion
    elif extension in CONVERSIONS:
        extension = CONVERSIONS[extension]
        return filename + "." + extension
    # In all other case, this is a simple copy
    return path


log.info("Determining action over {} files".format(len(sourceFiles)))
for sourceFile in sourceFiles:
    outputFile = convertPath(sourceFile)
    # If the file should not be converted, do nothing
    # NOTE(review): `== False` distinguishes False from a path string here;
    # `is False` would state the intent more precisely.
    if outputFile == False:
        continue
    # If the file already has something as an output
    elif outputFile in outputFiles:
        extraFiles.remove(outputFile)
        # If the output file is newer than the source file, do not initiate a
        # conversion
        if outputFiles[outputFile] >= sourceFiles[sourceFile]:
            continue
    # If the file needs to be converted, do it
    remainingConversions[sourceFile] = outputFile

log.debug("{} actions will need to be taken".format(len(remainingConversions)))
log.info("Copying files that do not require a conversion")
conversions = set()
for sourceFile in remainingConversions:
    outputFile = remainingConversions[sourceFile]

    # Creating folder if it doesn't exists
    fullOutputFile = os.path.join(OUTPUT_FOLDER, outputFile)
    fullOutputDir = os.path.dirname(fullOutputFile)
    os.makedirs(fullOutputDir, exist_ok=True)

    # Converting
    fullSourceFile = os.path.join(SOURCE_FOLDER, sourceFile)
    if sourceFile == outputFile:
        # Same path → no transcoding: hard-link to save space.
        log.debug('{} → {}'.format(fullSourceFile, fullOutputFile))
        if os.path.isfile(fullOutputFile):
            os.remove(fullOutputFile)
        os.link(fullSourceFile, fullOutputFile)
    else:
        # Different path → queue for ffmpeg transcoding below.
        conversions.add((fullSourceFile, fullOutputFile))

log.info("Removing extra files")
for extraFile in extraFiles:
    fullExtraFile = os.path.join(OUTPUT_FOLDER, extraFile)
    log.debug('× {}'.format(fullExtraFile))
    os.remove(fullExtraFile)

log.info("Listing files that will be converted")
for fullSourceFile, fullOutputFile in conversions:
    log.debug('{} ⇒ {}'.format(fullSourceFile, fullOutputFile))

log.info("Converting files")
for fullSourceFile, fullOutputFile in progressbar.progressbar(conversions):
    cmd = ["ffmpeg", "-y", "-i", fullSourceFile, "-c:a", "libopus",
           "-movflags", "+faststart", "-b:a", "128k", "-vbr", "on",
           "-compression_level", "10", fullOutputFile]
    subprocess.run(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

# Removing empty dirs
for root, dirs, files in os.walk(OUTPUT_FOLDER):
    if not dirs and not files:
        dirBasename = os.path.basename(root)
        if dirBasename not in IGNORED_EMPTY_FOLDER:
            os.rmdir(root)
|
3
config/scripts/updatedate
Executable file
3
config/scripts/updatedate
Executable file
|
@ -0,0 +1,3 @@
|
|||
#!/usr/bin/env bash

# Print the ssh command that would set a remote host's clock to the local time.
# NOTE(review): the leading `echo` means this only *prints* the command
# instead of running it — drop the echo (or pipe into sh) if it is
# actually meant to execute.

echo ssh "$1" sudo date --set="'$(date -R)'"
|
20
config/scripts/vidcmp
Executable file
20
config/scripts/vidcmp
Executable file
|
@ -0,0 +1,20 @@
|
|||
#!/usr/bin/env python3

# Compresses video using FFMPEG using
# FFMPEG's reasonable default settings
# NOTE(review): work in progress — `subprocess` and the `remove` flag are
# parsed but never used; the loop currently only prints the split paths.

import os
import sys
import subprocess

# Files to compress; the optional -r flag is extracted below.
files = sys.argv[1:]

# -r: remove the source file after compression (not implemented yet).
remove = False
if '-r' in files:
    files.remove('-r')
    remove = True

for f in files:
    print(os.path.splitext(f))
|
||||
|
||||
|
Loading…
Add table
Add a link
Reference in a new issue