Reorganized and added install-program fix

This commit is contained in:
Brian Zalewski 2023-08-26 07:45:14 -04:00 committed by GitHub
parent fc88131bba
commit 8a56619bf0
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
24 changed files with 2135 additions and 389 deletions

View file

@ -21,7 +21,7 @@
- https://github.com/gotify/server
- https://github.com/typicode/lowdb
- https://github.com/sindresorhus/execa
- [Title](https://github.com/mde/ejs)
- https://github.com/mde/ejs
## System

View file

@ -28,18 +28,10 @@ This page outlines various projects and tasks that we are currently working on.
- https://github.com/containers/toolbox consider for p10k.zsh file
- Figure out where Vector service fits in
- Figure out if Squid can be used to improve web surfing speed
- https://github.com/mumoshu/variant (With Task)
- https://github.com/marshyski/quick-secure
- Consider leveraging a CLI builder powered by structured data like [Variant](https://github.com/mumoshu/variant) and / or [Variant2](https://github.com/mumoshu/variant2)
- Consider implementing a tool like [QuickSecure](https://github.com/marshyski/quick-secure) that will ensure proper permissions on boot
- https://www.haskell.org/ghcup/install/#how-to-install
- https://github.com/material-shell/material-shell
- https://github.com/arxanas/git-branchless
- https://github.com/mumoshu/variant2
- https://github.com/burnison/tasksync
- https://github.com/Infisical/infisical
- https://github.com/xwmx/nb
- https://github.com/psychic-api/psychic
- https://github.com/pimutils/vdirsyncer
- https://github.com/librevault/librevault
## Upstream
@ -86,6 +78,7 @@ The following links include software that need to be reviewed before including t
The following items are Docker containers that we may want to include as default containers deployed in our system.
- https://github.com/Infisical/infisical
- https://github.com/highlight/highlight
- https://github.com/jitsi/jitsi-videobridge
- https://github.com/gitlabhq/gitlabhq

View file

@ -7,6 +7,7 @@ githubLocation: https://github.com/megabyte-labs/install.doctor/blob/master/home
scriptLocation: https://github.com/megabyte-labs/install.doctor/raw/master/home/dot_config/shell/profile.sh.tmpl
repoLocation: home/dot_config/shell/profile.sh.tmpl
---
# Shared Profile
Main shell profile that is used to combine the shared profile configurations that are used by both the `~/.bashrc` and `~/.zshrc` files
@ -16,8 +17,6 @@ Main shell profile that is used to combine the shared profile configurations tha
This script is included by `~/.bashrc` and `~/.zshrc` to include imports and settings that are common to both the Bash
and ZSH shells.
## Source Code
```
@ -114,8 +113,8 @@ if [ "$BASH_SUPPORT" = 'true' ]; then
fi
### fzf-git
#if [ -f "$HOME/.local/scripts/fzf-git.bash" ]; then
# . "$HOME/.local/scripts/fzf-git.bash"
# if [ -f "${XDG_DATA_HOME:-$HOME/.local/share}/fzf/fzf-git.bash" ]; then
# . "${XDG_DATA_HOME:-$HOME/.local/share}/fzf/fzf-git.bash"
# fi
### git-fuzzy
@ -143,8 +142,8 @@ fi
# fi
### fzf-tmux
#if [ -f "$HOME/.local/scripts/fzf-tmux.bash" ]; then
# . "$HOME/.local/scripts/fzf-tmux.bash"
#if [ -f "${XDG_DATA_HOME:-$HOME/.local/share}/fzf/fzf-tmux.bash" ]; then
# . "${XDG_DATA_HOME:-$HOME/.local/share}/fzf/fzf-tmux.bash"
#fi
### McFly

View file

@ -365,6 +365,7 @@ softwareGroups:
- gopass
- grex
- gron
- has
- handlr
- helix
- hexyl
@ -394,6 +395,7 @@ softwareGroups:
- resume
- s-search
- sad
- safe-rm
- search-gpt
- shml
- shx
@ -410,6 +412,7 @@ softwareGroups:
- timewarrior
- tmuxinator
- up
- vdirsyncer
- wallpaper-cli
- whereami
- wipe-modules
@ -599,6 +602,7 @@ softwareGroups:
- git
- gitql
- git-bug
- git-branchless
- git-extras
- git-filter-repo
- git-jump

View file

@ -1,66 +1,21 @@
{{- $refreshPeriod := "240h" }}
######################################
### Security Certificates ############
######################################
### Git Template
# [".config/git/template/_/husky.sh"]
# type = "file"
# url = "https://github.com/typicode/husky/raw/main/husky.sh"
# refreshPeriod = "{{ $refreshPeriod }}"
### CloudFlare
[".local/etc/ssl/cloudflare/Cloudflare_CA.crt"]
### Rundeck
[".local/system/src/var/lib/rundeck/libext/ansible-plugin-3.2.2.jar"]
type = "file"
url = "https://developers.cloudflare.com/cloudflare-one/static/documentation/connections/Cloudflare_CA.crt"
[".local/etc/ssl/cloudflare/Cloudflare_CA.pem"]
type = "file"
url = "https://developers.cloudflare.com/cloudflare-one/static/documentation/connections/Cloudflare_CA.pem"
### cURL / Google Cloud SDK
[".local/etc/ssl/curl/cacert.pem"]
type = "file"
url = "https://curl.se/ca/cacert.pem"
### GPG
[".gnupg/gpg.conf"]
type = "file"
url = "https://raw.githubusercontent.com/drduh/config/master/gpg.conf"
### Vagrant
[".ssh/authorized_keys.vagrant"]
type = "file"
url = "https://raw.githubusercontent.com/hashicorp/vagrant/main/keys/vagrant.pub"
url = "https://github.com/rundeck-plugins/ansible-plugin/releases/download/v3.2.2/ansible-plugin-3.2.2.jar"
refreshPeriod = "{{ $refreshPeriod }}"
### Update scripts
[".local/bin/update"]
### Chef Bento
[".local/src/bento"]
type = "git-repo"
url = "https://github.com/UpdateCommand/update.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
refreshPeriod = "{{ $refreshPeriod }}"
### Betelgeuse Theme
[".local/src/betelgeuse"]
type = "git-repo"
url = "https://gitlab.com/megabyte-labs/misc/betelgeuse.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/betelgeuse"]
type = "git-repo"
url = "https://gitlab.com/megabyte-labs/misc/betelgeuse.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/candy-icons"]
type = "git-repo"
url = "https://github.com/ProfessorManhattan/candy-icons.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/yoru"]
type = "git-repo"
url = "https://github.com/rxyhn/yoru.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/dracula"]
type = "git-repo"
url = "https://github.com/dracula/dracula-theme.git"
url = "https://github.com/installdoc/bento.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
@ -73,86 +28,25 @@
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".config/desktop/gnome.yml"]
type = "file"
url = "https://gitlab.com/megabyte-labs/gas-station/-/raw/master/environments/prod/group_vars/all/defaults.yml"
[".config/desktop/settings.yml"]
type = "file"
url = "https://gitlab.com/megabyte-labs/gas-station/-/raw/master/roles/system/theme/vars/main.yml"
[".config/helm/config.yml"]
type = "file"
url = "https://gitlab.com/megabyte-labs/gas-station/-/raw/master/environments/prod/group_vars/all/helm.yml"
refreshPeriod = "{{ $refreshPeriod }}"
[".local/share/blesh/src"]
type = "git-repo"
url = "https://github.com/akinomyoga/ble.sh.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--shallow-submodules", "--recursive", "--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/bash_it"]
type = "git-repo"
url = "https://github.com/Bash-it/bash-it.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/emsdk"]
type = "git-repo"
url = "https://github.com/emscripten-core/emsdk.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/has"]
type = "git-repo"
url = "https://github.com/kdabir/has.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/wait-for-it"]
type = "git-repo"
url = "https://github.com/vishnubob/wait-for-it.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/hoard"]
type = "git-repo"
url = "https://github.com/Hyde46/hoard.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/shell-safe-rm"]
type = "git-repo"
url = "https://github.com/kaelzhang/shell-safe-rm.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/extract"]
type = "git-repo"
url = "https://github.com/xvoland/Extract.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/up"]
type = "git-repo"
url = "https://github.com/shannonmoeller/up.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/shell/sensible.bash"]
type = "file"
url = "https://raw.githubusercontent.com/mrzool/bash-sensible/master/sensible.bash"
refreshPeriod = "{{ $refreshPeriod }}"
[".local/share/rsync-time-backup"]
type = "git-repo"
url = "https://github.com/laurent22/rsync-time-backup.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/tmpmail"]
type = "git-repo"
url = "https://github.com/sdushantha/tmpmail.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/concurrent"]
type = "git-repo"
url = "https://github.com/themattrix/bash-concurrent.git"
@ -171,53 +65,7 @@
# refreshPeriod = "{{ $refreshPeriod }}"
# clone.args = ["--depth", "1"]
# pull.args = ["--ff-only"]
[".config/shell/lscolors.sh"]
type = "file"
url = "https://raw.githubusercontent.com/trapd00r/LS_COLORS/master/lscolors.sh"
refreshPeriod = "{{ $refreshPeriod }}"
[".config/tmux/tmux.conf"]
type = "file"
url = "https://raw.githubusercontent.com/gpakosz/.tmux/master/.tmux.conf"
refreshPeriod = "{{ $refreshPeriod }}"
[".config/tmux/tmux.conf.local"]
type = "file"
url = "https://raw.githubusercontent.com/gpakosz/.tmux/master/.tmux.conf.local"
[".local/scripts/antigen.zsh"]
type = "file"
url = "https://raw.githubusercontent.com/zsh-users/antigen/develop/bin/antigen.zsh"
refreshPeriod = "{{ $refreshPeriod }}"
[".local/scripts/fzf-git.bash"]
type = "file"
url = "https://raw.githubusercontent.com/junegunn/fzf-git.sh/main/fzf-git.sh"
refreshPeriod = "{{ $refreshPeriod }}"
[".local/scripts/fzf-tmux.bash"]
type = "file"
url = "https://raw.githubusercontent.com/junegunn/fzf/master/bin/fzf-tmux"
refreshPeriod = "{{ $refreshPeriod }}"
[".local/share/delta/themes.gitconfig"]
type = "file"
url = "https://raw.githubusercontent.com/dandavison/delta/master/themes.gitconfig"
refreshPeriod = "{{ $refreshPeriod }}"
[".local/share/zsh/site-functions/fzf.zsh"]
type = "file"
url = "https://raw.githubusercontent.com/junegunn/fzf/master/shell/completion.zsh"
refreshPeriod = "{{ $refreshPeriod }}"
[".local/share/zsh/site-functions/fzf-key-bindings.zsh"]
type = "file"
url = "https://raw.githubusercontent.com/junegunn/fzf/master/shell/key-bindings.zsh"
refreshPeriod = "{{ $refreshPeriod }}"
[".config/vim/autoload/plug.vim"]
type = "file"
url = "https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim"
refreshPeriod = "{{ $refreshPeriod }}"
### ASDF
[".local/share/asdf"]
type = "git-repo"
url = "https://github.com/asdf-vm/asdf.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
{{- if eq .host.distro.id "darwin" }}
### Crunch
@ -229,92 +77,8 @@
pull.args = ["--ff-only"]
{{- end }}
{{- if (lookPath "apt-get") }}
### Netdata Debsecan
[".local/share/netdata-debsecan"]
type = "git-repo"
url = "https://gitlab.com/nodiscc/netdata-debsecan.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
{{- end }}
### Netdata Speedtest (required for Netdata speedtest plugin)
[".local/share/netdata-speedtest"]
type = "git-repo"
url = "https://github.com/ohthehugemanatee/netdata-speedtest.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### Git Template
# [".config/git/template/_/husky.sh"]
# type = "file"
# url = "https://github.com/typicode/husky/raw/main/husky.sh"
# refreshPeriod = "{{ $refreshPeriod }}"
### Taskfiles
[".local/share/shared-common"]
type = "git-repo"
url = "https://gitlab.com/megabyte-labs/common/shared.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### Rundeck
[".local/system/src/var/lib/rundeck/libext/ansible-plugin-3.2.2.jar"]
type = "file"
url = "https://github.com/rundeck-plugins/ansible-plugin/releases/download/v3.2.2/ansible-plugin-3.2.2.jar"
refreshPeriod = "{{ $refreshPeriod }}"
### Chef Bento
[".local/share/bento"]
type = "git-repo"
url = "https://github.com/installdoc/bento.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### Git Fuzzy
[".local/share/git-fuzzy"]
type = "git-repo"
url = "https://github.com/bigH/git-fuzzy.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### AI / GPT
[".local/share/agentgpt"]
type = "git-repo"
url = "https://github.com/reworkd/AgentGPT.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/kaguya"]
type = "git-repo"
url = "https://github.com/ykdojo/kaguya.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/localgpt"]
type = "git-repo"
url = "https://github.com/PromtEngineer/localGPT.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/shortgpt"]
type = "git-repo"
url = "https://github.com/rayventura/shortgpt.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/quivr"]
type = "git-repo"
url = "https://github.com/StanGirard/Quivr.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### Ansible Roles / Playbook
# TODO: Remove all Gas Station references
[".local/share/gas-station"]
type = "git-repo"
url = "https://gitlab.com/megabyte-labs/gas-station.git"
@ -335,56 +99,6 @@
url = "https://github.com/ProfessorManhattan/ansible-modules-bitwarden/raw/master/lookup_plugins/bitwarden.py"
refreshPeriod = "{{ $refreshPeriod }}"
### Application Styles
# Discord (TODO: Apply this to Discord automatically)
[".local/share/fluent/discord/theme.css"]
type = "file"
url = "https://raw.githubusercontent.com/DiscordStyles/Fluent/deploy/Fluent.theme.css"
refreshPeriod = "{{ $refreshPeriod }}"
# Steam (TODO: Apply this to Steam automatically by detecting first if it is installed)
[".local/share/fluent/steam"]
type = "git-repo"
url = "https://github.com/purogamer/Fluent-for-Steam.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### Packer
[".local/share/packer/archlinux"]
type = "git-repo"
url = "https://gitlab.com/megabyte-labs/packer/archlinux-desktop.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/packer/centos"]
type = "git-repo"
url = "https://gitlab.com/megabyte-labs/packer/centos-desktop.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/packer/debian"]
type = "git-repo"
url = "https://gitlab.com/megabyte-labs/packer/debian-desktop.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/packer/fedora"]
type = "git-repo"
url = "https://gitlab.com/megabyte-labs/packer/fedora-desktop.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/packer/macos"]
type = "git-repo"
url = "https://gitlab.com/megabyte-labs/packer/macos-desktop.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/packer/ubuntu"]
type = "git-repo"
url = "https://gitlab.com/megabyte-labs/packer/ubuntu-desktop.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/packer/windows"]
type = "git-repo"
url = "https://gitlab.com/megabyte-labs/packer/windows-desktop.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
# https://github.com/kholia/OSX-KVM
[".local/share/osx-kvm"]
type = "git-repo"
@ -399,14 +113,146 @@
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### JumpUSB
[".local/src/jumpusb"]
######################################
### AI / GPT #########################
######################################
### AgentGPT
[".local/share/agentgpt"]
type = "git-repo"
url = "https://gitlab.com/megabyte-labs/jumpusb.git"
url = "https://github.com/reworkd/AgentGPT.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### Kaguya
[".local/share/kaguya"]
type = "git-repo"
url = "https://github.com/ykdojo/kaguya.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### LocalGPT
[".local/share/localgpt"]
type = "git-repo"
url = "https://github.com/PromtEngineer/localGPT.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### ShortGPT
[".local/share/shortgpt"]
type = "git-repo"
url = "https://github.com/rayventura/shortgpt.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### Quivr
[".local/share/quivr"]
type = "git-repo"
url = "https://github.com/StanGirard/Quivr.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
######################################
### CLI ##############################
######################################
### Antigen
[".local/scripts/antigen.zsh"]
type = "file"
url = "https://raw.githubusercontent.com/zsh-users/antigen/develop/bin/antigen.zsh"
refreshPeriod = "{{ $refreshPeriod }}"
### ASDF
[".local/share/asdf"]
type = "git-repo"
url = "https://github.com/asdf-vm/asdf.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### Bash (Sensible defaults)
[".local/share/shell/sensible.bash"]
type = "file"
url = "https://raw.githubusercontent.com/mrzool/bash-sensible/master/sensible.bash"
refreshPeriod = "{{ $refreshPeriod }}"
### Ble.sh
[".local/share/blesh/src"]
type = "git-repo"
url = "https://github.com/akinomyoga/ble.sh.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--shallow-submodules", "--recursive", "--depth", "1"]
pull.args = ["--ff-only"]
### Bash It!
[".local/share/bash_it"]
type = "git-repo"
url = "https://github.com/Bash-it/bash-it.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### emsdk
[".local/share/emsdk"]
type = "git-repo"
url = "https://github.com/emscripten-core/emsdk.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### fzf
[".local/share/fzf/fzf-git.bash"]
type = "file"
url = "https://raw.githubusercontent.com/junegunn/fzf-git.sh/main/fzf-git.sh"
refreshPeriod = "{{ $refreshPeriod }}"
[".local/share/fzf/fzf-tmux.bash"]
type = "file"
url = "https://raw.githubusercontent.com/junegunn/fzf/master/bin/fzf-tmux"
refreshPeriod = "{{ $refreshPeriod }}"
[".local/share/zsh/site-functions/fzf.zsh"]
type = "file"
url = "https://raw.githubusercontent.com/junegunn/fzf/master/shell/completion.zsh"
refreshPeriod = "{{ $refreshPeriod }}"
[".local/share/zsh/site-functions/fzf-key-bindings.zsh"]
type = "file"
url = "https://raw.githubusercontent.com/junegunn/fzf/master/shell/key-bindings.zsh"
refreshPeriod = "{{ $refreshPeriod }}"
### Git Fuzzy
[".local/share/git-fuzzy"]
type = "git-repo"
url = "https://github.com/bigH/git-fuzzy.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### LSColors
[".config/shell/lscolors.sh"]
type = "file"
url = "https://raw.githubusercontent.com/trapd00r/LS_COLORS/master/lscolors.sh"
refreshPeriod = "{{ $refreshPeriod }}"
### TMux
[".config/tmux/tmux.conf"]
type = "file"
url = "https://raw.githubusercontent.com/gpakosz/.tmux/master/.tmux.conf"
refreshPeriod = "{{ $refreshPeriod }}"
[".config/tmux/tmux.conf.local"]
type = "file"
url = "https://raw.githubusercontent.com/gpakosz/.tmux/master/.tmux.conf.local"
### Update scripts
[".local/bin/update"]
type = "git-repo"
url = "https://github.com/UpdateCommand/update.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
refreshPeriod = "{{ $refreshPeriod }}"
######################################
### Fonts ############################
@ -462,6 +308,28 @@
{{- end }}
{{- end }}
######################################
### Netdata ##########################
######################################
{{- if (lookPath "apt-get") }}
### Netdata Debsecan
[".local/share/netdata-debsecan"]
type = "git-repo"
url = "https://gitlab.com/nodiscc/netdata-debsecan.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
{{- end }}
### Netdata Speedtest (required for Netdata speedtest plugin)
[".local/share/netdata-speedtest"]
type = "git-repo"
url = "https://github.com/ohthehugemanatee/netdata-speedtest.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
######################################
### Rofi #############################
######################################
@ -492,11 +360,87 @@
include = ["applets/**", "colors/**", "images/**", "launchers/**", "powermenu/**", "scripts/**", "config.rasi"]
{{- end }}
######################################
### Security Certificates ############
######################################
### CloudFlare
[".local/etc/ssl/cloudflare/Cloudflare_CA.crt"]
type = "file"
url = "https://developers.cloudflare.com/cloudflare-one/static/documentation/connections/Cloudflare_CA.crt"
[".local/etc/ssl/cloudflare/Cloudflare_CA.pem"]
type = "file"
url = "https://developers.cloudflare.com/cloudflare-one/static/documentation/connections/Cloudflare_CA.pem"
### cURL / Google Cloud SDK
[".local/etc/ssl/curl/cacert.pem"]
type = "file"
url = "https://curl.se/ca/cacert.pem"
### GPG
[".gnupg/gpg.conf"]
type = "file"
url = "https://raw.githubusercontent.com/drduh/config/master/gpg.conf"
### Vagrant
[".ssh/authorized_keys.vagrant"]
type = "file"
url = "https://raw.githubusercontent.com/hashicorp/vagrant/main/keys/vagrant.pub"
refreshPeriod = "{{ $refreshPeriod }}"
######################################
### Theme ############################
######################################
### Betelgeuse (GNOME / KDE / GRUB / Plymouth)
# TODO: Merge Betelgeuse theme into Install Doctor
[".local/src/betelgeuse"]
type = "git-repo"
url = "https://gitlab.com/megabyte-labs/misc/betelgeuse.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/betelgeuse"]
type = "git-repo"
url = "https://gitlab.com/megabyte-labs/misc/betelgeuse.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
[".local/share/candy-icons"]
type = "git-repo"
url = "https://github.com/ProfessorManhattan/candy-icons.git"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### AwesomeWM
[".local/share/yoru"]
type = "git-repo"
url = "https://github.com/rxyhn/yoru.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
### Windows / Fluent Application Styles
# Discord (TODO: Apply this to Discord automatically)
[".local/share/fluent/discord/theme.css"]
type = "file"
url = "https://raw.githubusercontent.com/DiscordStyles/Fluent/deploy/Fluent.theme.css"
refreshPeriod = "{{ $refreshPeriod }}"
# Steam (TODO: Apply this to Steam automatically by detecting first if it is installed)
[".local/share/fluent/steam"]
type = "git-repo"
url = "https://github.com/purogamer/Fluent-for-Steam.git"
refreshPeriod = "{{ $refreshPeriod }}"
clone.args = ["--depth", "1"]
pull.args = ["--ff-only"]
######################################
### VIM / NVIM #######################
######################################
### VIM
[".config/vim/autoload/plug.vim"]
type = "file"
url = "https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim"
refreshPeriod = "{{ $refreshPeriod }}"
{{- $vimPlugins := .softwarePlugins.vim.plugins }}
{{- range $vimPlugin := $vimPlugins }}
{{- $folderName := trimSuffix ".git" (last (splitList "/" $vimPlugin)) }}

View file

@ -0,0 +1,175 @@
---
default_dconf_settings:
- key: /org/gnome/desktop/background/picture-uri
value: "'file:///usr/share/backgrounds/brad-huchteman-stone-mountain.jpg'"
- key: /org/gnome/shell/favorite-apps
value: "['org.gnome.Nautilus.desktop', 'com.brave.Browser.desktop', 'io.gitlab.librewolf-community.desktop', 'com.vscodium.codium.desktop', 'org.ferdium.Ferdium.desktop', 'com.getmailspring.Mailspring.desktop', 'org.gnome.Terminal.desktop', 'tabby.desktop', 'vmware-workstation.desktop', 'org.gnome.Connections.desktop', 'org.standardnotes.standardnotes.desktop', 'com.bitwarden.desktop.desktop', 'portmaster.desktop', 'gnome-control-center.desktop']"
- key: /org/gnome/shell/disable-user-extensions
value: 'false'
- key: /org/gnome/shell/enabled-extensions
value: "['improved-workspace-indicator@michaelaquilina.github.io', 'ssm-gnome@lgiki.net', 'sound-output-device-chooser@kgshank.net', 'ProxySwitcher@flannaghan.com', 'IP-Finder@linxgem33.com', 'vlan-switcher@darcato.github.io', 'dash-to-dock@micxgx.gmail.com', 'drive-menu@gnome-shell-extensions.gcampax.github.com', 'places-menu@gnome-shell-extensions.gcampax.github.com', 'gsconnect@andyholmes.github.io', 'bluetooth-quick-connect@bjarosze.gmail.com', 'mprisindicatorbutton@JasonLG1979.github.io', 'startup-measure@marco.trevi.me', 'pano@elhan.io', 'mutter-primary-gpu@zaidka.github.io', 'appindicatorsupport@rgcjonas.gmail.com', 'user-theme@gnome-shell-extensions.gcampax.github.com']"
- key: /org/gnome/shell/extensions/dash-to-dock/dash-max-icon-size
value: '40'
- key: /org/gnome/desktop/session/idle-delay
value: '600'
- key: /org/gnome/desktop/privacy/report-technical-problems
value: 'false'
- key: /org/gnome/settings-daemon/plugins/power/sleep-inactive-ac-timeout
value: '3600'
- key: /org/gnome/settings-daemon/plugins/power/power-saver-profile-on-low-battery
value: 'true'
- key: /org/gnome/desktop/calendar/show-weekdate
value: 'true'
- key: /org/gnome/desktop/interface/clock-format
value: "'12h'"
- key: /org/gnome/desktop/interface/clock-show-seconds
value: 'true'
- key: /org/gnome/desktop/interface/clock-show-weekday
value: 'true'
- key: /org/gnome/desktop/interface/color-scheme
value: "'prefer-dark'"
- key: /org/gnome/desktop/interface/document-font-name
value: "'Zilla Slab Medium 11'"
- key: /org/gnome/desktop/interface/enable-hot-corners
value: 'true'
- key: /org/gnome/desktop/interface/font-antialiasing
value: "'rgba'"
- key: /org/gnome/desktop/interface/font-hinting
value: "'medium'"
- key: /org/gnome/desktop/interface/font-name
value: "'Montserrat Medium 11'"
- key: /org/gnome/desktop/interface/gtk-theme
value: "'Betelgeuse'"
- key: /org/gnome/desktop/interface/icon-theme
value: "'Betelgeuse'"
- key: /org/gnome/desktop/interface/monospace-font-name
value: "'Hack Nerd Font 11'"
- key: /org/gnome/desktop/privacy/old-files-age
value: 14
- key: /org/gnome/desktop/privacy/recent-files-max-age
value: '7'
- key: /org/gnome/desktop/privacy/remove-old-temp-files
value: 'true'
- key: /org/gnome/desktop/privacy/remove-old-trash-files
value: 'true'
- key: /org/gnome/desktop/privacy/report-technical-problems
value: 'false'
- key: /org/gnome/desktop/wm/preferences/titlebar-font
value: "'Montserrat Bold 11'"
- key: /org/gnome/settings-daemon/plugins/power/power-saver-profile-on-low-battery
value: 'true'
- key: /org/gnome/settings-daemon/plugins/power/sleep-inactive-ac-timeout
value: '3600'
- key: /org/gnome/settings-daemon/plugins/power/sleep-inactive-ac-type
value: "'nothing'"
default_gnome_extensions:
# - url: https://extensions.gnome.org/extension/327/axe-menu/
# - url: https://extensions.gnome.org/extension/1176/argos/
- url: https://extensions.gnome.org/extension/615/appindicator-support/
regex: appindicator-support
- url: https://extensions.gnome.org/extension/19/user-themes/
regex: user-themes
settings:
- dconf write /org/gnome/shell/extensions/user-theme/name "'Betelgeuse'"
- url: https://extensions.gnome.org/extension/1319/gsconnect/
regex: gsconnect
settings:
- dconf write /org/gnome/shell/extensions/gsconnect/name "'Betelgeuse'"
- dconf write /org/gnome/shell/extensions/gsconnect/show-indicators true
- url: https://extensions.gnome.org/extension/4269/alphabetical-app-grid/
regex: AlphabeticalAppGrid@stuarthayhurst
settings:
- dconf write /org/gnome/shell/extensions/alphabetical-app-grid/sort-folder-contents true
- url: https://extensions.gnome.org/extension/307/dash-to-dock/
regex: dash-to-dock
settings:
- dconf write /org/gnome/shell/extensions/dash-to-dock/animate-show-apps true
- dconf write /org/gnome/shell/extensions/dash-to-dock/apply-custom-theme true
- dconf write /org/gnome/shell/extensions/dash-to-dock/custom-theme-shrink true
- dconf write /org/gnome/shell/extensions/dash-to-dock/dash-max-icon-size 30
- dconf write /org/gnome/shell/extensions/dash-to-dock/disable-overview-on-startup true
- dconf write /org/gnome/shell/extensions/dash-to-dock/dock-fixed false
- dconf write /org/gnome/shell/extensions/dash-to-dock/dock-position "'BOTTOM'"
- dconf write /org/gnome/shell/extensions/dash-to-dock/intellihide-mode "'FOCUS_APPLICATION_WINDOWS'"
- dconf write /org/gnome/shell/extensions/dash-to-dock/preview-size-scale 0.45000000000000001
- dconf write /org/gnome/shell/extensions/dash-to-dock/scroll-action "'cycle-windows'"
- dconf write /org/gnome/shell/extensions/dash-to-dock/show-apps-at-top true
- dconf write /org/gnome/shell/extensions/dash-to-dock/show-mounts-network false
- dconf write /org/gnome/shell/extensions/dash-to-dock/show-show-apps-button true
- url: https://extensions.gnome.org/extension/771/proxy-switcher/
regex: ProxySwitcher
- url: https://extensions.gnome.org/extension/3968/improved-workspace-indicator/
regex: improved-workspace-indicator
settings:
- dconf write /org/gnome/shell/extensions/improved-workspace-indicator/panel-position "'right'"
- url: https://extensions.gnome.org/extension/4506/simple-system-monitor/
regex: ssm-gnome
settings:
- dconf write /org/gnome/shell/extensions/simple-system-monitor/cpu-usage-text "'CPU'"
- dconf write /org/gnome/shell/extensions/simple-system-monitor/extension-order -50
- dconf write /org/gnome/shell/extensions/simple-system-monitor/extension-position "'center'"
- dconf write /org/gnome/shell/extensions/simple-system-monitor/font-family "'Hack Nerd Font'"
- dconf write /org/gnome/shell/extensions/simple-system-monitor/font-size 11
- dconf write /org/gnome/shell/extensions/simple-system-monitor/font-weight 400
- dconf write /org/gnome/shell/extensions/simple-system-monitor/is-cpu-usage-enable true
- dconf write /org/gnome/shell/extensions/simple-system-monitor/is-download-speed-enable true
- dconf write /org/gnome/shell/extensions/simple-system-monitor/is-memory-usage-enable true
- dconf write /org/gnome/shell/extensions/simple-system-monitor/is-upload-speed-enable true
- dconf write /org/gnome/shell/extensions/simple-system-monitor/memory-usage-text "'MEM'"
- dconf write /org/gnome/shell/extensions/simple-system-monitor/refresh-interval 3
- dconf write /org/gnome/shell/extensions/simple-system-monitor/show-extra-spaces true
- dconf write /org/gnome/shell/extensions/simple-system-monitor/show-percent-sign true
- dconf write /org/gnome/shell/extensions/simple-system-monitor/text-color "'#ffffff'"
- url: https://extensions.gnome.org/extension/906/sound-output-device-chooser/
regex: sound-output-device-chooser
settings:
- dconf write /org/gnome/shell/extensions/sound-output-device-chooser/hide-menu-icons true
- dconf write /org/gnome/shell/extensions/sound-output-device-chooser/hide-on-single-device true
- dconf write /org/gnome/shell/extensions/sound-output-device-chooser/icon-theme "'monochrome'"
- dconf write /org/gnome/shell/extensions/sound-output-device-chooser/integrate-with-slider true
- dconf write /org/gnome/shell/extensions/sound-output-device-chooser/omit-device-origins false
- url: https://extensions.gnome.org/extension/2983/ip-finder/
regex: IP-Finder
settings:
- dconf write /org/gnome/shell/extensions/public-ip-address/actors-in-panel "'Flag'"
- dconf write /org/gnome/shell/extensions/public-ip-address/panel-vpn-ip-addr-colors false
- dconf write /org/gnome/shell/extensions/public-ip-address/position-in-panel "'right'"
# Alternative to the full screen activities overview
# - url: https://extensions.gnome.org/extension/6/applications-menu/
# regex: apps-menu
- url: https://extensions.gnome.org/extension/3061/vlan-switcher/
regex: vlan-switcher
# Works but does not have that many features and the top bar is somewhat crowded on smaller screens
# - url: https://extensions.gnome.org/extension/1762/lan-ip-address/
# regex: lan-ip-address
- url: https://extensions.gnome.org/extension/7/removable-drive-menu/
regex: drive-menu
- url: https://extensions.gnome.org/extension/5087/startup-measure/
regex: startup-measure
- url: https://extensions.gnome.org/extension/8/places-status-indicator/
regex: places-menu
- url: https://extensions.gnome.org/extension/1379/mpris-indicator-button/
regex: mprisindicatorbutton
- url: https://extensions.gnome.org/extension/5218/mutter-primary-gpu/
regex: mutter-primary-gpu
- url: https://extensions.gnome.org/extension/1401/bluetooth-quick-connect/
regex: bluetooth-quick-connect
settings:
- dconf write /org/gnome/shell/extensions/bluetooth-quick-connect/bluetooth-auto-power-on true
- dconf write /org/gnome/shell/extensions/bluetooth-quick-connect/refresh-button-on true
- dconf write /org/gnome/shell/extensions/bluetooth-quick-connect/show-battery-value-on true
- url: https://extensions.gnome.org/extension/5278/pano/
regex: pano
settings:
- if command -v apt-get > /dev/null; then sudo apt-get install -y gir1.2-gda-5.0 gir1.2-gsound-1.0; fi
- if command -v dnf > /dev/null; then sudo dnf install -y libgda libgda-sqlite; fi
- if command -v yum > /dev/null; then sudo yum install -y libgda libgda-sqlite; fi
- if command -v pacman > /dev/null; then sudo pacman -Sy libgda; fi
- if command -v zypper > /dev/null; then sudo zypper install -y libgda-6_0-sqlite typelib-1_0-Gda-6_0 typelib-1_0-GSound-1_0; fi
- dconf write /org/gnome/shell/extensions/pano/database-location "\\\"$HOME/.local/share/pano\\\""
- dconf write /org/gnome/shell/extensions/pano/history-length 50
- dconf write /org/gnome/shell/extensions/pano/play-audio-on-copy true
- dconf write /org/gnome/shell/extensions/pano/session-only-mode true
# Set below to true if you want select text for copy
- dconf write /org/gnome/shell/extensions/pano/sync-primary false

View file

@ -0,0 +1,214 @@
---
gsetting_configs:
# Enables CTRL+Shift+I to inspect Gtk options
- setting: org.gtk.Settings.Debug enable-inspector-keybinding
value: 'true'
- setting: org.gnome.desktop.interface gtk-theme
value: Betelgeuse
- setting: org.gnome.desktop.wm.preferences theme
value: Betelgeuse
- setting: org.gnome.desktop.wm.preferences titlebar-font
value: '"Montserrat Ultra-Bold 11"'
- setting: org.gnome.desktop.interface color-scheme
value: prefer-dark
- setting: org.gnome.desktop.interface monospace-font-name
value: '"Hack Nerd Font 11"'
- setting: org.gnome.desktop.interface document-font-name
value: '"Zilla Slab 11"'
- setting: org.gnome.desktop.interface font-name
value: '"Montserrat 11"'
hidden_system_tools:
- xfce4-terminal-settings.desktop
- xterm.desktop
- xfce-wmtweaks-settings.desktop
- xfce-workspaces-settings.desktop
- xfce4-sensors.desktop
- panel-preferences.desktop
- org.gnome.PackageUpdater.desktop
- exo-file-manager.desktop
- thunar-bulk-rename.desktop
- thunar-settings.desktop
- xfce4-appfinder.desktop
- xfce4-about.desktop
- xfce4-accessibility-settings.desktop
- pavucontrol.desktop
- paprefs.desktop
- exo-preferred-applications.desktop
kde_system_packages:
- kde-plasma-desktop
- kdeplasma-addons
- kvantum
- rofi
xconf_settings:
- channel: xfwm4
property: /general/theme
value: Betelgeuse
- channel: xfwm4
property: /general/title_font
value: Montserrat Ultra-Bold 10
- channel: xfwm4
property: /general/title_alignment
value: left
- channel: xsettings
property: /Net/ThemeName
value: Betelgeuse
- channel: xsettings
property: /Gtk/MonospaceFontName
value: Hack Nerd Font 10
- channel: xsettings
property: /Gtk/FontName
value: Montserrat Bold 10
- channel: xsettings
property: /Xfce/LastCustomDPI
value: 100
value_type: int
- channel: xsettings
property: /Xft/DPI
value: 100
value_type: int
- channel: xfce4-power-manager
property: /xfce4-power-manager/blank-on-ac
value: 30
value_type: int
- channel: xfce4-power-manager
property: /xfce4-power-manager/dpms-on-ac-off
value: 60
value_type: int
- channel: xfce4-power-manager
property: /xfce4-power-manager/hibernate-button-action
value: 2
value_type: int
- channel: xfce4-power-manager
property: /xfce4-power-manager/sleep-button-action
value: 1
value_type: int
- channel: xfce4-power-manager
property: /xfce4-power-manager/battery-button-action
value: 3
value_type: int
- channel: xfce4-power-manager
property: /xfce4-power-manager/power-button-action
value: 4
value_type: int
- channel: xfce4-power-manager
property: /xfce4-power-manager/general-notification
value: true
value_type: bool
- channel: xfce4-desktop
property: /backdrop/screen0/monitorDP-2/workspace0/last-image
value: /usr/share/backgrounds/images/aurora-stars.jpg
- channel: xfce4-desktop
property: /backdrop/screen0/monitorDP-2/workspace1/last-image
value: /usr/share/backgrounds/images/blue-butter-flies.jpg
- channel: xfce4-desktop
property: /backdrop/screen0/monitorDP-2/workspace2/last-image
value: /usr/share/backgrounds/images/underwater-turtle.jpg
- channel: xfce4-desktop
property: /backdrop/screen0/monitorDP-2/workspace3/last-image
value: /usr/share/backgrounds/images/colorful-jellyfish.jpg
- channel: xfce4-desktop
property: /backdrop/screen0/monitor0/workspace0/last-image
value: /usr/share/backgrounds/images/aurora-stars.jpg
- channel: xfce4-desktop
property: /backdrop/screen0/monitor0/workspace1/last-image
value: /usr/share/backgrounds/images/blue-butter-flies.jpg
- channel: xfce4-desktop
property: /backdrop/screen0/monitor0/workspace2/last-image
value: /usr/share/backgrounds/images/underwater-turtle.jpg
- channel: xfce4-desktop
property: /backdrop/screen0/monitor0/workspace3/last-image
value: /usr/share/backgrounds/images/colorful-jellyfish.jpg
- channel: xfce4-session
property: /compat/LaunchGNOME
value: true
value_type: bool
- channel: xfce4-session
property: /compat/LaunchKDE
value: true
value_type: bool
- channel: xfce4-panel
property: /panels/panel-1/icon-size
value: 28
value_type: int
- channel: xfce4-panel
property: /panels/panel-1/leave-opacity
value: 92
value_type: int
- channel: xfce4-panel
property: /panels/panel-1/enter-opacity
value: 100
value_type: int
- channel: xfce4-panel
property: /panels/panel-1/size
value: 37
value_type: int
- channel: xfce4-panel
property: /plugins/plugin-5/mode
value: 5
value_type: int
- channel: xfce4-panel
property: /plugins/plugin-5/show-military
value: true
value_type: bool
- channel: xfce4-panel
property: /plugins/plugin-5/show-seconds
value: true
value_type: bool
- channel: xfce4-panel
property: /plugins/plugin-5/timezone
value: America/New_York
- channel: xfce4-panel
property: /panels/panel-2/size
value: 67
value_type: int
- channel: xfce4-panel
property: /panels/panel-2/position-locked
value: true
value_type: bool
- channel: xfce4-panel
property: /panels/panel-2/position
value: p=12;x=1783;y=1406
- channel: xfce4-panel
property: /panels/panel-2/autohide-behavior
value: 2
value_type: int
- channel: xfce4-panel
property: /plugins/plugin-6/size-max
value: 32
value_type: int
- channel: xfce4-panel
property: /plugins/plugin-6/square-icons
value: true
value_type: bool
- channel: xfce4-panel
property: /plugins/plugin-6/names-hidden
value:
- xfce4-power-manager
- qui-disk-space
- channel: xfce4-panel
property: /plugins/plugin-1/button-icon
value: appvm-blue
- channel: xfce4-panel
property: /plugins/plugin-1/custom-menu
value: true
value_type: bool
- channel: xfce4-panel
property: /plugins/plugin-3/include-all-workspaces
value: true
value_type: bool
- channel: xfce4-panel
property: /plugins/plugin-3/grouping
value: 1
value_type: int
- channel: xsettings
property: /Gtk/CursorThemeName
value: Sweet-cursors
- channel: xsettings
property: /Net/SoundThemeName
value: MIUI
- channel: xsettings
property: /Net/IconThemeName
value: Betelgeuse

View file

@ -0,0 +1,155 @@
---
# yamllint disable rule:line-length
# @var helm_charts: [] # Settings used for deploying Helm charts. The keys of the `helm_charts` object can be added as an app
# to the `apps` variable (defined in `group_vars/all/apps.yml`) to deploy the application to your network stack.
helm_charts:
# @helm [Argo](https://argoproj.github.io/cd/) | [GitHub](https://github.com/argoproj/argo-cd) | [Helm](https://github.com/argoproj/argo-helm) - ArgoCD is a declarative GitOps continuous delivery platform.
argo:
command: helm install argocd argo/argo-cd
repository: https://argoproj.github.io/argo-helm
repository_name: argo
# @helm [Budibase](https://budibase.com/) | [GitHub](https://github.com/Budibase/budibase) | [Helm](https://docs.budibase.com/docs/kubernetes-k8s) - Budibase is a platform that allows you to codelessly create internal apps in minutes.
budibase:
command: helm install --create-namespace --namespace budibase budibase budibase/budibase
repository: https://budibase.github.io/budibase/
repository_name: budibase
# @helm [Cert-Manager](https://cert-manager.io/) | [GitHub](https://github.com/cert-manager/cert-manager) | [Helm](https://cert-manager.io/docs/installation/helm/) - *Cert-Manager* is a powerful and extensible X.509 certificate controller.
cert-manager:
command: helm install cert-manager jetstack/cert-manager --namespace cert-manager --create-namespace --version v1.8.0 --set installCRDs=true
repository: https://charts.jetstack.io
repository_name: jetstack
# @helm [Concourse](https://concourse-ci.org/) | [GitHub](https://github.com/concourse/concourse) | [Helm](https://github.com/concourse/concourse-chart) - Concourse is a sophisticated, open-source CI/CD platform that markets itself as, "the open-source continuous thing-doer."
concourse:
command: helm install concourse concourse/concourse
repository: https://concourse-charts.storage.googleapis.com/
repository_name: concourse
# @helm [Consul](https://www.consul.io/) | [GitHub](https://github.com/hashicorp/consul) | [Helm](https://www.consul.io/docs/k8s/installation/install) - HashiCorp Consul is a service networking solution to automate network configurations, discover services, and enable secure connectivity across any cloud or runtime.
consul:
command: helm install consul hashicorp/consul --set global.name=consul --create-namespace --namespace consul
repository: https://helm.releases.hashicorp.com
repository_name: hashicorp
# @helm [Drone](https://www.drone.io/) | [GitHub](https://github.com/harness/drone) | [Helm](https://github.com/drone/charts/blob/master/charts/drone/docs/install.md) - Drone is a simple, modern, multi-cloud-capable CI platform written in Go.
drone:
command: helm install --namespace drone drone drone/drone -f drone-values.yaml
repository: https://charts.drone.io
repository_name: drone
# @helm [Elastic ECK](https://www.elastic.co/) | [GitHub](https://github.com/elastic/cloud-on-k8s) | [Helm](https://www.elastic.co/guide/en/cloud-on-k8s/master/k8s-install-helm.html) - Elastic Cloud on Kubernetes (ECK) is the official operator by Elastic for automating the deployment, provisioning, management, and orchestration of Elasticsearch, Kibana, APM Server, Beats, Enterprise Search, Elastic Agent and Elastic Maps Server on Kubernetes.
elastic:
command: helm install elastic-operator elastic/eck-operator -n elastic-system --create-namespace
repository: https://helm.elastic.co
repository_name: elastic
# @helm [Falco](https://falco.org/) | [GitHub](https://github.com/falcosecurity/falco) | [Helm](https://github.com/falcosecurity/charts) - Falco is *the* cloud-native runtime security project.
falco:
command: helm install falco falcosecurity/falco
repository: https://falcosecurity.github.io/charts
repository_name: falcosecurity
# @helm [Fission](https://fission.io/) | [GitHub](https://github.com/fission/fission) | [Helm](https://fission.io/docs/installation/) - Fission is a framework for serverless functions on Kubernetes.
fission:
command: helm install --version v1.15.1 --namespace fission fission fission-charts/fission-all
repository: https://fission.github.io/fission-charts/
repository_name: fission-charts
# @helm [GitLab](https://about.gitlab.com/install/ce-or-ee/) | [GitHub](https://github.com/gitlabhq/gitlabhq) | [Helm](https://docs.gitlab.com/operator/installation.html#cluster) - GitLab is a single application that spans the entire software development lifecycle.
gitlab:
command: helm install gitlab-operator gitlab-operator/gitlab-operator --create-namespace --namespace gitlab-system
repository: https://gitlab.com/api/v4/projects/18899486/packages/helm/stable
repository_name: gitlab-operator
# @helm [GitLab Runner](https://docs.gitlab.com/runner/) | [GitHub](https://github.com/gitlabhq/gitlab-runner) | [Helm](https://docs.gitlab.com/runner/install/kubernetes.html) - This chart deploys an instance of GitLab runner to a Kubernetes cluster. GitLab runner allows you to attach container/VM instances to GitLab CI workflows.
gitlab-runner:
command: helm install --namespace <NAMESPACE> gitlab-runner -f <CONFIG_VALUES_FILE> gitlab/gitlab-runner
repository: https://charts.gitlab.io
repository_name: gitlab
# @helm [Graylog](https://www.graylog.org/) | [GitHub](https://github.com/Graylog2/graylog2-server) | [Helm](https://github.com/KongZ/charts/tree/main/charts/graylog) - Graylog is a leading centralized log management solution for capturing, storing, and enabling real-time analysis of terabytes of machine data.
graylog:
command: |
helm repo add bitnami https://charts.bitnami.com/bitnami
helm repo add elastic https://helm.elastic.co
helm install --namespace graylog graylog kongz/graylog
repository: https://charts.kong-z.com
repository_name: kongz
# @helm [Knative](https://knative.dev/docs/) | [GitHub](https://github.com/knative/serving) | [Operator](https://knative.dev/docs/install/operator/knative-with-operators/) - Knative is an open-source Enterprise-level solution to build serverless and event-driven applications. It manages serverless containers in Kubernetes environments.
knative:
operator: https://github.com/knative/operator/releases/download/knative-v1.4.0/operator.yaml
# @helm [Kubeapps](https://kubeapps.com/) | [GitHub](https://github.com/vmware-tanzu/kubeapps) | [Helm](https://github.com/vmware-tanzu/kubeapps) - Kubeapps is a web-based UI for deploying and managing applications in Kubernetes clusters.
kubeapps:
command: helm install kubeapps --namespace kubeapps bitnami/kubeapps
repository: https://charts.bitnami.com/bitnami
repository_name: bitnami
# @helm [Kubernetes Dashboard](https://kubernetes.io/docs/tasks/access-application-cluster/web-ui-dashboard/) | [GitHub](https://github.com/kubernetes/dashboard) | [Helm](https://artifacthub.io/packages/helm/k8s-dashboard/kubernetes-dashboard) - Kubernetes Dashboard is a general purpose, web-based UI for Kubernetes clusters. It allows users to manage applications running in the cluster and troubleshoot them, as well as manage the cluster itself.
kubernetes-dashboard:
command: helm install kubernetes-dashboard kubernetes-dashboard/kubernetes-dashboard
repository: https://kubernetes.github.io/dashboard/
repository_name: kubernetes-dashboard
# @helm [Linkerd](https://linkerd.io/) | [GitHub](https://github.com/linkerd/linkerd2) | [Helm](https://linkerd.io/2.10/tasks/install-helm/) - Linkerd is a service mesh that is ultra light, ultra simple, ultra powerful. According to their website, Linkerd adds security, observability, and reliability to Kubernetes, without the complexity.
linkerd:
command: |
if [[ "$OSTYPE" == "darwin"* ]]; then
CERT_EXP_DATE=$(date -v+8760H +"%Y-%m-%dT%H:%M:%SZ")
else
CERT_EXP_DATE=$(date -d '+8760 hour' +"%Y-%m-%dT%H:%M:%SZ")
fi
helm install linkerd2 --set-file identityTrustAnchorsPEM=ca.crt --set-file identity.issuer.tls.crtPEM=issuer.crt --set-file identity.issuer.tls.keyPEM=issuer.key --set identity.issuer.crtExpiry=$CERT_EXP_DATE linkerd/linkerd2
repository: https://helm.linkerd.io/stable
repository_name: linkerd
# @helm [Loki](https://grafana.com/oss/loki/) | [GitHub](https://github.com/grafana/loki) | [Helm](https://grafana.com/docs/loki/latest/installation/microservices-helm/) - Grafana Loki is a horizontally scalable, highly available, multi-tenant log aggregation system inspired by Prometheus. It is designed to be very cost effective and easy to operate.
loki:
command: helm install loki-grafana grafana/grafana
repository: https://grafana.github.io/helm-charts
repository_name: grafana
# @helm [Minio](https://min.io/) | [GitHub](https://github.com/minio/minio) | [Helm](https://github.com/minio/minio/tree/master/helm/minio) - MinIO offers high-performance, S3 compatible object storage. Native to Kubernetes, MinIO is the only object storage suite available on every public cloud, every Kubernetes distribution, the private cloud and the edge.
minio:
command: helm install --namespace minio --set rootUser=rootuser,rootPassword=rootpass123 --generate-name minio/minio
repository: https://charts.min.io/
repository_name: minio
# @helm [n8n](https://n8n.io/) | [GitHub](https://github.com/n8n-io/n8n) | [Helm](https://artifacthub.io/packages/helm/open-8gears/n8n) - n8n is a free and open-source, self-hostable workflow automation tool that some consider to be a worthy replacement for IFTTT.
n8n:
command: helm install n8n open-8gears/n8n
repository: https://8gears.container-registry.com/chartrepo/library/
repository_name: open-8gears
# @helm [Prometheus Operator](https://prometheus-operator.dev/) | [GitHub](https://github.com/prometheus-operator/kube-prometheus) | [Helm](https://github.com/prometheus-community/helm-charts/tree/main/charts/kube-prometheus-stack) - A stack that includes everything required for an HA Prometheus / Grafana setup with pre-configured cluster monitoring and charts. It can also be modified to be used for any purpose that Prometheus / Grafana might be used for.
prometheus:
command: helm install prometheus prometheus-community/kube-prometheus-stack
repository: https://prometheus-community.github.io/helm-charts
repository_name: prometheus-community
# @helm [Rancher](https://rancher.com/) | [GitHub](https://github.com/rancher/rancher) | [Helm](https://rancher.com/docs/rancher/v2.5/en/installation/install-rancher-on-k8s/) - Rancher is a complete software stack for teams adopting containers. It addresses the operational and security challenges of managing multiple Kubernetes clusters, while providing DevOps teams with integrated tools for running containerized workloads.
rancher:
command: |
# Missing several steps
helm install rancher rancher-latest/rancher --namespace cattle-system --set hostname=rancher.my.org --set replicas=3
repository: https://releases.rancher.com/server-charts/latest
repository_name: rancher-latest
# @helm [Sentry](https://sentry.io/welcome/) | [GitHub](https://github.com/getsentry/sentry) | [Helm](https://artifacthub.io/packages/helm/sentry/sentry) - Sentry is the leading open-source error logging application that tracks with full stacktraces & asynchronous context. Sentry's eco-system includes dozens of SDKs, written for many different languages/environments.
sentry:
command: helm install sentry sentry/sentry
repository: https://sentry-kubernetes.github.io/charts
repository_name: sentry
# @helm [Space Cloud](https://space-cloud.io/) | [GitHub](https://github.com/spacecloud-io/space-cloud) | [Helm](https://github.com/spacecloud-io/space-cloud/blob/master/install-manifests/helm/index.yaml) - Space Cloud is an open-source Kubernetes-based serverless platform with built-in security and instant GraphQL APIs for any database and microservice.
space-cloud:
command: |
git clone https://github.com/spacecloud-io/space-cloud.git
cd space-cloud/install-manifests/helm
helm install space-cloud .
# @helm [Thanos](https://thanos.io/) | [GitHub](https://github.com/thanos-io/thanos) | [Helm](https://artifacthub.io/packages/helm/bitnami/thanos) - Thanos is an open source, highly available Prometheus setup with long term storage capabilities.
thanos:
command: helm install thanos bitnami/thanos
repository: https://charts.bitnami.com/bitnami
repository_name: bitnami
# @helm [Vault](https://www.vaultproject.io/) | [GitHub](https://github.com/hashicorp/vault) | [Helm](https://www.vaultproject.io/docs/platform/k8s/helm) - HashiCorp Vault is a secrets management tool specifically designed to control access to sensitive credentials in a low-trust environment. It can be used to store sensitive values and at the same time dynamically generate access for specific services/applications on lease.
vault:
command: helm install vault hashicorp/vault
repository: https://helm.releases.hashicorp.com
repository_name: hashicorp
# @helm [VaultWarden](https://bitwarden.com/) | [GitHub](https://github.com/dani-garcia/vaultwarden) | [Helm](https://artifacthub.io/packages/helm/k8s-at-home/vaultwarden) - VaultWarden is an optimized, resource-efficient version of the open source BitWarden web app (a password management platform).
vaultwarden:
command: helm install vaultwarden k8s-at-home/vaultwarden
repository: https://k8s-at-home.com/charts/
repository_name: k8s-at-home
# @helm [Vector](https://vector.dev/) | [GitHub](https://github.com/vectordotdev/vector) | [Helm](https://vector.dev/docs/setup/installation/package-managers/helm/) - Vector is a lightweight, ultra-fast tool for building observability pipelines that lets you collect, transform, and route all your logs and metrics with one simple tool.
vector:
command: helm install vector vector/vector --namespace vector --create-namespace --values values.yaml
repository: https://helm.vector.dev
repository_name: vector
# @helm [velero](https://velero.io/) | [GitHub](https://github.com/vmware-tanzu/velero) | [Helm](https://vmware-tanzu.github.io/helm-charts/) - Velero is an open source tool to safely backup and restore, perform disaster recovery, and migrate Kubernetes cluster resources and persistent volumes.
velero:
command: helm install vmware-tanzu/velero --namespace <YOUR NAMESPACE> -f values.yaml --generate-name
repository: https://vmware-tanzu.github.io/helm-charts
repository_name: vmware-tanzu

View file

@ -73,6 +73,11 @@ if command -v gping > /dev/null; then
alias ping='gping'
fi
### safe-rm
if command -v safe-rm > /dev/null; then
alias rm='safe-rm'
fi
### VIM
if command -v vim > /dev/null; then
alias vi="vim"

View file

@ -216,7 +216,7 @@ export GIT_MERGE_AUTOEDIT=no
if command -v delta > /dev/null; then
export GF_BAT_STYLE=changes
export GF_BAT_THEME=zenbur
export GF_SNAPSHOT_DIRECTORY="${XDG_DATA_HOME:-$HOME/.local/share}/git-fuzzy-snapshots"
export GF_SNAPSHOT_DIRECTORY="${XDG_DATA_HOME:-$HOME/.local/share}/git-fuzzy/snapshots"
export GF_PREFERRED_PAGER="delta --theme=gruvbox --highlight-removed -w __WIDTH__"
fi

View file

@ -82,8 +82,8 @@ if [ "$BASH_SUPPORT" = 'true' ]; then
fi
### fzf-git
#if [ -f "$HOME/.local/scripts/fzf-git.bash" ]; then
# . "$HOME/.local/scripts/fzf-git.bash"
# if [ -f "${XDG_DATA_HOME:-$HOME/.local/share}/fzf/fzf-git.bash" ]; then
# . "${XDG_DATA_HOME:-$HOME/.local/share}/fzf/fzf-git.bash"
# fi
### git-fuzzy
@ -118,8 +118,8 @@ if [ -d "${XDG_DATA_HOME:-$HOME/.local/share}/emsdk" ]; then
fi
### fzf-tmux
#if [ -f "$HOME/.local/scripts/fzf-tmux.bash" ]; then
# . "$HOME/.local/scripts/fzf-tmux.bash"
# if [ -f "${XDG_DATA_HOME:-$HOME/.local/share}/fzf/fzf-tmux.bash" ]; then
# . "${XDG_DATA_HOME:-$HOME/.local/share}/fzf/fzf-tmux.bash"
# fi
### McFly

View file

@ -0,0 +1,68 @@
#!/bin/bash
# Source: https://github.com/xvoland/Extract
# function Extract for common file formats
#
# This is a Bash function called "extract" that is designed to extract a variety of file formats.
# It takes one or more arguments, each of which represents a file or path that needs to be extracted.
# If no arguments are provided, the function displays usage instructions.
#
# This bash script allows to download a file from Github storage https://github.com/xvoland/Extract/blob/master/extract.sh
#
# Usage:
# extract <path/file_name>.<zip|rar|bz2|gz|tar|tbz2|tgz|Z|7z|xz|ex|tar.bz2|tar.gz|tar.xz|.zlib|.cso>
#
# Example:
# $ extract file_name.zip
#
# Author: Vitalii Tereshchuk, 2013
# Web: https://dotoca.net
# Github: https://github.com/xvoland/Extract/blob/master/extract.sh
SAVEIFS=$IFS
IFS="$(printf '\n\t')"
function extract {
if [ $# -eq 0 ]; then
# display usage if no parameters given
echo "Usage: extract <path/file_name>.<zip|rar|bz2|gz|tar|tbz2|tgz|Z|7z|xz|ex|tar.bz2|tar.gz|tar.xz|.zlib|.cso>"
echo " extract <path/file_name_1.ext> [path/file_name_2.ext] [path/file_name_3.ext]"
fi
for n in "$@"; do
if [ ! -f "$n" ]; then
echo "'$n' - file doesn't exist"
return 1
fi
case "${n%,}" in
*.cbt|*.tar.bz2|*.tar.gz|*.tar.xz|*.tbz2|*.tgz|*.txz|*.tar)
tar zxvf "$n" ;;
*.lzma) unlzma ./"$n" ;;
*.bz2) bunzip2 ./"$n" ;;
*.cbr|*.rar) unrar x -ad ./"$n" ;;
*.gz) gunzip ./"$n" ;;
*.cbz|*.epub|*.zip) unzip ./"$n" ;;
*.z) uncompress ./"$n" ;;
*.7z|*.apk|*.arj|*.cab|*.cb7|*.chm|*.deb|*.iso|*.lzh|*.msi|*.pkg|*.rpm|*.udf|*.wim|*.xar|*.vhd)
7z x ./"$n" ;;
*.xz) unxz ./"$n" ;;
*.exe) cabextract ./"$n" ;;
*.cpio) cpio -id < ./"$n" ;;
*.cba|*.ace) unace x ./"$n" ;;
*.zpaq) zpaq x ./"$n" ;;
*.arc) arc e ./"$n" ;;
*.cso) ciso 0 ./"$n" ./"$n.iso" && \
extract "$n.iso" && \rm -f "$n" ;;
*.zlib) zlib-flate -uncompress < ./"$n" > ./"$n.tmp" && \
mv ./"$n.tmp" ./"${n%.*zlib}" && rm -f "$n" ;;
*.dmg)
hdiutil mount ./"$n" -mountpoint "./$n.mounted" ;;
*)
echo "extract: '$n' - unknown archive method"
return 1
;;
esac
done
}
IFS=$SAVEIFS

View file

@ -1786,14 +1786,17 @@ async function installPlugins(pluginData) {
console.error(e)
}
} catch (e) {
log('error', 'Plugin', `Failed to install ${pluginData.package} plugin - ${plugin`)
log('error', 'Plugin', `Failed to install ${pluginData.package} plugin - ${plugin}`)
console.error(e)
}
}
}
if (pluginData.update) {
runCommand(`Updating ${pluginData.package} plugins`, pluginData.update)
const pluginDataPackage = pluginData.package
if (pluginDataPackage) {
runCommand('Updating ' + pluginDataPackage + ' plugins', pluginData.update)
}
}
}
@ -1879,7 +1882,7 @@ async function installSoftware(pkgsToInstall) {
}
installOrdersPre.length && log('info', 'Pre-Install', `Running package-specific pre-installation steps`)
for (const script of installOrdersPre) {
await $`${script}`
await $`${script.replace(/\n/g, '\n')}`
}
installOrdersGroups.length && log('info', 'Users / Groups', `Adding groups / users`)
for (const group of installOrdersGroups) {
@ -1907,7 +1910,7 @@ async function installSoftware(pkgsToInstall) {
for (const script of installOrdersPost) {
try {
log('info', 'Post Hook', script)
runCommand('Running post-install hook', script)
await $`${script.replace(/\n/g, '\n')}`
} catch(e) {
log('info', 'Post-Install Hook', 'Encountered error while running post-install hook')
}

View file

@ -0,0 +1,623 @@
#!/usr/bin/env bash
# Source: https://github.com/laurent22/rsync-time-backup
APPNAME=$(basename $0 | sed "s/\.sh$//")
# -----------------------------------------------------------------------------
# Log functions
# -----------------------------------------------------------------------------
fn_log_info() { echo "$APPNAME: $1"; }
fn_log_warn() { echo "$APPNAME: [WARNING] $1" 1>&2; }
fn_log_error() { echo "$APPNAME: [ERROR] $1" 1>&2; }
fn_log_info_cmd() {
if [ -n "$SSH_DEST_FOLDER_PREFIX" ]; then
echo "$APPNAME: $SSH_CMD '$1'";
else
echo "$APPNAME: $1";
fi
}
# -----------------------------------------------------------------------------
# Make sure everything really stops when CTRL+C is pressed
# -----------------------------------------------------------------------------
fn_terminate_script() {
fn_log_info "SIGINT caught."
exit 1
}
trap 'fn_terminate_script' SIGINT
# -----------------------------------------------------------------------------
# Small utility functions for reducing code duplication
# -----------------------------------------------------------------------------
fn_display_usage() {
echo "Usage: $(basename $0) [OPTION]... <[USER@HOST:]SOURCE> <[USER@HOST:]DESTINATION> [exclude-pattern-file]"
echo ""
echo "Options"
echo " -p, --port SSH port."
echo " -h, --help Display this help message."
echo " -i, --id_rsa Specify the private ssh key to use."
echo " --rsync-get-flags Display the default rsync flags that are used for backup. If using remote"
echo " drive over SSH, --compress will be added."
echo " --rsync-set-flags Set the rsync flags that are going to be used for backup."
echo " --rsync-append-flags Append the rsync flags that are going to be used for backup."
echo " --log-dir Set the log file directory. If this flag is set, generated files will"
echo " not be managed by the script - in particular they will not be"
echo " automatically deleted."
echo " Default: $LOG_DIR"
echo " --strategy Set the expiration strategy. Default: \"1:1 30:7 365:30\" means after one"
echo " day, keep one backup per day. After 30 days, keep one backup every 7 days."
echo " After 365 days keep one backup every 30 days."
echo " --no-auto-expire Disable automatically deleting backups when out of space. Instead an error"
echo " is logged, and the backup is aborted."
echo ""
echo "For more detailed help, please see the README file:"
echo ""
echo "https://github.com/laurent22/rsync-time-backup/blob/master/README.md"
}
fn_parse_date() {
# Converts YYYY-MM-DD-HHMMSS to YYYY-MM-DD HH:MM:SS and then to Unix Epoch.
case "$OSTYPE" in
linux*|cygwin*|netbsd*)
date -d "${1:0:10} ${1:11:2}:${1:13:2}:${1:15:2}" +%s ;;
FreeBSD*) date -j -f "%Y-%m-%d-%H%M%S" "$1" "+%s" ;;
darwin*)
# Under MacOS X Tiger
# Or with GNU 'coreutils' installed (by homebrew)
# 'date -j' doesn't work, so we do this:
yy=$(expr ${1:0:4})
mm=$(expr ${1:5:2} - 1)
dd=$(expr ${1:8:2})
hh=$(expr ${1:11:2})
mi=$(expr ${1:13:2})
ss=$(expr ${1:15:2})
perl -e 'use Time::Local; print timelocal('$ss','$mi','$hh','$dd','$mm','$yy'),"\n";' ;;
esac
}
fn_find_backups() {
fn_run_cmd "find "$DEST_FOLDER/" -maxdepth 1 -type d -name \"????-??-??-??????\" -prune | sort -r"
}
fn_expire_backup() {
# Double-check that we're on a backup destination to be completely
# sure we're deleting the right folder
if [ -z "$(fn_find_backup_marker "$(dirname -- "$1")")" ]; then
fn_log_error "$1 is not on a backup destination - aborting."
exit 1
fi
fn_log_info "Expiring $1"
fn_rm_dir "$1"
}
fn_expire_backups() {
local current_timestamp=$EPOCH
local last_kept_timestamp=9999999999
# we will keep requested backup
backup_to_keep="$1"
# we will also keep the oldest backup
oldest_backup_to_keep="$(fn_find_backups | sort | sed -n '1p')"
# Process each backup dir from the oldest to the most recent
for backup_dir in $(fn_find_backups | sort); do
local backup_date=$(basename "$backup_dir")
local backup_timestamp=$(fn_parse_date "$backup_date")
# Skip if failed to parse date...
if [ -z "$backup_timestamp" ]; then
fn_log_warn "Could not parse date: $backup_dir"
continue
fi
if [ "$backup_dir" == "$backup_to_keep" ]; then
# this is the latest backup requsted to be kept. We can finish pruning
break
fi
if [ "$backup_dir" == "$oldest_backup_to_keep" ]; then
# We dont't want to delete the oldest backup. It becomes first "last kept" backup
last_kept_timestamp=$backup_timestamp
# As we keep it we can skip processing it and go to the next oldest one in the loop
continue
fi
# Find which strategy token applies to this particular backup
for strategy_token in $(echo $EXPIRATION_STRATEGY | tr " " "\n" | sort -r -n); do
IFS=':' read -r -a t <<< "$strategy_token"
# After which date (relative to today) this token applies (X) - we use seconds to get exact cut off time
local cut_off_timestamp=$((current_timestamp - ${t[0]} * 86400))
# Every how many days should a backup be kept past the cut off date (Y) - we use days (not seconds)
local cut_off_interval_days=$((${t[1]}))
# If we've found the strategy token that applies to this backup
if [ "$backup_timestamp" -le "$cut_off_timestamp" ]; then
# Special case: if Y is "0" we delete every time
if [ $cut_off_interval_days -eq "0" ]; then
fn_expire_backup "$backup_dir"
break
fi
# we calculate days number since last kept backup
local last_kept_timestamp_days=$((last_kept_timestamp / 86400))
local backup_timestamp_days=$((backup_timestamp / 86400))
local interval_since_last_kept_days=$((backup_timestamp_days - last_kept_timestamp_days))
# Check if the current backup is in the interval between
# the last backup that was kept and Y
# to determine what to keep/delete we use days difference
if [ "$interval_since_last_kept_days" -lt "$cut_off_interval_days" ]; then
# Yes: Delete that one
fn_expire_backup "$backup_dir"
# backup deleted no point to check shorter timespan strategies - go to the next backup
break
else
# No: Keep it.
# this is now the last kept backup
last_kept_timestamp=$backup_timestamp
# and go to the next backup
break
fi
fi
done
done
}
fn_parse_ssh() {
# To keep compatibility with bash version < 3, we use grep
if echo "$DEST_FOLDER"|grep -Eq '^[A-Za-z0-9\._%\+\-]+@[A-Za-z0-9.\-]+\:.+$'
then
SSH_USER=$(echo "$DEST_FOLDER" | sed -E 's/^([A-Za-z0-9\._%\+\-]+)@([A-Za-z0-9.\-]+)\:(.+)$/\1/')
SSH_HOST=$(echo "$DEST_FOLDER" | sed -E 's/^([A-Za-z0-9\._%\+\-]+)@([A-Za-z0-9.\-]+)\:(.+)$/\2/')
SSH_DEST_FOLDER=$(echo "$DEST_FOLDER" | sed -E 's/^([A-Za-z0-9\._%\+\-]+)@([A-Za-z0-9.\-]+)\:(.+)$/\3/')
if [ -n "$ID_RSA" ] ; then
SSH_CMD="ssh -p $SSH_PORT -i $ID_RSA ${SSH_USER}@${SSH_HOST}"
else
SSH_CMD="ssh -p $SSH_PORT ${SSH_USER}@${SSH_HOST}"
fi
SSH_DEST_FOLDER_PREFIX="${SSH_USER}@${SSH_HOST}:"
elif echo "$SRC_FOLDER"|grep -Eq '^[A-Za-z0-9\._%\+\-]+@[A-Za-z0-9.\-]+\:.+$'
then
SSH_USER=$(echo "$SRC_FOLDER" | sed -E 's/^([A-Za-z0-9\._%\+\-]+)@([A-Za-z0-9.\-]+)\:(.+)$/\1/')
SSH_HOST=$(echo "$SRC_FOLDER" | sed -E 's/^([A-Za-z0-9\._%\+\-]+)@([A-Za-z0-9.\-]+)\:(.+)$/\2/')
SSH_SRC_FOLDER=$(echo "$SRC_FOLDER" | sed -E 's/^([A-Za-z0-9\._%\+\-]+)@([A-Za-z0-9.\-]+)\:(.+)$/\3/')
if [ -n "$ID_RSA" ] ; then
SSH_CMD="ssh -p $SSH_PORT -i $ID_RSA ${SSH_USER}@${SSH_HOST}"
else
SSH_CMD="ssh -p $SSH_PORT ${SSH_USER}@${SSH_HOST}"
fi
SSH_SRC_FOLDER_PREFIX="${SSH_USER}@${SSH_HOST}:"
fi
}
fn_run_cmd() {
if [ -n "$SSH_DEST_FOLDER_PREFIX" ]
then
eval "$SSH_CMD '$1'"
else
eval $1
fi
}
fn_run_cmd_src() {
	# Run a command string on the source side: via $SSH_CMD when the
	# source is remote, locally otherwise.
	if [ -n "$SSH_SRC_FOLDER_PREFIX" ]
	then
		eval "$SSH_CMD '$1'"
	else
		# Quote the argument so it is not word-split and glob-expanded
		# before eval re-parses it (shellcheck SC2086).
		eval "$1"
	fi
}
# Run 'find' on the destination side, silencing errors.
# NOTE(review): the whole argument is wrapped in single quotes, so a caller
# that appends find options to the path (e.g. "$DEST -type d") ends up with
# those options inside the quoted path instead of as find arguments —
# confirm this matches the intent at the call sites.
fn_find() {
	fn_run_cmd "find '$1'" 2>/dev/null
}
fn_get_absolute_path() {
	# Resolve a (possibly relative) folder to an absolute path on the
	# destination side using cd + pwd.
	# '&&' ensures nothing is printed when the folder does not exist,
	# instead of silently returning the shell's current directory.
	fn_run_cmd "cd '$1' && pwd"
}
# Creates a directory (and parents) on the destination side.
fn_mkdir() {
	fn_run_cmd "mkdir -p -- '$1'"
}
# Removes a file or symlink - not for directories
fn_rm_file() {
	fn_run_cmd "rm -f -- '$1'"
}
# Recursively removes a directory on the destination side.
fn_rm_dir() {
	fn_run_cmd "rm -rf -- '$1'"
}
# Creates/updates a file's timestamp on the destination side.
fn_touch() {
	fn_run_cmd "touch -- '$1'"
}
# Creates a symlink $2 pointing at $1 on the destination side.
fn_ln() {
	fn_run_cmd "ln -s -- '$1' '$2'"
}
# Tests whether a path exists on the source side (exit status only).
fn_test_file_exists_src() {
	fn_run_cmd_src "test -e '$1'"
}
# Prints filesystem type information for a path on the source side.
fn_df_t_src() {
	fn_run_cmd_src "df -T '${1}'"
}
# Prints filesystem type information for a path on the destination side.
fn_df_t() {
	fn_run_cmd "df -T '${1}'"
}
# -----------------------------------------------------------------------------
# Source and destination information
# -----------------------------------------------------------------------------
# SSH-related state; filled in by fn_parse_ssh when the source or the
# destination is given in the remote form user@host:path.
SSH_USER=""
SSH_HOST=""
SSH_DEST_FOLDER=""
SSH_SRC_FOLDER=""
SSH_CMD=""
SSH_DEST_FOLDER_PREFIX=""
SSH_SRC_FOLDER_PREFIX=""
SSH_PORT="22"
ID_RSA=""
# Positional arguments: source, destination, optional rsync exclusion file.
SRC_FOLDER=""
DEST_FOLDER=""
EXCLUSION_FILE=""
LOG_DIR="$HOME/.$APPNAME"
# When "1", the log file is removed after a successful run; a custom
# --log-dir disables this.
AUTO_DELETE_LOG="1"
# Backup expiration strategy as "after_days:interval_days" pairs.
EXPIRATION_STRATEGY="1:1 30:7 365:30"
AUTO_EXPIRE="1"
RSYNC_FLAGS="-D --numeric-ids --links --hard-links --one-file-system --itemize-changes --times --recursive --perms --owner --group --stats --human-readable"
# Parse command-line options; the first positional argument (or "--")
# terminates the loop and captures SRC/DEST/EXCLUSION_FILE.
while :; do
	case $1 in
		-h|-\?|--help)
			fn_display_usage
			exit
			;;
		-p|--port)
			shift
			SSH_PORT=$1
			;;
		-i|--id_rsa)
			shift
			ID_RSA="$1"
			;;
		--rsync-get-flags)
			shift
			echo $RSYNC_FLAGS
			exit
			;;
		--rsync-set-flags)
			shift
			RSYNC_FLAGS="$1"
			;;
		--rsync-append-flags)
			shift
			RSYNC_FLAGS="$RSYNC_FLAGS $1"
			;;
		--strategy)
			shift
			EXPIRATION_STRATEGY="$1"
			;;
		--log-dir)
			shift
			LOG_DIR="$1"
			AUTO_DELETE_LOG="0"
			;;
		--no-auto-expire)
			AUTO_EXPIRE="0"
			;;
		--)
			shift
			SRC_FOLDER="$1"
			DEST_FOLDER="$2"
			EXCLUSION_FILE="$3"
			break
			;;
		-*)
			fn_log_error "Unknown option: \"$1\""
			fn_log_info ""
			fn_display_usage
			exit 1
			;;
		*)
			SRC_FOLDER="$1"
			DEST_FOLDER="$2"
			EXCLUSION_FILE="$3"
			break
	esac
	shift
done
# Display usage information if required arguments are not passed
if [[ -z "$SRC_FOLDER" || -z "$DEST_FOLDER" ]]; then
	fn_display_usage
	exit 1
fi
# Strips off last slash from dest. Note that it means the root folder "/"
# will be represented as an empty string "", which is fine
# with the current script (since a "/" is added when needed)
# but still something to keep in mind.
# However, due to this behavior we delay stripping the last slash for
# the source folder until after parsing for ssh usage.
DEST_FOLDER="${DEST_FOLDER%/}"
fn_parse_ssh
# If either side turned out to be remote, keep only the path part in the
# local variable; the user@host prefix is re-added where needed.
if [ -n "$SSH_DEST_FOLDER" ]; then
	DEST_FOLDER="$SSH_DEST_FOLDER"
fi
if [ -n "$SSH_SRC_FOLDER" ]; then
	SRC_FOLDER="$SSH_SRC_FOLDER"
fi
# Exit if source folder does not exist.
if ! fn_test_file_exists_src "${SRC_FOLDER}"; then
	fn_log_error "Source folder \"${SRC_FOLDER}\" does not exist - aborting."
	exit 1
fi
# Now strip off last slash from source folder.
SRC_FOLDER="${SRC_FOLDER%/}"
# Commands are assembled as single-quoted strings and passed through eval,
# so a single quote inside any of these paths would break the quoting.
for ARG in "$SRC_FOLDER" "$DEST_FOLDER" "$EXCLUSION_FILE"; do
	if [[ "$ARG" == *"'"* ]]; then
		fn_log_error 'Source and destination directories may not contain single quote characters.'
		exit 1
	fi
done
# -----------------------------------------------------------------------------
# Check that the destination drive is a backup drive
# -----------------------------------------------------------------------------
# TODO: check that the destination supports hard links
# Path of the marker file that flags a folder as a valid backup destination.
fn_backup_marker_path() { echo "$1/backup.marker"; }
# Prints the marker path if it exists on the destination, nothing otherwise.
fn_find_backup_marker() { fn_find "$(fn_backup_marker_path "$1")" 2>/dev/null; }
if [ -z "$(fn_find_backup_marker "$DEST_FOLDER")" ]; then
	fn_log_info "Safety check failed - the destination does not appear to be a backup folder or drive (marker file not found)."
	fn_log_info "If it is indeed a backup folder, you may add the marker file by running the following command:"
	fn_log_info ""
	fn_log_info_cmd "mkdir -p -- \"$DEST_FOLDER\" ; touch \"$(fn_backup_marker_path "$DEST_FOLDER")\""
	fn_log_info ""
	exit 1
fi
# Check source and destination file-system (df -T /dest).
# If one of them is FAT, use the --modify-window rsync parameter
# (see man rsync) with a value of 1 or 2.
#
# The check is performed by taking the second row
# of the output of the first command.
if [[ "$(fn_df_t_src "${SRC_FOLDER}" | awk '{print $2}' | grep -c -i -e "fat")" -gt 0 ]]; then
	fn_log_info "Source file-system is a version of FAT."
	fn_log_info "Using the --modify-window rsync parameter with value 2."
	RSYNC_FLAGS="${RSYNC_FLAGS} --modify-window=2"
elif [[ "$(fn_df_t "${DEST_FOLDER}" | awk '{print $2}' | grep -c -i -e "fat")" -gt 0 ]]; then
	fn_log_info "Destination file-system is a version of FAT."
	fn_log_info "Using the --modify-window rsync parameter with value 2."
	RSYNC_FLAGS="${RSYNC_FLAGS} --modify-window=2"
fi
# -----------------------------------------------------------------------------
# Setup additional variables
# -----------------------------------------------------------------------------
# Date logic
NOW=$(date +"%Y-%m-%d-%H%M%S")
EPOCH=$(date "+%s")
KEEP_ALL_DATE=$((EPOCH - 86400)) # 1 day ago
KEEP_DAILIES_DATE=$((EPOCH - 2678400)) # 31 days ago
export IFS=$'\n' # Better for handling spaces in filenames.
# New backup folder, named by timestamp; PREVIOUS_DEST is the most recent
# existing backup (first line of fn_find_backups output), if any.
DEST="$DEST_FOLDER/$NOW"
PREVIOUS_DEST="$(fn_find_backups | head -n 1)"
INPROGRESS_FILE="$DEST_FOLDER/backup.inprogress"
MYPID="$$"
# -----------------------------------------------------------------------------
# Create log folder if it doesn't exist
# -----------------------------------------------------------------------------
if [ ! -d "$LOG_DIR" ]; then
	fn_log_info "Creating log folder in '$LOG_DIR'..."
	# NOTE(review): plain 'mkdir' (not -p) fails if the parent directory is
	# missing — confirm parents always exist for custom --log-dir values.
	mkdir -- "$LOG_DIR"
fi
# -----------------------------------------------------------------------------
# Handle case where a previous backup failed or was interrupted.
# -----------------------------------------------------------------------------
if [ -n "$(fn_find "$INPROGRESS_FILE")" ]; then
	if [ "$OSTYPE" == "cygwin" ]; then
		# 1. Grab the PID of previous run from the PID file
		RUNNINGPID="$(fn_run_cmd "cat $INPROGRESS_FILE")"
		# 2. Get the command for the process currently running under that PID and look for our script name
		RUNNINGCMD="$(procps -wwfo cmd -p $RUNNINGPID --no-headers | grep "$APPNAME")"
		# 3. Grab the exit code from grep (0=found, 1=not found)
		GREPCODE=$?
		# 4. if found, assume backup is still running
		if [ "$GREPCODE" = 0 ]; then
			fn_log_error "Previous backup task is still active - aborting (command: $RUNNINGCMD)."
			exit 1
		fi
	elif [[ "$OSTYPE" == "netbsd"* ]]; then
		# NetBSD's ps needs different flags to look up a PID's command.
		RUNNINGPID="$(fn_run_cmd "cat $INPROGRESS_FILE")"
		if ps -axp "$RUNNINGPID" -o "command" | grep "$APPNAME" > /dev/null; then
			fn_log_error "Previous backup task is still active - aborting."
			exit 1
		fi
	else
		# Generic case: check whether the recorded PID still runs this app.
		RUNNINGPID="$(fn_run_cmd "cat $INPROGRESS_FILE")"
		if ps -p "$RUNNINGPID" -o command | grep "$APPNAME"
		then
			fn_log_error "Previous backup task is still active - aborting."
			exit 1
		fi
	fi
	if [ -n "$PREVIOUS_DEST" ]; then
		# - Last backup is moved to current backup folder so that it can be resumed.
		# - 2nd to last backup becomes last backup.
		fn_log_info "$SSH_DEST_FOLDER_PREFIX$INPROGRESS_FILE already exists - the previous backup failed or was interrupted. Backup will resume from there."
		# NOTE(review): $PREVIOUS_DEST and $DEST are not quoted inside the
		# command string; paths containing spaces would break here — confirm.
		fn_run_cmd "mv -- $PREVIOUS_DEST $DEST"
		if [ "$(fn_find_backups | wc -l)" -gt 1 ]; then
			PREVIOUS_DEST="$(fn_find_backups | sed -n '2p')"
		else
			PREVIOUS_DEST=""
		fi
		# update PID to current process to avoid multiple concurrent resumes
		fn_run_cmd "echo $MYPID > $INPROGRESS_FILE"
	fi
fi
# Run in a loop to handle the "No space left on device" logic.
# The loop only repeats when a full destination forces an old backup to be
# purged; every other path ends in an explicit exit.
while : ; do
	# -----------------------------------------------------------------------------
	# Check if we are doing an incremental backup (if previous backup exists).
	# -----------------------------------------------------------------------------
	LINK_DEST_OPTION=""
	if [ -z "$PREVIOUS_DEST" ]; then
		fn_log_info "No previous backup - creating new one."
	else
		# If the path is relative, it needs to be relative to the destination. To keep
		# it simple, just use an absolute path. See http://serverfault.com/a/210058/118679
		PREVIOUS_DEST="$(fn_get_absolute_path "$PREVIOUS_DEST")"
		fn_log_info "Previous backup found - doing incremental backup from $SSH_DEST_FOLDER_PREFIX$PREVIOUS_DEST"
		LINK_DEST_OPTION="--link-dest='$PREVIOUS_DEST'"
	fi
	# -----------------------------------------------------------------------------
	# Create destination folder if it doesn't already exists
	# -----------------------------------------------------------------------------
	# NOTE(review): fn_find single-quotes its whole argument, so "-type d"
	# here becomes part of the quoted path rather than a find option —
	# confirm this existence check behaves as intended.
	if [ -z "$(fn_find "$DEST -type d" 2>/dev/null)" ]; then
		fn_log_info "Creating destination $SSH_DEST_FOLDER_PREFIX$DEST"
		fn_mkdir "$DEST"
	fi
	# -----------------------------------------------------------------------------
	# Purge certain old backups before beginning new backup.
	# -----------------------------------------------------------------------------
	if [ -n "$PREVIOUS_DEST" ]; then
		# regardless of expiry strategy keep backup used for --link-dest
		fn_expire_backups "$PREVIOUS_DEST"
	else
		# keep latest backup
		fn_expire_backups "$DEST"
	fi
	# -----------------------------------------------------------------------------
	# Start backup
	# -----------------------------------------------------------------------------
	LOG_FILE="$LOG_DIR/$(date +"%Y-%m-%d-%H%M%S").log"
	fn_log_info "Starting backup..."
	fn_log_info "From: $SSH_SRC_FOLDER_PREFIX$SRC_FOLDER/"
	fn_log_info "To:   $SSH_DEST_FOLDER_PREFIX$DEST/"
	# Assemble the rsync command as a string; it is eval'd below so that
	# the embedded single quotes are honored.
	CMD="rsync"
	if [ -n "$SSH_CMD" ]; then
		RSYNC_FLAGS="$RSYNC_FLAGS --compress"
		if [ -n "$ID_RSA" ] ; then
			CMD="$CMD -e 'ssh -p $SSH_PORT -i $ID_RSA -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'"
		else
			CMD="$CMD -e 'ssh -p $SSH_PORT -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'"
		fi
	fi
	CMD="$CMD $RSYNC_FLAGS"
	CMD="$CMD --log-file '$LOG_FILE'"
	if [ -n "$EXCLUSION_FILE" ]; then
		# We've already checked that $EXCLUSION_FILE doesn't contain a single quote
		CMD="$CMD --exclude-from '$EXCLUSION_FILE'"
	fi
	CMD="$CMD $LINK_DEST_OPTION"
	CMD="$CMD -- '$SSH_SRC_FOLDER_PREFIX$SRC_FOLDER/' '$SSH_DEST_FOLDER_PREFIX$DEST/'"
	fn_log_info "Running command:"
	fn_log_info "$CMD"
	# Record our PID so interrupted runs can be detected and resumed.
	fn_run_cmd "echo $MYPID > $INPROGRESS_FILE"
	eval $CMD
	# -----------------------------------------------------------------------------
	# Check if we ran out of space
	# -----------------------------------------------------------------------------
	NO_SPACE_LEFT="$(grep "No space left on device (28)\|Result too large (34)" "$LOG_FILE")"
	if [ -n "$NO_SPACE_LEFT" ]; then
		if [[ $AUTO_EXPIRE == "0" ]]; then
			fn_log_error "No space left on device, and automatic purging of old backups is disabled."
			exit 1
		fi
		fn_log_warn "No space left on device - removing oldest backup and resuming."
		if [[ "$(fn_find_backups | wc -l)" -lt "2" ]]; then
			fn_log_error "No space left on device, and no old backup to delete."
			exit 1
		fi
		fn_expire_backup "$(fn_find_backups | tail -n 1)"
		# Resume backup
		continue
	fi
	# -----------------------------------------------------------------------------
	# Check whether rsync reported any errors
	# -----------------------------------------------------------------------------
	EXIT_CODE="1"
	if [ -n "$(grep "rsync error:" "$LOG_FILE")" ]; then
		fn_log_error "Rsync reported an error. Run this command for more details: grep -E 'rsync:|rsync error:' '$LOG_FILE'"
	elif [ -n "$(grep "rsync:" "$LOG_FILE")" ]; then
		fn_log_warn "Rsync reported a warning. Run this command for more details: grep -E 'rsync:|rsync error:' '$LOG_FILE'"
	else
		fn_log_info "Backup completed without errors."
		if [[ $AUTO_DELETE_LOG == "1" ]]; then
			rm -f -- "$LOG_FILE"
		fi
		EXIT_CODE="0"
	fi
	# -----------------------------------------------------------------------------
	# Add symlink to last backup
	# -----------------------------------------------------------------------------
	fn_rm_file "$DEST_FOLDER/latest"
	fn_ln "$(basename -- "$DEST")" "$DEST_FOLDER/latest"
	fn_rm_file "$INPROGRESS_FILE"
	exit $EXIT_CODE
done

View file

@ -0,0 +1,403 @@
#!/usr/bin/env sh
# Source: https://github.com/sdushantha/tmpmail
# by Siddharth Dushantha 2020
#
# Dependencies: jq, curl, w3m
#
version=1.2.3
# By default 'tmpmail' uses 'w3m' as its web browser to render
# the HTML of the email
browser="w3m"
# The default command that will be used to copy the email address to
# the user's clipboard when running 'tmpmail --copy'
copy_to_clipboard_cmd="xclip -selection c"
# If the value is set to 'true' tmpmail will convert the HTML email
# to raw text and send that to stdout
raw_text=false
# Everything related to 'tmpmail' will be stored in /tmp/tmpmail
# so that the old emails and email addresses get cleared after
# restarting the computer
tmpmail_dir="/tmp/tmpmail"
# tmpmail_email_address is where we store the temporary email address
# that gets generated. This prevents the user from providing
# the email address every time they run tmpmail
tmpmail_email_address="$tmpmail_dir/email_address"
# tmpmail.html is where the email gets stored.
# Even though the file ends with a .html extension, the raw text version of
# the email will also be stored in this file so that w3m and other browsers
# are able to open this file
tmpmail_html_email="$tmpmail_dir/tmpmail.html"
# Default 1secmail API URL
tmpmail_api_url="https://www.1secmail.com/api/v1/"
usage() {
    # Print the command-line help text to stdout.
    # Using 'cat << EOF' we can easily output a multiline text. This is much
    # better than using 'echo' for each line or using '\n' to create a new line.
    cat <<EOF
tmpmail
tmpmail -h | --version
tmpmail -g [ADDRESS]
tmpmail [-t | -b BROWSER] -r | ID
When called with no option and no argument, tmpmail lists the messages in
the inbox and their numeric IDs. When called with one argument, tmpmail
shows the email message with specified ID.
-b, --browser BROWSER
Specify BROWSER that is used to render the HTML of
the email (default: w3m)
--clipboard-cmd COMMAND
Specify the COMMAND to use for copying the email address to your
clipboard (default: xclip -selection c)
-c, --copy
Copy the email address to your clipboard
-d, --domains
Show list of available domains
-g, --generate [ADDRESS]
Generate a new email address, either the specified ADDRESS, or
randomly create one
-h, --help
Show help
-r, --recent
View the most recent email message
-t, --text
View the email as raw text, where all the HTML tags are removed.
Without this option, HTML is used.
--version
Show version
EOF
}
get_list_of_domains() {
    # Fetch the list of email domains offered by the 1secmail API and print
    # them as a single space-separated line.
    data=$(curl -sL "$tmpmail_api_url?action=getDomainList")
    # Number of available domains
    data_length=$(printf %s "$data" | jq length)
    # An empty JSON array means the API returned no usable domains.
    # NOTE(review): the error is printed to stdout and 'exit' is called with
    # no status (exits 0) — confirm whether stderr and a non-zero exit are
    # intended here.
    [ "$data_length" -eq 0 ] && echo "1secmail API error for getting domains list" && exit
    # Getting rid of quotes, braces and replace comma with space
    printf "%s" "$data" | tr -d "[|]|\"" | tr "," " "
}
show_list_of_domains() {
    # Print each available domain on its own line, prefixed with "- ",
    # under a short heading. The domains arrive as one space-separated
    # line, so split on spaces before adding the bullet.
    domains=$(get_list_of_domains | tr " " "\n" | sed "s/^/- /g")
    printf "List of available domains: \n%s\n" "$domains"
}
generate_email_address() {
    # There are 2 ways which this function is called in this script.
    #   [1] The user wants to generate a new email and runs 'tmpmail --generate'
    #   [2] The user runs 'tmpmail' to check the inbox, but /tmp/tmpmail/email_address
    #       is empty or nonexistent. Therefore a new email gets automatically
    #       generated before showing the inbox. But of course the inbox will
    #       be empty as the newly generated email address has not been
    #       sent any emails.
    #
    # When the function 'generate_email_address()' is called with the argument
    # 'true', it means that the function was called because the user
    # ran 'tmpmail --generate'.
    #
    # We need this variable so we can know whether or not we need to show the user
    # what the email was. <-- More about this can be found further down in this function.
    externally=${1:-false}
    # This variable lets generate_email_address know if the user has provided a custom
    # email address which they want to use. custom is set to false if $2 has no value.
    custom=${2:-false}
    # Generate a random email address.
    # This function is called whenever the user wants to generate a new email
    # address by running 'tmpmail --generate' or when the user runs 'tmpmail'
    # but /tmp/tmpmail/email_address is empty or nonexistent.
    #
    # We create a random username by reading from /dev/urandom and deleting
    # all the characters which are *not* alphanumeric, then taking the first
    # 11 characters and lowercasing them to form the username of the email
    # address.
    username=$(head /dev/urandom | LC_ALL=C tr -dc "[:alnum:]" | cut -c1-11 | tr "[:upper:]" "[:lower:]")
    # Generate a regex for a valid email address by fetching the list of supported domains
    valid_email_address_regex=$(printf "[a-z0-9]+@%s" "$(get_list_of_domains | tr ' ' '|')")
    username_black_list_regex="(abuse|webmaster|contact|postmaster|hostmaster|admin)"
    username_black_list="- abuse\n- webmaster\n- contact\n- postmaster\n- hostmaster\n- admin"
    # Randomly pick one of the domains mentioned above.
    domain=$(printf "%b" "$(get_list_of_domains)" | tr " " "\n" | randomize | tail -1)
    email_address="$username@$domain"
    # If the user provided a custom email address then use that email address
    if [ "$custom" != false ]; then
        email_address=$custom
        # Check if the user is using a username in the email address which appears
        # in the black list.
        if printf %b "$email_address" | grep -Eq "$username_black_list_regex"; then
            die "For security reasons, that username cannot be used. Here are the blacklisted usernames:\n$username_black_list"
        fi
        # Do a regex check to see if the email address provided by the user is a
        # valid email address
        if ! printf %b "$email_address" | grep -Eq "$valid_email_address_regex"; then
            die "Provided email is invalid. Must match $valid_email_address_regex"
        fi
    fi
    # Save the generated email address to the $tmpmail_email_address file
    # so that it can be reused whenever 'tmpmail' is run
    printf %s "$email_address" >"$tmpmail_email_address"
    # If this function was called because the user wanted to generate a new
    # email address, show them the email address
    [ "$externally" = true ] && cat "$tmpmail_email_address" && printf "\n"
}
get_email_address() {
    # Print the email address currently in use. This is called exactly once,
    # at startup, and its output is stored in $email_address.
    #
    # When the address file is missing or empty, no address exists yet, so
    # generate one before reading it back.
    if [ ! -s "$tmpmail_email_address" ]; then
        generate_email_address
    fi
    # The address is stored on the first line of the file.
    head -n 1 "$tmpmail_email_address"
}
list_emails() {
    # List all the received emails in a nicely formatted order
    #
    # Fetch the email data using 1secmail's API
    data=$(curl -sL "$tmpmail_api_url?action=getMessages&login=$username&domain=$domain")
    # Using 'jq' we get the length of the JSON data. From this we can determine whether or not
    # the email address has gotten any emails
    data_length=$(printf %s "$data" | jq length)
    # We are showing what email address is currently being used
    # in case the user has forgotten what the email address was.
    printf "[ Inbox for %s ]\n\n" "$email_address"
    # If the length of the data we got is 0, that means the email address
    # has not received any emails yet.
    [ "$data_length" -eq 0 ] && echo "No new mail" && exit
    # This is where we store all of our emails, which is then
    # displayed using 'column'
    inbox=""
    # Go through each mail that has been received
    index=1
    while [ $index -le "${data_length}" ]; do
        # Since arrays in JSON data start at 0, we must subtract
        # the value of $index by 1 so that we dont miss one of the
        # emails in the array
        mail_data=$(printf %s "$data" | jq -r ".[$index-1]")
        id=$(printf %s "$mail_data" | jq -r ".id")
        from=$(printf %s "$mail_data" | jq -r ".from")
        subject=$(printf %s "$mail_data" | jq -r ".subject")
        # The '||' are used as a divider for 'column'. 'column' will use this divider as
        # a point of reference to create the division. By default 'column' uses a blank space
        # but that would not work in our case as the email subject could have multiple white spaces
        # and 'column' would split the words that are separated by white space, in different columns.
        inbox="$inbox$id ||$from ||$subject\n"
        index=$((index + 1))
    done
    # Show the emails cleanly
    printf "%b" "$inbox" | column -t -s "||"
}
randomize() {
    # Shuffle the lines read from stdin. POSIX provides neither 'shuf' nor
    # 'sort -R', so instead: prefix every line with a random number, sort
    # numerically on that prefix, then strip the prefix off again.
    awk 'BEGIN { srand() } { print rand(), $0 }' |
        sort -n -k1 |
        cut -d' ' -f2
}
view_email() {
    # View an email by providing its ID
    #
    # The first argument provided to this function will be the ID of the email
    # that has been received
    email_id="$1"
    data=$(curl -sL "$tmpmail_api_url?action=readMessage&login=$username&domain=$domain&id=$email_id")
    # After the data is retrieved using the API, we have to check if we got any emails.
    # Luckily 1secmail's API is not complicated and returns 'Message not found' as plain text
    # if our email address has not received any emails.
    # If we received the error message from the API just quit because there is nothing to do
    [ "$data" = "Message not found" ] && die "Message not found"
    # We pass the $data to 'jq' which extracts the values
    from=$(printf %s "$data" | jq -r ".from")
    subject=$(printf %s "$data" | jq -r ".subject")
    html_body=$(printf %s "$data" | jq -r ".htmlBody")
    attachments=$(printf %s "$data" | jq -r ".attachments | length")
    # If you get an email that is in pure text, the .htmlBody field will be empty and
    # we will need to get the content from .textBody instead
    [ -z "$html_body" ] && html_body="<pre>$(printf %s "$data" | jq -r ".textBody")</pre>"
    # Create the HTML with all the information that is relevant and then
    # assigning that HTML to the variable html_mail. This is the best method
    # to create a multiline variable
    html_mail=$(cat <<EOF
<pre><b>To: </b>$email_address
<b>From: </b>$from
<b>Subject: </b>$subject</pre>
$html_body
EOF
)
    # Append a download link for every attachment, if the message has any.
    if [ ! "$attachments" = "0" ]; then
        html_mail="$html_mail<br><b>[Attachments]</b><br>"
        index=1
        while [ "$index" -le "$attachments" ]; do
            filename=$(printf %s "$data" | jq -r ".attachments | .[$index-1] | .filename")
            link="$tmpmail_api_url?action=download&login=$username&domain=$domain&id=$email_id&file=$filename"
            html_link="<a href=$link download=$filename>$filename</a><br>"
            if [ "$raw_text" = true ]; then
                # The actual url is way too long and does not look so nice in STDOUT.
                # Therefore we shorten it using is.gd so that it looks nicer.
                link=$(curl -s -F"url=$link" "https://is.gd/create.php?format=simple")
                html_mail="$html_mail$link [$filename]<br>"
            else
                html_mail="$html_mail$html_link"
            fi
            index=$((index + 1))
        done
    fi
    # Save the $html_mail into $tmpmail_html_email
    printf %s "$html_mail" >"$tmpmail_html_email"
    # If the '--text' flag is used, then use 'w3m' to convert the HTML of
    # the email to pure text by removing all the HTML tags
    [ "$raw_text" = true ] && w3m -dump "$tmpmail_html_email" && exit
    # Open up the HTML file using $browser. By default,
    # this will be 'w3m'.
    $browser "$tmpmail_html_email"
}
view_recent_email() {
    # View the most recent email.
    #
    # This is done by listing all the received email like you
    # normally see on the terminal when running 'tmpmail'.
    # We then grab the ID of the most recent email, which is on the
    # third line of the listing (list_emails prints a "[ Inbox ... ]"
    # header followed by a blank line before the first message row).
    mail_id=$(list_emails | head -3 | tail -1 | cut -d' ' -f 1)
    view_email "$mail_id"
}
copy_email_to_clipboard(){
    # Copy the email address that is being used to the user's clipboard.
    # $copy_to_clipboard_cmd is intentionally left unquoted so that a
    # command with arguments (e.g. "xclip -selection c") is word-split;
    # the file path is quoted so the redirection cannot break on special
    # characters in the path (shellcheck SC2086).
    $copy_to_clipboard_cmd < "$tmpmail_email_address"
}
die() {
    # Report a fatal error and terminate the script.
    #
    # $1 holds the error message; '%b' lets embedded escapes like '\n'
    # in the message expand. The message goes to stderr and the script
    # exits with a failing status.
    printf "%b\n" "Error: $1" >&2
    exit 1
}
main() {
    # Iterate over the list of dependencies and check if the user has them installed.
    # We are checking if $browser is installed instead of checking for 'w3m'. By doing
    # this, it allows the user to not have to install 'w3m' if they are using another
    # browser to view the HTML.
    #
    # dep_missing allows us to keep track of how many dependencies the user is missing
    # and then print out the missing dependencies once the checking is done.
    dep_missing=""
    # The main command from $copy_to_clipboard_cmd
    # Example:
    #   xclip -selection c
    #   ├───┘
    #   └ This part
    clipboard=${copy_to_clipboard_cmd%% *}
    for dependency in jq $browser $clipboard curl; do
        if ! command -v "$dependency" >/dev/null 2>&1; then
            # Append to our list of missing dependencies
            dep_missing="$dep_missing $dependency"
        fi
    done
    if [ "${#dep_missing}" -gt 0 ]; then
        printf %s "Could not find the following dependencies:$dep_missing"
        exit 1
    fi
    # Create the $tmpmail_dir directory and dont throw any errors
    # if it already exists
    mkdir -p "$tmpmail_dir"
    # Get the email address and save the value to the email_address variable
    email_address="$(get_email_address)"
    # ${VAR%PATTERN} removes the shortest match of PATTERN from the END of
    # a string. Here it strips '@domain', leaving the username.
    username=${email_address%@*}
    # ${VAR#PATTERN} removes the shortest match of PATTERN from the START of
    # a string. Here it strips 'username@', leaving the domain.
    domain=${email_address#*@}
    # If no arguments are provided, just list the emails
    [ $# -eq 0 ] && list_emails && exit
    # NOTE(review): options that take a value (--browser, --clipboard-cmd)
    # read $2 but only shift once; the value is skipped only because the
    # next loop iteration matches no pattern — a value containing digits
    # would be treated as an email ID. Confirm intended.
    while [ "$1" ]; do
        case "$1" in
            --help | -h) usage && exit ;;
            --domains | -d) show_list_of_domains && exit ;;
            --generate | -g) generate_email_address true "$2" && exit ;;
            --clipboard-cmd) copy_to_clipboard_cmd="$2" ;;
            --copy | -c) copy_email_to_clipboard && exit ;;
            --browser | -b) browser="$2" ;;
            --text | -t) raw_text=true ;;
            --version) echo "$version" && exit ;;
            --recent | -r) view_recent_email && exit ;;
            *[0-9]*)
                # If the user provides number as an argument,
                # assume its the ID of an email and try getting
                # the email that belongs to the ID
                view_email "$1" && exit
                ;;
            -*) die "option '$1' does not exist" ;;
        esac
        shift
    done
}
main "$@"

View file

@ -0,0 +1,183 @@
#!/usr/bin/env bash
# Source: https://github.com/vishnubob/wait-for-it
# Use this script to test if a given TCP host/port are available
# Name of this script as invoked (basename of $0), used in log messages.
WAITFORIT_cmdname=${0##*/}
echoerr() {
    # Print all arguments to stderr, unless quiet mode was requested.
    if [[ $WAITFORIT_QUIET -ne 1 ]]; then
        echo "$@" >&2
    fi
}
usage()
{
    # Print usage information to stderr and exit with a failing status.
    cat << USAGE >&2
Usage:
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
-h HOST | --host=HOST       Host or IP under test
-p PORT | --port=PORT       TCP port under test
Alternatively, you specify the host and port as host:port
-s | --strict               Only execute subcommand if the test succeeds
-q | --quiet                Don't output any status messages
-t TIMEOUT | --timeout=TIMEOUT
Timeout in seconds, zero for no timeout
-- COMMAND ARGS             Execute command with args after the test finishes
USAGE
    exit 1
}
wait_for()
{
    # Poll $WAITFORIT_HOST:$WAITFORIT_PORT once per second until a TCP
    # connection succeeds, then report the elapsed time. This function
    # itself never times out; the deadline is enforced by wait_for_wrapper
    # via the 'timeout' command.
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    else
        echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
    fi
    WAITFORIT_start_ts=$(date +%s)
    while :
    do
        # busybox environments use 'nc'; otherwise use bash's /dev/tcp.
        if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
            nc -z $WAITFORIT_HOST $WAITFORIT_PORT
            WAITFORIT_result=$?
        else
            (echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
            WAITFORIT_result=$?
        fi
        if [[ $WAITFORIT_result -eq 0 ]]; then
            WAITFORIT_end_ts=$(date +%s)
            echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
            break
        fi
        sleep 1
    done
    return $WAITFORIT_result
}
wait_for_wrapper()
{
    # Enforce the timeout by re-invoking this script with --child under the
    # 'timeout' command, running it in the background and waiting on it.
    # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
    if [[ $WAITFORIT_QUIET -eq 1 ]]; then
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    else
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    fi
    WAITFORIT_PID=$!
    # Forward Ctrl-C (SIGINT) to the child's process group.
    trap "kill -INT -$WAITFORIT_PID" INT
    wait $WAITFORIT_PID
    WAITFORIT_RESULT=$?
    if [[ $WAITFORIT_RESULT -ne 0 ]]; then
        echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    fi
    return $WAITFORIT_RESULT
}
# process arguments
while [[ $# -gt 0 ]]
do
    case "$1" in
        *:* )
        # Split "host:port" on the colon via word splitting.
        # NOTE(review): this cannot parse IPv6 addresses, which contain
        # colons themselves — confirm whether IPv6 support is expected.
        WAITFORIT_hostport=(${1//:/ })
        WAITFORIT_HOST=${WAITFORIT_hostport[0]}
        WAITFORIT_PORT=${WAITFORIT_hostport[1]}
        shift 1
        ;;
        --child)
        # Internal flag: this invocation is the re-exec'd child started by
        # wait_for_wrapper under 'timeout'.
        WAITFORIT_CHILD=1
        shift 1
        ;;
        -q | --quiet)
        WAITFORIT_QUIET=1
        shift 1
        ;;
        -s | --strict)
        WAITFORIT_STRICT=1
        shift 1
        ;;
        -h)
        WAITFORIT_HOST="$2"
        if [[ $WAITFORIT_HOST == "" ]]; then break; fi
        shift 2
        ;;
        --host=*)
        WAITFORIT_HOST="${1#*=}"
        shift 1
        ;;
        -p)
        WAITFORIT_PORT="$2"
        if [[ $WAITFORIT_PORT == "" ]]; then break; fi
        shift 2
        ;;
        --port=*)
        WAITFORIT_PORT="${1#*=}"
        shift 1
        ;;
        -t)
        WAITFORIT_TIMEOUT="$2"
        if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
        shift 2
        ;;
        --timeout=*)
        WAITFORIT_TIMEOUT="${1#*=}"
        shift 1
        ;;
        --)
        # Everything after '--' is the command to exec once the port is up.
        shift
        WAITFORIT_CLI=("$@")
        break
        ;;
        --help)
        usage
        ;;
        *)
        echoerr "Unknown argument: $1"
        usage
        ;;
    esac
done
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
    echoerr "Error: you need to provide a host and port to test."
    usage
fi
# Defaults for anything not set on the command line.
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
# Check to see if timeout is from busybox?
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
WAITFORIT_BUSYTIMEFLAG=""
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
    WAITFORIT_ISBUSY=1
    # Check if busybox timeout uses -t flag
    # (recent Alpine versions don't support -t anymore)
    if timeout &>/dev/stdout | grep -q -e '-t '; then
        WAITFORIT_BUSYTIMEFLAG="-t"
    fi
else
    WAITFORIT_ISBUSY=0
fi
# Child invocations wait directly; the parent wraps the wait in 'timeout'
# (unless the timeout is zero, meaning "wait forever").
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
    wait_for
    WAITFORIT_RESULT=$?
    exit $WAITFORIT_RESULT
else
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        wait_for_wrapper
        WAITFORIT_RESULT=$?
    else
        wait_for
        WAITFORIT_RESULT=$?
    fi
fi
# NOTE(review): comparing an array with != "" tests only its first element;
# this works here because a command was either captured after '--' or not.
if [[ $WAITFORIT_CLI != "" ]]; then
    if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
        echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
        exit $WAITFORIT_RESULT
    fi
    exec "${WAITFORIT_CLI[@]}"
else
    exit $WAITFORIT_RESULT
fi

View file

@ -1,39 +0,0 @@
#!/usr/bin/env bash
### Source .bashrc if it is present
if [ -f "$HOME/.bashrc" ]; then
. "$HOME/.bashrc"
elif [ -f "$HOME/.config/shell/exports.sh" ]; then
. "$HOME/.config/shell/exports.sh"
fi
### XDG
export XDG_CONFIG_HOME="${XDG_CONFIG_HOME:-$HOME/.config}"
export XDG_CACHE_HOME="${XDG_CACHE_HOME:-$HOME/.cache}"
export XDG_DATA_HOME="${XDG_DATA_HOME:-$HOME/.local/share}"
### Ensure Homebrew PATHs
if [ -d "/home/linuxbrew/.linuxbrew/bin" ]; then
export HOMEBREW_PREFIX="/home/linuxbrew/.linuxbrew"
export HOMEBREW_CELLAR="/home/linuxbrew/.linuxbrew/Cellar"
export HOMEBREW_REPOSITORY="/home/linuxbrew/.linuxbrew/Homebrew"
export PATH="/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin${PATH+:$PATH}"
export MANPATH="/home/linuxbrew/.linuxbrew/share/man${MANPATH+:$MANPATH}:"
export INFOPATH="/home/linuxbrew/.linuxbrew/share/info:${INFOPATH:-}"
export WHALEBREW_INSTALL_PATH="/home/linuxbrew/.linuxbrew/whalebrew"
elif [ -f "/usr/local/bin/brew" ]; then
eval "$(/usr/local/bin/brew shellenv)"
elif [ -f "/opt/homebrew/bin/brew" ]; then
eval "$(/opt/homebrew/bin/brew shellenv)"
fi
export HOMEBREW_NO_ANALYTICS=1
### SDKMan
export SDKMAN_DIR="${XDG_DATA_HOME:-$HOME/.local/share}/sdkman"
### Volta
export VOLTA_HOME="${XDG_DATA_HOME:-$HOME/.local/share}/volta"
export PATH="$VOLTA_HOME/bin:$PATH"
### Ensure ~/.local/bin in PATH
export PATH="$HOME/.local/bin:$PATH"

View file

@ -1,3 +0,0 @@
{{- if stat (joinPath .chezmoi.homeDir ".local" "share" "extract" "extract.sh") -}}
{{ .chezmoi.homeDir }}/.local/share/extract/extract.sh
{{- end -}}

View file

@ -1,3 +0,0 @@
{{- /* Render the path to the vendored shell-safe-rm script only when it exists locally; otherwise emit nothing. */ -}}
{{- if stat (joinPath .chezmoi.homeDir ".local" "share" "shell-safe-rm" "bin" "rm.sh") -}}
{{ .chezmoi.homeDir }}/.local/share/shell-safe-rm/bin/rm.sh
{{- end -}}

View file

@ -1,3 +0,0 @@
{{- /* Render the path to the vendored shell-safe-rm script only when it exists locally; otherwise emit nothing. */ -}}
{{- if stat (joinPath .chezmoi.homeDir ".local" "share" "shell-safe-rm" "bin" "rm.sh") -}}
{{ .chezmoi.homeDir }}/.local/share/shell-safe-rm/bin/rm.sh
{{- end -}}

View file

@ -1,3 +0,0 @@
{{- /* Render the path to the vendored rsync-time-backup script only when it exists locally; otherwise emit nothing. */ -}}
{{- if stat (joinPath .chezmoi.homeDir ".local" "share" "rsync-time-backup" "rsync-time-backup.sh") -}}
{{- /* Fix: a '/' was missing between homeDir and .local, which rendered a broken path like /home/userX.local/... */ -}}
{{ .chezmoi.homeDir }}/.local/share/rsync-time-backup/rsync-time-backup.sh
{{- end -}}

View file

@ -1,3 +0,0 @@
{{- /* Render the path to the vendored tmpmail script only when it exists locally; otherwise emit nothing. */ -}}
{{- if stat (joinPath .chezmoi.homeDir ".local" "share" "tmpmail" "tmpmail") -}}
{{ .chezmoi.homeDir }}/.local/share/tmpmail/tmpmail
{{- end -}}

View file

@ -1,3 +0,0 @@
{{- /* Render the path to the vendored wait-for-it script only when it exists locally; otherwise emit nothing. */ -}}
{{- if stat (joinPath .chezmoi.homeDir ".local" "share" "wait-for-it" "wait-for-it.sh") -}}
{{ .chezmoi.homeDir }}/.local/share/wait-for-it/wait-for-it.sh
{{- end -}}

View file

@ -3780,6 +3780,19 @@ softwarePackages:
brew: git-bug
pkg-freebsd: git-bug
scoop: git-bug
# Git Branchless: a suite of branchless Git workflow tools.
git-branchless:
  _bin: git-branchless
  _github: https://github.com/arxanas/git-branchless
  _name: Git Branchless
  # Fixed the suggested command's quoting: the previous nested double quotes
  # collapsed, printing `echo >> ~/.bashrc alias git=...` (redirection first,
  # inner quotes lost) instead of a copy-pasteable command.
  _post: |
    echo "TODO: Consider implementing:"
    echo "echo \"alias git='git-branchless wrap --'\" >> ~/.bashrc"
  apk: git-branchless
  brew: git-branchless
  # --locked installs with the crate's pinned Cargo.lock dependencies.
  cargo: --locked git-branchless
  pacman: git-branchless
  port: git-branchless
  scoop: git-branchless
git-filter-repo:
_bin: git-filter-repo
_desc: '[gitfilterrepo](https://github.com/newren/git-filter-repo) is a versatile tool for rewriting history'
@ -6102,7 +6115,18 @@ softwarePackages:
bpkg: xwmx/nb
brew: nb
npm: nb.sh
_service: null
# vdirsyncer — synchronizes calendars and contacts (CalDAV/CardDAV) per its
# GitHub project; installed via pipx.
vdirsyncer:
  _bin: vdirsyncer
  _github: https://github.com/pimutils/vdirsyncer
  _name: vdirsyncer
  # NOTE(review): per the script's own last line, this hook is intended for
  # _post and sits in _pre only for testing — confirm before relying on it.
  _pre: |
    echo "TODO - Implement the following command after automating the process of setting up contact groups / calendars to sync"
    echo "vdirsyncer discover contacts"
    echo "vdirsyncer sync contacts"
    echo "TODO - Add to cron"
    echo "*/30 * * * * /usr/local/bin/vdirsyncer sync > /dev/null"
    echo "This should be in _post instead of _pre - it is here for testing purposes"
  pipx: vdirsyncer
charm:
_bin: charm
_desc: The Charm Tool and Library
@ -9247,6 +9271,11 @@ softwarePackages:
flatpak: com.microsoft.Teams
snap: teams-for-linux
yay: teams-for-linux
# has — checks availability/version of CLI tools, per its GitHub project.
has:
  _bin: has
  _github: https://github.com/kdabir/has
  _name: Has
  # Installed from the author's personal tap (not in homebrew-core).
  brew: kdabir/tap/has
teamviewer:
_bin: teamviewer
_desc: '[TeamViewer](https://www.teamviewer.com/en-us/) is a proprietary software for remote access to as well as remote control and maintenance of computers and other devices, which was first released in 2005. The functionality has been expanded step by step, most recently for example through the integration of TeamViewer Meeting.'
@ -10011,6 +10040,11 @@ softwarePackages:
_name: Wails
ansible: professormanhattan.wails
go: github.com/wailsapp/wails/cmd/wails@latest
# safe-rm — a safer rm replacement (see _github project page); installed
# from npm as the `safe-rm` package.
safe-rm:
  _bin: safe-rm
  _github: https://github.com/kaelzhang/shell-safe-rm
  _name: safe-rm
  npm: safe-rm
warp:
_bin: warp-cli
_desc: '[Cloudflare WARP Client](https://cloudflarewarp.com/) is a free app that makes your Internet safer'