Initialize Project

Kishan Takoordyal 2025-08-11 21:41:44 +04:00
commit a871ef846e
Signed by: root
GPG Key ID: 938988C2E73F190F
30 changed files with 2955 additions and 0 deletions

33  .ansible-lint  Normal file

@@ -0,0 +1,33 @@
---
profile: production
offline: false
var_naming_pattern: '^[a-z_][a-z0-9_]*$'
skip_list:
- var-naming[no-role-prefix]
mock_modules:
- community.general.npm
- community.general.timezone
- community.general.gem
- community.general.consul
- community.general.nomad_job
- community.hashi_vault.vault_kv2_get
- community.hashi_vault.vault_kv2_write
enable_list:
- args
- empty-string-compare
- no-log-password
- no-same-owner
- yaml
exclude_paths:
- .git/
- .gitea/
- .github/
- .trunk/
- .vscode/
- venv/
- inventory/mscc-demo/group_vars/all/custom.yml

40  .gitea/workflows/main.yml  Normal file

@@ -0,0 +1,40 @@
name: Run tests and ansible-playbook
run-name: Run tests and ansible-playbook
on:
push:
branches:
- master
workflow_dispatch:
# inputs:
# logLevel:
# description: 'Log level'
# required: true
# default: 'info'
# type: choice
# options:
# - info
# - warning
# - debug
jobs:
perform-ansible-lint:
name: Perform Ansible Lint
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install pip, venv and git
run: |
apt update -qy
apt install -y python3-pip python3.10-venv git
- name: Configure safe directory
run: git config --global --add safe.directory '*'
- name: Run setup_venv.sh
run: |
chmod +x setup_venv.sh
./setup_venv.sh
- name: Run ansible-lint
run: |
source ./venv/bin/activate
PY_COLORS=1 ANSIBLE_FORCE_COLOR=1 ANSIBLE_CONFIG=./ansible.cfg ansible-lint --exclude=inventory
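
The same lint run can be reproduced locally before pushing; a minimal sketch, assuming setup_venv.sh has already been executed from the repository root:

source ./venv/bin/activate
ANSIBLE_CONFIG=./ansible.cfg ansible-lint --exclude=inventory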

9  .gitignore  vendored  Normal file

@@ -0,0 +1,9 @@
venv
ansible_collections
.vscode
.trunk
*.DS_Store
*.retry
*.pyc
.venv
.vault_password

2  .prettierignore  Normal file

@@ -0,0 +1,2 @@
inventory/mscc-demo/group_vars/all/custom.yml
inventory/mscc-demo/group_vars/all/vault.yml

15  ansible.cfg  Normal file

@@ -0,0 +1,15 @@
[inventory]
enable_plugins = ini
[defaults]
ansible_managed = "Managed by ansible automation"
library = library/
inventory = ./inventory/mscc-demo
roles_path = playbooks/roles
collections_path = ./
host_key_checking = false
retry_files_enabled = false
pipelining = true
# interpreter_python = /usr/bin/python3
yaml_valid_extensions = .yaml, .yml, .vault
vault_password_file = ./.vault_password

@@ -0,0 +1,147 @@
---
access:
admin:
root: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCqxAULC+ij8V/6cF/EfHXyl6Ts28OGSMwcKg2sqXJZaO/B/BM4KYzx6kJpiCLdwDiJEd/TrYtNB1l/Yq9EVb2QMZZzposEi3mzhQuSJjLmRkxPk4BO0cd+RUl2deZ8iNw4v/7Vb67Ol6ieBbVt9yKzazNRPUJwCCIaAKWDYUjd8q1SyEPdx2oeFyo7M44BZBQcbcIzuhZ3v9H8Hp/6cBOfuJxpqZxXMRmhniaWCduBnAiIPnwXMIBNSr/L/q3B1RiOTB+OY5KaUZCLirgzLHgauIx22LFCdWLrVYYHSh6cXtN/HTqDi8USF9lbQXevjdh7lZk8SdHKLy3Ca+CgvxDox8TXWGh5RXGmqwObLw8sxvW9x47JKjmre9QkVeO1AgQBm0FVSAd/KIBir4XIrumzvHqRsZBMQLWQ4kJU68ZPALZE2oOuCBuesjhF9YOpsEJcMonnoQp1lWq4rmEMGqlwdmLu1umh+iY0FTbsfJb4lDsnLopWeLTplhFZ3OjODSMFZ2Mhvx6AMqACPi4nhXQGRkMaHSiM9LdrcuE6DCWJHkJpaWagScNY2mzYZfqSlsNu5SJTcqeUB+HH81gI4kIdhdbAYDVOpoF66ljDl74VZgC+lhiH46eLCcT3piR8n08l5MVuZpqUeee8HBQKNaQyCu+YcZAjMJ3simICpVFwQ== kishan@kinesis.games # noqa yaml[line-length]
edgeking810: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDCqxAULC+ij8V/6cF/EfHXyl6Ts28OGSMwcKg2sqXJZaO/B/BM4KYzx6kJpiCLdwDiJEd/TrYtNB1l/Yq9EVb2QMZZzposEi3mzhQuSJjLmRkxPk4BO0cd+RUl2deZ8iNw4v/7Vb67Ol6ieBbVt9yKzazNRPUJwCCIaAKWDYUjd8q1SyEPdx2oeFyo7M44BZBQcbcIzuhZ3v9H8Hp/6cBOfuJxpqZxXMRmhniaWCduBnAiIPnwXMIBNSr/L/q3B1RiOTB+OY5KaUZCLirgzLHgauIx22LFCdWLrVYYHSh6cXtN/HTqDi8USF9lbQXevjdh7lZk8SdHKLy3Ca+CgvxDox8TXWGh5RXGmqwObLw8sxvW9x47JKjmre9QkVeO1AgQBm0FVSAd/KIBir4XIrumzvHqRsZBMQLWQ4kJU68ZPALZE2oOuCBuesjhF9YOpsEJcMonnoQp1lWq4rmEMGqlwdmLu1umh+iY0FTbsfJb4lDsnLopWeLTplhFZ3OjODSMFZ2Mhvx6AMqACPi4nhXQGRkMaHSiM9LdrcuE6DCWJHkJpaWagScNY2mzYZfqSlsNu5SJTcqeUB+HH81gI4kIdhdbAYDVOpoF66ljDl74VZgC+lhiH46eLCcT3piR8n08l5MVuZpqUeee8HBQKNaQyCu+YcZAjMJ3simICpVFwQ== kishan@kinesis.games # noqa yaml[line-length]
worker: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDeT641t7758iHj+/tfSY8/NBd8pyirVtBvR9vsidAB7rBXvjIm6CAga+DuFjHi34NSn+L8VyaYhiwo9+Z1dxbMPV/eEaRMLuIdzrawPzUS7sap8H2nAAkaQH8vkizlWL+7F64TQ6PCBKMoFJ8aZarRfMPWYoLL/BNssssWCgqy7zlatorSkSs+dnn0cclLJWLwaaRUVoOcPl6VK+aVtLY4DIjq29XmvPJiIyN8bWSqnqbJTIbYJmfME2068jV8jdQ4+P/Imda4VrzjPDt8BFKzdvGEp5z1vlhrEfBrJBghkDawVt/j//CiiAXqnvbQzdQpRtr8u1EDDTfN5p6tN7/P3R9NGrh/M7dHJj7Q5opdOsTO74xpciuyCj9CUiQSp39pUWYyKDA+eDuVDxkSiYbfxnXD/nxQ3+PdJgXN2QJeQKK6+dYzdVOiCWW6ndcxqfxTUTSMthLNYoon/JN05iTu/TEKVyVh9tb9n2C6HXxPhbSGNu/DIMQKczLvq87MV7SgsWhUo+qONhIOZ5aJBPEREoEfrbP+D0d763cFTGI/95ryjW3omeRJDSNX2KPsT01Se+KSax4C45cVSLfdb+jk3DpRF28y0dH9PAr2C8Bud1WplNMZX4KNhQDq6eTID5Nc8AviclM2lJDBd356ElWnMkxJECy3V+f+8U7Hk0i8fw== # noqa yaml[line-length]
tenant: kinesis-nomad
issuer_name: kinesisgames
internal_ip_start_bits: '10.104.0'
nomad_cidr: '10.104.0.0/20'
root_ssh_key_size: 4096
timezone: Indian/Mauritius
swap_file_state: present
swap_file_path: /swapfile
swap_file_size_mb: '2000'
swap_file_existing_size_mb: '0'
swap_swappiness: '40'
swap_file_create_command: 'dd if=/dev/zero of={{ swap_file_path }} bs=1M count={{ swap_file_size_mb }}'
letsencrypt_email_address: kishan@konnect.dev
cloudflare_email_address: '{{ letsencrypt_email_address }}'
traefik_web_ui_addr: traefik.mscc.kinesis.world
traefik_auth_users:
- admin
- edgeking810
traefik_services:
- name: traefik-service
host: traefik.mscc.kinesis.world
service: 'http://127.0.0.1:8081'
auth: true
traefik_tcp_routers:
- name: gitea-ssh
host: ssh.gitea.mscc.kinesis.world
entrypoint: gitea_ssh
source_port: 4444
target_port: 2222
target_host: 127.0.0.1
base_docker_volumes_dir: '/opt/docker/volumes'
docker_containers:
- name: portainer
image: 'portainer/portainer-ce:2.30.0'
ports: ['8000:8000', '9000:9000', '9443:9443']
network:
domain_name: portainer.mscc.kinesis.world
custom_port: 9000
volumes:
- '/var/run/docker.sock:/var/run/docker.sock'
- '{{ base_docker_volumes_dir }}/portainer/data:/data'
recreate: false
network_mode: host
- name: gitea-db
image: mysql:8
ports: []
variables:
MYSQL_ROOT_PASSWORD: '{{ secrets["gitea-db"]["root_password"] }}'
MYSQL_DATABASE: '{{ secrets["gitea-db"]["database"] }}'
MYSQL_USER: '{{ secrets["gitea-db"]["username"] }}'
MYSQL_PASSWORD: '{{ secrets["gitea-db"]["password"] }}'
networks:
- name: gitea-net
aliases: ['gitea-db-svc']
volumes:
- '{{ base_docker_volumes_dir }}/gitea-db:/var/lib/mysql'
- name: gitea
image: gitea/gitea:1.23.8
ports: ['2222:22']
variables:
GITEA__database__NAME: '{{ secrets["gitea-db"]["database"] }}'
GITEA__database__USER: '{{ secrets["gitea-db"]["username"] }}'
GITEA__database__PASSWD: '{{ secrets["gitea-db"]["password"] }}'
GITEA__database__DB_TYPE: mysql
GITEA__database__HOST: 'gitea-db-svc:3306'
networks:
- name: gitea-net
network:
auth: false
domain_name: gitea.mscc.kinesis.world
custom_port: 3000
volumes:
- '{{ base_docker_volumes_dir }}/gitea-data:/data'
- name: gitea-runner
image: gitea/act_runner:0.2.11
variables:
CONFIG_FILE: /config.yaml
GITEA_INSTANCE_URL: https://gitea.mscc.kinesis.world/
GITEA_RUNNER_REGISTRATION_TOKEN: '{{ secrets["gitea-runner"]["registration_token"] }}'
data:
- dest: '{{ base_docker_volumes_dir }}/gitea-runner/config.yaml'
content: |
log:
level: info
runner:
file: .runner
capacity: 2
envs: {}
env_file: .env
timeout: 3h
insecure: false
fetch_timeout: 5s
fetch_interval: 2s
labels:
- "ubuntu-latest:docker://gitea/runner-images:ubuntu-latest"
- "ubuntu-22.04:docker://gitea/runner-images:ubuntu-22.04"
- "ubuntu-20.04:docker://gitea/runner-images:ubuntu-20.04"
- "node-latest:docker://node:latest"
- "rust-latest:docker://rust:latest"
- "docker-19-dind:docker://docker:19.03.12"
cache:
enabled: true
dir: /tmp/.cache
host: ""
port: 0
external_server: ""
container:
network: bridge
privileged: false
options:
workdir_parent:
valid_volumes: []
docker_host: ""
force_pull: true
force_rebuild: false
host:
workdir_parent:
networks:
- name: gitea-net
volumes:
- '{{ base_docker_volumes_dir }}/gitea-runner/config.yaml:/config.yaml'
- '/var/run/docker.sock:/var/run/docker.sock'
cron_jobs:
- name: backup-gitea-db
job: 'docker exec gitea-db /bin/bash -c "mkdir -p /var/lib/mysql/backups/ && find /var/lib/mysql/backups/ -mtime +1 -type f -delete && mysqldump -uroot -p''{{ secrets["gitea-db"]["root_password"] }}'' {{ secrets["gitea-db"]["database"] }} > /var/lib/mysql/backups/dump.$(date +%F_%R).sql"'
minute: '0'
hour: '*/12'

@@ -0,0 +1,25 @@
$ANSIBLE_VAULT;1.1;AES256
31303065633266656263636132653162306461306261356365363266356438653061333839323062
6333363730613039366532643533316133363933303134340a633135363361386162356361376365
32653031393338633661653266306462646130356532333036343731316238663032333063353332
3536363432616361660a326231396139306431353633373463396430343738623962386363363562
63663062376134636664656238643663666161373061656331613436656331323832366462393165
66383538326663613364383436356231346237633231383365653362313736303439623061373030
36643931326463343030353830626364643539383365333239633161366234343766333464633864
32313134363661343539663363396236333430363264623038636633383431643931303832313831
65363064623563363033616161313664643632343462636665303364326633383730343561643639
34636338356334666462353036666131363263386366336162613338356438303733313964633539
34396363633238366361313433333932316565613864333961646162376232336134353262646539
61643561366266643662376533366664346637663831353461333462376338393431306139343539
31613865363062623963393462613464363362396565623736313266323836613961366266323962
63323638393163323261643933353032303765386162653834646236313336623431333936303137
64366134613261336561653763356562363865396339663033626566613339343435323066636631
35343366373730363466303032616564303063376639333332396665626336343832636230643637
38353230373539343131326331623736326632653962386661353639303432323361633736303937
31646166343861303534646336663232353265336330656537613039373962643966613432336137
35633138373164346432343238353033376164306236323138613638393762363335653930613461
34323964343038643435626132656161393733356261383937303366386462626665653039356138
61646465386564393033613735343066336138326465383130326162323363373339336262313537
32393136626564313132613166303536306361366335373264343435643461376636626461613635
63636436646566613534666565656535376333386337393564313038346535633163396265633032
33373566356632653861
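
With vault_password_file pointing at ./.vault_password in ansible.cfg, the encrypted blob above can be created or edited without retyping the passphrase; a minimal sketch, assuming this file is the inventory/mscc-demo/group_vars/all/vault.yml entry listed in .prettierignore:

printf '%s\n' 'your-vault-passphrase' > .vault_password   # placeholder passphrase; the real file is gitignored
chmod 600 .vault_password
ansible-vault edit inventory/mscc-demo/group_vars/all/vault.yml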

@@ -0,0 +1,6 @@
[all:vars]
ansible_ssh_private_key_file=~/.ssh/id_rsa
ansible_ssh_extra_args="-o IdentitiesOnly=yes -o StrictHostKeyChecking=no"
[server]
mscc-demo-instance ansible_host=128.199.78.165 ansible_ssh_user=edgeking810

22  playbooks/main.yml  Normal file

@@ -0,0 +1,22 @@
---
- name: Include custom vars and configure access
hosts: all
roles:
- role: custom_vars
tags: always
- role: access
tags: access
- name: Common settings and configuration
hosts: all
roles:
- role: common
tags: common
- name: Deploy traefik and other docker containers
hosts: all
roles:
- role: traefik
tags: traefik
- role: docker
tags: docker
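
A hedged example of invoking this playbook against the mscc-demo inventory wired up in ansible.cfg, either in full or limited to the tags declared above:

source ./venv/bin/activate
ANSIBLE_CONFIG=./ansible.cfg ansible-playbook playbooks/main.yml
ANSIBLE_CONFIG=./ansible.cfg ansible-playbook playbooks/main.yml --tags traefik,docker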

@@ -0,0 +1,3 @@
[Definition]
# 24 hours
dbpurgeage = 86400

@@ -0,0 +1,14 @@
[INCLUDES]
[DEFAULT]
# "maxretry" is the number of failures before a host gets banned.
maxretry = 4
backend = systemd
[sshd]
[sshd-ddos]
[dropbear]
[selinux-ssh]
[nginx-http-auth]
[nginx-botsearch]
[recidive]
maxretry = 3
enabled = true
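
Once the role that ships these jails has installed and restarted fail2ban, the active jails can be checked on the host with the standard client; a short sketch:

sudo fail2ban-client status
sudo fail2ban-client status sshd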

@@ -0,0 +1,123 @@
# $OpenBSD: sshd_config,v 1.103 2018/04/09 20:41:22 tj Exp $
# This is the sshd server system-wide configuration file. See
# sshd_config(5) for more information.
# This sshd was compiled with PATH=/usr/bin:/bin:/usr/sbin:/sbin
# The strategy used for options in the default sshd_config shipped with
# OpenSSH is to specify options with their default value where
# possible, but leave them commented. Uncommented options override the
# default value.
Include /etc/ssh/sshd_config.d/*.conf
Port 22
#AddressFamily any
#ListenAddress 0.0.0.0
#ListenAddress ::
#HostKey /etc/ssh/ssh_host_rsa_key
#HostKey /etc/ssh/ssh_host_ecdsa_key
#HostKey /etc/ssh/ssh_host_ed25519_key
# Ciphers and keying
#RekeyLimit default none
# Logging
#SyslogFacility AUTH
#LogLevel INFO
# Authentication:
#LoginGraceTime 2m
PermitRootLogin no
#StrictModes yes
#MaxAuthTries 6
#MaxSessions 10
#PubkeyAuthentication yes
# Expect .ssh/authorized_keys2 to be disregarded by default in future.
#AuthorizedKeysFile .ssh/authorized_keys .ssh/authorized_keys2
#AuthorizedPrincipalsFile none
#AuthorizedKeysCommand none
#AuthorizedKeysCommandUser nobody
# For this to work you will also need host keys in /etc/ssh/ssh_known_hosts
#HostbasedAuthentication no
# Change to yes if you don't trust ~/.ssh/known_hosts for
# HostbasedAuthentication
#IgnoreUserKnownHosts no
# Don't read the user's ~/.rhosts and ~/.shosts files
#IgnoreRhosts yes
# To disable tunneled clear text passwords, change to no here!
PasswordAuthentication no
PermitEmptyPasswords no
# Change to yes to enable challenge-response passwords (beware issues with
# some PAM modules and threads)
ChallengeResponseAuthentication no
# Kerberos options
#KerberosAuthentication no
#KerberosOrLocalPasswd yes
#KerberosTicketCleanup yes
#KerberosGetAFSToken no
# GSSAPI options
#GSSAPIAuthentication no
#GSSAPICleanupCredentials yes
#GSSAPIStrictAcceptorCheck yes
#GSSAPIKeyExchange no
# Set this to 'yes' to enable PAM authentication, account processing,
# and session processing. If this is enabled, PAM authentication will
# be allowed through the ChallengeResponseAuthentication and
# PasswordAuthentication. Depending on your PAM configuration,
# PAM authentication via ChallengeResponseAuthentication may bypass
# the setting of "PermitRootLogin yes".
# If you just want the PAM account and session checks to run without
# PAM authentication, then enable this but set PasswordAuthentication
# and ChallengeResponseAuthentication to 'no'.
UsePAM yes
#AllowAgentForwarding yes
#AllowTcpForwarding yes
#GatewayPorts no
X11Forwarding yes
#X11DisplayOffset 10
#X11UseLocalhost yes
#PermitTTY yes
PrintMotd no
#PrintLastLog yes
#TCPKeepAlive yes
#PermitUserEnvironment no
#Compression delayed
#ClientAliveInterval 0
#ClientAliveCountMax 3
#UseDNS no
#PidFile /var/run/sshd.pid
#MaxStartups 10:30:100
#PermitTunnel no
#ChrootDirectory none
#VersionAddendum none
# no default banner path
#Banner none
# Allow client to pass locale environment variables
AcceptEnv LANG LC_*
# override default of no subsystems
Subsystem sftp /usr/lib/openssh/sftp-server
# Example of overriding settings on a per-user basis
#Match User anoncvs
# X11Forwarding no
# AllowTcpForwarding no
# PermitTTY no
# ForceCommand cvs server

@@ -0,0 +1,25 @@
---
- name: Restart sshd
ansible.builtin.service:
name: sshd
state: restarted
become: true
- name: Restart ssh
ansible.builtin.service:
name: ssh
state: restarted
become: true
- name: Enable and start fail2ban
ansible.builtin.service:
name: fail2ban
state: started
enabled: true
become: true
- name: Restart fail2ban
ansible.builtin.service:
name: fail2ban
state: restarted
become: true

@@ -0,0 +1,90 @@
---
- name: Create admin group
ansible.builtin.group:
name: admin
state: present
become: true
- name: Allow admin sudo access without password
ansible.builtin.lineinfile:
dest: /etc/sudoers
regexp: '^%admin'
line: '%admin ALL=(ALL) NOPASSWD:ALL'
backup: true
validate: visudo -cf %s
become: true
- name: Check if zsh is installed
ansible.builtin.stat:
path: /bin/zsh
register: access_zsh_installed
- name: Create accounts
ansible.builtin.user:
name: '{{ item.key }}'
password: ''
shell: '{{ "/bin/zsh" if access_zsh_installed.stat.exists else "/bin/bash" }}'
group: admin
generate_ssh_key: false
state: present
loop: '{{ access.admin | dict2items }}'
become: true
- name: Configure ssh keys
ansible.posix.authorized_key:
user: '{{ item.key }}'
key: '{{ item.value }}'
state: present
loop: '{{ access.admin | dict2items }}'
become: true
- name: Replace default sshd_config file
ansible.builtin.copy:
src: sshd_config
dest: /etc/ssh/sshd_config
mode: '0644'
become: true
notify:
- Restart sshd
- Restart ssh
- name: Install fail2ban
ansible.builtin.apt:
name: fail2ban
update_cache: true
state: present
become: true
notify: Enable and start fail2ban
- name: Configure fail2ban
ansible.builtin.copy:
src: '{{ item }}'
dest: '/etc/fail2ban/{{ item }}'
owner: root
group: root
mode: '0644'
become: true
loop:
- fail2ban.local
- jail.local
notify: Restart fail2ban
- name: Run handlers
ansible.builtin.meta: flush_handlers
- name: Modify inventory file to log in as the current user instead
ansible.builtin.lineinfile:
path: '{{ ansible_inventory_sources[0] }}/hosts'
regexp: '^{{ inventory_hostname }} .*'
line: '{{ inventory_hostname }} ansible_host={{ ansible_host }} ansible_ssh_user={{ lookup("env", "USER") }}' # noqa yaml[line-length]
loop: '{{ ansible_play_hosts }}'
delegate_to: localhost
- name: Add host to in-memory inventory
ansible.builtin.add_host:
hostname: '{{ inventory_hostname }}'
ansible_host: '{{ ansible_host }}'
ansible_ssh_user: '{{ lookup("env", "USER") }}'
- name: Refresh inventory
ansible.builtin.meta: refresh_inventory
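
Because this role disables root login and password authentication, it is worth confirming keyed access and passwordless sudo for one of the admin accounts before closing the bootstrap session; a hedged check against the demo host from the inventory:

ssh -o IdentitiesOnly=yes edgeking810@128.199.78.165 'id && sudo -n true && echo sudo-ok'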

@@ -0,0 +1,49 @@
---
common_packages: # noqa var-naming[no-role-prefix]
- ca-certificates
- apt-transport-https
- software-properties-common
- build-essential
- gnupg
- gnupg-agent
- mysql-client
- acl
- rsync
- zip
- unzip
- curl
- git
- lsof
- iputils-ping
- dnsutils
- iproute2
- python3
- python3-pip
- python3.11-venv
- jq
docker_packages: # noqa var-naming[no-role-prefix]
- docker-ce
- docker-ce-cli
- containerd.io
- docker-compose-plugin
zsh_packages: # noqa var-naming[no-role-prefix]
- zsh
- ruby
- ruby-dev
- libz-dev
- libiconv-hook1
- libiconv-hook-dev
- zlib1g-dev
- fzf
zsh_extensions: # noqa var-naming[no-role-prefix]
- repo: https://github.com/zsh-users/zsh-syntax-highlighting.git
dest: ~/.oh-my-zsh/custom/plugins/zsh-syntax-highlighting
- repo: https://github.com/junegunn/fzf.git
dest: ~/.fzf
- repo: https://github.com/zsh-users/zsh-autosuggestions.git
dest: ~/.oh-my-zsh/custom/plugins/zsh-autosuggestions
- repo: https://github.com/romkatv/powerlevel10k.git
dest: ~/.oh-my-zsh/custom/themes/powerlevel10k

File diff suppressed because it is too large

@@ -0,0 +1,133 @@
# Enable Powerlevel10k instant prompt. Should stay close to the top of ~/.zshrc.
# Initialization code that may require console input (password prompts, [y/n]
# confirmations, etc.) must go above this block; everything else may go below.
if [[ -r "${XDG_CACHE_HOME:-$HOME/.cache}/p10k-instant-prompt-${(%):-%n}.zsh" ]]; then
source "${XDG_CACHE_HOME:-$HOME/.cache}/p10k-instant-prompt-${(%):-%n}.zsh"
fi
# If you come from bash you might have to change your $PATH.
# export PATH=$HOME/bin:/usr/local/bin:$PATH
# Path to your oh-my-zsh installation.
export ZSH="$HOME/.oh-my-zsh"
ZSH_THEME="powerlevel10k/powerlevel10k"
POWERLEVEL10K_MODE="nerdfont-complete"
# Set list of themes to pick from when loading at random
# Setting this variable when ZSH_THEME=random will cause zsh to load
# a theme from this variable instead of looking in $ZSH/themes/
# If set to an empty array, this variable will have no effect.
# ZSH_THEME_RANDOM_CANDIDATES=( "robbyrussell" "agnoster" )
# Uncomment the following line to use case-sensitive completion.
# CASE_SENSITIVE="true"
# Uncomment the following line to use hyphen-insensitive completion.
# Case-sensitive completion must be off. _ and - will be interchangeable.
# HYPHEN_INSENSITIVE="true"
# Uncomment the following line to disable bi-weekly auto-update checks.
# DISABLE_AUTO_UPDATE="true"
# Uncomment the following line to automatically update without prompting.
DISABLE_UPDATE_PROMPT="true"
# Uncomment the following line to change how often to auto-update (in days).
# export UPDATE_ZSH_DAYS=13
# Uncomment the following line if pasting URLs and other text is messed up.
# DISABLE_MAGIC_FUNCTIONS="true"
# Uncomment the following line to disable colors in ls.
# DISABLE_LS_COLORS="true"
# Uncomment the following line to disable auto-setting terminal title.
# DISABLE_AUTO_TITLE="true"
# Uncomment the following line to enable command auto-correction.
# ENABLE_CORRECTION="true"
# Uncomment the following line to display red dots whilst waiting for completion.
COMPLETION_WAITING_DOTS="true"
# Uncomment the following line if you want to disable marking untracked files
# under VCS as dirty. This makes repository status check for large repositories
# much, much faster.
# DISABLE_UNTRACKED_FILES_DIRTY="true"
HIST_STAMPS="dd.mm.yyyy"
# Would you like to use another custom folder than $ZSH/custom?
# ZSH_CUSTOM=/path/to/new-custom-folder
# Which plugins would you like to load?
# Standard plugins can be found in $ZSH/plugins/
# Custom plugins may be added to $ZSH_CUSTOM/plugins/
# Example format: plugins=(rails git textmate ruby lighthouse)
# Add wisely, as too many plugins slow down shell startup.
# plugins=(git dnf zsh-syntax-highlighting zsh-autosuggestions fzf bgnotify)
plugins=(git dnf zsh-syntax-highlighting zsh-autosuggestions fzf)
source $ZSH/oh-my-zsh.sh
# User configuration
# export MANPATH="/usr/local/man:$MANPATH"
# You may need to manually set your language environment
# export LANG=en_US.UTF-8
# Preferred editor for local and remote sessions
# if [[ -n $SSH_CONNECTION ]]; then
# export EDITOR='vim'
# else
# export EDITOR='mvim'
# fi
# Compilation flags
# export ARCHFLAGS="-arch x86_64"
# Set personal aliases, overriding those provided by oh-my-zsh libs,
# plugins, and themes. Aliases can be placed here, though oh-my-zsh
# users are encouraged to define aliases within the ZSH_CUSTOM folder.
# For a full list of active aliases, run `alias`.
#
# Example aliases
# alias zshconfig="mate ~/.zshrc"
# alias ohmyzsh="mate ~/.oh-my-zsh"
export DENO_INSTALL="$HOME/.deno"
export PATH="$DENO_INSTALL/bin:$PATH"
export CARGO_INSTALL="$HOME/.cargo"
export PATH="$CARGO_INSTALL/bin:$PATH"
export GEM_HOME="$(ruby -e 'puts Gem.user_dir')"
export PATH="$PATH:$GEM_HOME/bin"
# To customize prompt, run `p10k configure` or edit ~/.p10k.zsh.
[[ ! -f ~/.p10k.zsh ]] || source ~/.p10k.zsh
alias d="sudo docker"
alias m="mysql -u root -p"
alias vi="vim"
alias y="yarn"
alias yrw="yarn run watch"
alias yrp="yarn run prod"
alias gw="gulp watch"
alias s="git status"
alias pull="git pull"
alias a="git add ."
alias c="git commit -a -S -m"
alias p="git push"
alias ls="colorls --sd"
export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
# rm ~/.docker/config.json
# eval "$(ssh-agent -s)"
[ -f ~/.fzf.zsh ] && source ~/.fzf.zsh

@@ -0,0 +1,41 @@
---
- name: Start Docker
ansible.builtin.service:
name: docker
state: started
become: true
- name: Enable Docker
ansible.builtin.service:
name: docker
enabled: true
become: true
- name: Start Nginx
ansible.builtin.service:
name: nginx
state: started
become: true
- name: Enable Nginx
ansible.builtin.service:
name: nginx
enabled: true
become: true
- name: Start Firewalld
ansible.builtin.service:
name: firewalld
state: started
become: true
- name: Enable Firewalld
ansible.builtin.service:
name: firewalld
enabled: true
become: true
- name: Reload Firewalld
ansible.builtin.command: firewall-cmd --reload
become: true
changed_when: true

@@ -0,0 +1,46 @@
---
- name: Add docker gpg key
ansible.builtin.apt_key:
url: https://download.docker.com/linux/ubuntu/gpg
keyring: /etc/apt/trusted.gpg.d/docker.gpg
become: true
- name: Add docker apt repository
ansible.builtin.apt_repository:
repo: 'deb [arch=amd64] https://download.docker.com/linux/ubuntu {{ ansible_distribution_release }} stable'
filename: docker
update_cache: true
become: true
- name: Install docker packages
ansible.builtin.apt:
name: '{{ docker_packages }}'
update_cache: true
state: present
become: true
notify:
- Start Docker
- Enable Docker
- name: Add users in docker group to run as non-root
ansible.builtin.user:
name: '{{ item.key }}'
groups: docker
append: true
state: present
loop: '{{ access.admin | dict2items }}'
become: true
- name: Add cronjob for deleting unused docker images
ansible.builtin.cron:
name: 'Docker prune images'
minute: '0'
hour: '0'
day: '*'
month: '*'
weekday: '*'
job: 'docker image prune -af'
become: true
- name: Run handlers
ansible.builtin.meta: flush_handlers

@@ -0,0 +1,47 @@
---
- name: Set timezone
community.general.timezone:
name: '{{ timezone }}'
when: timezone is defined
become: true
- name: Configure swap
ansible.builtin.import_tasks: swap.yml
become: true
become_user: root
- name: Update and upgrade packages
ansible.builtin.apt:
upgrade: true
update_cache: true
become: true
- name: Add cronjob for upgrading packages
ansible.builtin.cron:
name: Upgrade packages
minute: '0'
hour: '0'
day: '*'
month: '*'
weekday: '*'
job: apt update && apt upgrade -y
user: root
state: present
become: true
- name: Install Common packages # noqa package-latest
ansible.builtin.apt:
pkg: '{{ common_packages }}'
state: latest
update_cache: true
become: true
- name: Docker Setup
ansible.builtin.import_tasks: docker.yml
- name: Install yq
ansible.builtin.get_url:
url: https://github.com/mikefarah/yq/releases/latest/download/yq_linux_amd64
dest: /usr/bin/yq
mode: a+x
become: true

@@ -0,0 +1,64 @@
---
- name: Manage swap file entry in fstab
ansible.posix.mount:
name: swap
src: '{{ swap_file_path }}'
fstype: swap
opts: sw
state: '{{ swap_file_state }}'
- name: Check if swap file exists
ansible.builtin.stat:
path: '{{ swap_file_path }}'
get_checksum: false
register: _swap_file_check
changed_when: false
- name: Set variable for existing swap file size
ansible.builtin.set_fact:
swap_file_existing_size_mb: '{{ (_swap_file_check.stat.size / 1024 / 1024) | int }}'
when: _swap_file_check.stat.exists
- name: Disable swap
ansible.builtin.command: swapoff -a
when: swap_file_state == 'absent' or (swap_file_state == 'present' and swap_file_existing_size_mb != swap_file_size_mb)
changed_when: true
- name: Ensure swap file doesn't exist
ansible.builtin.file:
path: '{{ swap_file_path }}'
state: absent
when: swap_file_state == 'absent' or (swap_file_state == 'present' and swap_file_existing_size_mb != swap_file_size_mb)
- name: Ensure swap file exists # noqa no-free-form
ansible.builtin.command: >
{{ swap_file_create_command }}
creates='{{ swap_file_path }}'
register: _swap_file_create
when: swap_file_state == "present"
- name: Set permissions on swap file
ansible.builtin.file:
path: '{{ swap_file_path }}'
owner: root
group: root
mode: '0600'
when: swap_file_state == "present"
- name: Make swap file if necessary
ansible.builtin.command: mkswap {{ swap_file_path }}
when: swap_file_state == "present" and _swap_file_create is changed
register: _mkswap_result
changed_when: true
- name: Run swapon on the swap file
ansible.builtin.command: swapon {{ swap_file_path }}
when: swap_file_state == "present" and _mkswap_result is changed
changed_when: true
- name: Set swappiness
ansible.posix.sysctl:
name: vm.swappiness
value: '{{ swap_swappiness }}'
state: present
when: swap_file_state == "present"
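
A quick way to verify the result on the target host, using the swap variables defined in the inventory vars above; a minimal sketch with standard tools:

swapon --show            # expect /swapfile at roughly 2000 MB
sysctl vm.swappiness     # expect vm.swappiness = 40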

@@ -0,0 +1,3 @@
---
- name: Load custom variables
ansible.builtin.include_vars: '{{ ansible_inventory_sources[0] }}/group_vars/all/custom.yml'

@@ -0,0 +1,75 @@
---
- name: Create volumes directory
ansible.builtin.file:
path: '{{ item | split(":") | first | split(".") | first }}'
state: directory
mode: '0755'
loop: '{{ container.volumes | default([]) }}'
become: true
- name: Reset container labels
ansible.builtin.set_fact:
_labels: []
- name: Add traefik labels
ansible.builtin.set_fact:
_labels:
- key: 'traefik.enable'
value: 'true'
- key: 'traefik.http.routers.{{ container.name }}-service-http.rule'
value: 'Host(`{{ container.network.domain_name }}`)'
- key: 'traefik.http.routers.{{ container.name }}-service-http.entrypoints'
value: 'http'
- key: 'traefik.http.routers.{{ container.name }}-service-https.rule'
value: 'Host(`{{ container.network.domain_name }}`)'
- key: 'traefik.http.routers.{{ container.name }}-service-https.entrypoints'
value: 'https'
- key: 'traefik.http.routers.{{ container.name }}-service-https.tls.certResolver'
value: 'acme-http'
when: container.network is defined and container.network.domain_name is defined
- name: Add traefik labels for auth
ansible.builtin.set_fact:
_labels: '{{ _labels + [{"key": "traefik.http.routers.{{ container.name }}-service-https.middlewares", "value": "auth@file"}] }}'
when: container.network is defined and (container.network.auth | default(false))
- name: Add traefik labels for custom_port
ansible.builtin.set_fact:
_labels: '{{ _labels + [{"key": "traefik.http.services.{{ container.name }}-service-https.loadbalancer.server.port", "value": "{{ container.network.custom_port | default(80) }}"}] }}' # noqa yaml[line-length]
- name: Save data to file
ansible.builtin.copy:
content: '{{ item.content }}'
dest: '{{ item.dest }}'
mode: '0644'
loop: '{{ container.data | default([]) }}'
become: true
- name: Create network(s)
community.docker.docker_network:
name: '{{ item.name }}'
ipam_config: '{{ item.ipam_config | default(omit) }}'
state: present
loop: '{{ container.networks | default([]) }}'
- name: 'Deploy container: {{ container.name }}'
community.docker.docker_container:
name: '{{ container.name }}'
image: '{{ container.image }}'
state: started
user: '{{ container.user | default(omit) }}'
command: '{{ container.command | default(omit) }}'
env: '{{ container.variables | default({}) }}'
ports: '{{ container.ports | default([]) }}'
volumes: '{{ container.volumes | default([]) }}'
restart: '{{ container.recreate | default(false) }}'
restart_policy: always
labels: '{{ _labels | default([]) | items2dict }}'
recreate: '{{ container.recreate | default(false) }}'
healthcheck: '{{ container.healthcheck | default(omit) }}'
network_mode: '{{ container.network_mode | default(omit) }}'
networks: '{{ container.networks | default(omit) }}'
capabilities: '{{ container.capabilities | default(omit) }}'
container_default_behavior: '{{ container.container_default_behavior | default(omit) }}'
interactive: '{{ container.interactive | default(omit) }}'
tty: '{{ container.tty | default(omit) }}'
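
The traefik labels assembled above end up as ordinary Docker labels on the container, so they can be inspected after a deploy; a hedged example for the portainer container defined in the inventory vars:

docker inspect portainer --format '{{ json .Config.Labels }}' | jq .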

@@ -0,0 +1,27 @@
---
- name: Login to custom docker registry
community.docker.docker_login:
registry_url: '{{ custom_docker_registry.server_address }}'
username: '{{ custom_docker_registry.username }}'
password: '{{ custom_docker_registry.password }}'
become: true
become_user: '{{ item.key }}'
loop: '{{ access.admin | dict2items }}'
when: custom_docker_registry is defined
- name: Deploy Docker containers
ansible.builtin.include_tasks: container.yml
loop: '{{ docker_containers }}'
loop_control:
loop_var: container
- name: Setup cron jobs
ansible.builtin.cron:
name: '{{ item.name }}'
job: '{{ item.job }}'
minute: '{{ item.minute }}'
hour: '{{ item.hour }}'
user: 'root'
become: true
become_user: root
loop: '{{ cron_jobs | default([]) }}'

@@ -0,0 +1,83 @@
---
- name: Create base volume directory for traefik
ansible.builtin.file:
path: '{{ base_docker_volumes_dir }}/traefik'
state: directory
mode: '0755'
become: true
- name: Create volume directory for traefik certificates
ansible.builtin.file:
path: '{{ base_docker_volumes_dir }}/traefik/certs'
state: directory
mode: '0755'
become: true
- name: Template traefik configuration files
ansible.builtin.template:
src: '{{ item }}.toml.j2'
dest: '{{ base_docker_volumes_dir }}/traefik/{{ item }}.toml'
mode: '0644'
loop:
- traefik
- dynamic
become: true
- name: Install passlib python package
ansible.builtin.pip:
name: passlib
state: present
become: true
- name: Generate passwords for Traefik users
ansible.builtin.set_fact:
traefik_user_passwords: "{{ traefik_user_passwords | default({}) | combine({item: lookup('ansible.builtin.password', '/dev/null', length=32, chars=['ascii_letters', 'digits'])}) }}" # noqa yaml[line-length]
loop: '{{ traefik_auth_users }}'
no_log: true
- name: Create decrypted password file
ansible.builtin.copy:
content: |
{% for user, password in traefik_user_passwords.items() %}
{{ user }}:{{ password }}
{% endfor %}
dest: '{{ base_docker_volumes_dir }}/traefik/passwords.decrypted'
mode: '0640'
become: true
- name: Create password file for traefik with hashed passwords
community.general.htpasswd:
path: '{{ base_docker_volumes_dir }}/traefik/passwords'
name: '{{ item.key }}'
password: '{{ item.value }}'
crypt_scheme: bcrypt
create: true
mode: '0640'
state: present
loop: '{{ traefik_user_passwords | dict2items }}'
become: true
- name: Deploy traefik container
community.docker.docker_container:
name: traefik
image: 'traefik:v3.4.0'
state: started
recreate: true
env:
PORT: '80'
CLOUDFLARE_EMAIL: "{{ cloudflare_email_address | default('null') }}"
CLOUDFLARE_API_KEY: "{{ cloudflare_api_key | default('null') }}"
labels: 'traefik.http.services.traefik.loadbalancer.server.port=80'
network_mode: host
ports:
- '80:80'
- '443:443'
- '8081:8081'
volumes:
- '{{ base_docker_volumes_dir }}/traefik/traefik.toml:/etc/traefik/traefik.toml'
- '{{ base_docker_volumes_dir }}/traefik/dynamic.toml:/opt/traefik/dynamic.toml'
- '{{ base_docker_volumes_dir }}/traefik/certs:/certs'
- '{{ base_docker_volumes_dir }}/traefik/passwords:/etc/traefik/.htpasswd'
- '/var/run/docker.sock:/var/run/docker.sock'
restart: false
restart_policy: always
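
Because the basic-auth passwords are generated randomly at run time, the plaintext copy written next to the htpasswd file is where they can be read back; a minimal sketch, assuming base_docker_volumes_dir is /opt/docker/volumes as set in the inventory vars:

sudo cat /opt/docker/volumes/traefik/passwords.decrypted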

@@ -0,0 +1,71 @@
#jinja2:variable_start_string:'%%', variable_end_string:'%%'
[metrics]
[metrics.prometheus]
addEntryPointsLabels = true
addRoutersLabels = true
addServicesLabels = true
[http.serversTransports.default-transport]
insecureSkipVerify = true
[http.middlewares]
[http.middlewares.https_redirect.redirectScheme]
scheme = "https"
permanent = true
[http.middlewares.auth.basicAuth]
usersfile = "/etc/traefik/.htpasswd"
[http.middlewares.global-rate-limit.rateLimit]
average = 2000
burst = 3000
period = "1m"
[http.routers]
[http.routers.traefik-api]
rule = "Host(`%% traefik_web_ui_addr %%`) && (PathPrefix(`/api`) || PathPrefix(`/dashboard`))"
service = "api@internal"
middlewares = ["auth"]
{% for service in traefik_services %}
[http.routers.%% service.name %%-http]
rule = "Host(`%% service.host %%`)"
service = "%% service.name %%"
entrypoints = ["http"]
# middlewares = ["https_redirect"]
[http.routers.%% service.name %%-https]
rule = "Host(`%% service.host %%`)"
service = "%% service.name %%"
entrypoints = ["https"]
{% if service.auth is defined and service.auth %}
middlewares = ["auth"]
{% endif %}
[http.routers.%% service.name %%-https.tls]
certResolver = "acme-http"
{% endfor %}
{% if traefik_services | length > 0 %}
[http.services]
{% for service in traefik_services %}
[http.services.%% service.name %%.loadBalancer]
serversTransport = "default-transport"
[[http.services.%% service.name %%.loadBalancer.servers]]
scheme = "http"
url = "%% service.service %%"
{% endfor %}
{% endif %}
{% if traefik_tcp_routers | length > 0 %}
[tcp.routers]
{% for router in traefik_tcp_routers %}
[tcp.routers.%% router.name %%-service-tcp]
rule = "HostSNI(`*`)"
service = "%% router.name %%-service"
entrypoints = ["%% router.entrypoint %%"]
{% endfor %}
[tcp.services]
{% for router in traefik_tcp_routers %}
[tcp.services.%% router.name %%-service.loadBalancer]
[[tcp.services.%% router.name %%-service.loadBalancer.servers]]
address = "%% router.target_host %%:%% router.target_port %%"
{% endfor %}
{% endif %}

@@ -0,0 +1,55 @@
#jinja2:variable_start_string:'%%', variable_end_string:'%%'
[accessLog]
[metrics]
[metrics.prometheus]
[ping]
[tracing]
addInternals = true
[log]
level = "DEBUG"
[entryPoints]
[entryPoints.http]
address = ":80"
asDefault = true
[entryPoints.http.http]
middlewares = ["global-rate-limit@file"]
[entryPoints.traefik]
address = ":8081"
[entryPoints.traefik.http]
middlewares = ["global-rate-limit@file"]
[entryPoints.https]
address = ":443"
[entryPoints.https.http]
middlewares = ["global-rate-limit@file"]
[entryPoints.https.http.tls]
certResolver = "acme-http"
{% if traefik_tcp_routers is defined %}
{% for router in traefik_tcp_routers %}
[entryPoints.%% router.entrypoint %%]
address = ":%% router.source_port %%"
{% endfor %}
{% endif %}
[api]
dashboard = true
insecure = true
[providers.file]
directory = "/opt/traefik/"
[certificatesResolvers.acme-http.acme]
email = "%% letsencrypt_email_address %%"
storage = "/certs/acme.json"
caServer = "https://acme-v02.api.letsencrypt.org/directory" # Production
# caServer = "https://acme-staging-v02.api.letsencrypt.org/directory" # Staging
[certificatesResolvers.acme-http.acme.httpChallenge]
entryPoint = "http"
[providers.docker]
endpoint = "unix:///var/run/docker.sock"
exposedByDefault = true
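
With [ping] enabled and the api exposed insecurely on the traefik entrypoint (:8081), a quick health check is possible from the host itself; a hedged sketch:

curl -s http://127.0.0.1:8081/ping          # expect: OK
curl -s http://127.0.0.1:8081/api/rawdata | jq 'keys'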

40  requirements.txt  Normal file

@@ -0,0 +1,40 @@
ansible==11.8.0
ansible-compat==25.6.0
ansible-core==2.18.7
ansible-lint==25.7.0
attrs==25.3.0
black==25.1.0
bracex==2.6
certifi==2025.8.3
cffi==1.17.1
charset-normalizer==3.4.3
click==8.2.1
cryptography==45.0.6
filelock==3.18.0
idna==3.10
importlib_metadata==8.7.0
Jinja2==3.1.6
jsonschema==4.25.0
jsonschema-specifications==2025.4.1
MarkupSafe==3.0.2
mypy_extensions==1.1.0
netaddr==1.3.0
packaging==25.0
passlib==1.7.4
pathspec==0.12.1
platformdirs==4.3.8
pycparser==2.22
PyYAML==6.0.2
referencing==0.36.2
requests==2.32.4
resolvelib==1.0.1
rpds-py==0.27.0
ruamel.yaml==0.18.14
ruamel.yaml.clib==0.2.12
setuptools==80.9.0
subprocess-tee==0.4.2
urllib3==2.5.0
wcmatch==10.1
wheel==0.45.1
yamllint==1.37.1
zipp==3.23.0

6  requirements.yml  Normal file

@@ -0,0 +1,6 @@
---
collections:
- name: community.general
version: '==10.7.2'
- name: community.docker
version: '==4.6.1'

9  setup_venv.sh  Executable file

@@ -0,0 +1,9 @@
#!/bin/bash
python3 -m venv venv
source venv/bin/activate
pip install --upgrade pip setuptools wheel
pip install -r requirements.txt
ANSIBLE_CONFIG=./ansible.cfg ansible-galaxy collection install -r requirements.yml
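
A hedged end-to-end bootstrap on a fresh checkout: run the script above, then confirm the pinned collections landed in the repo-local collections_path from ansible.cfg:

chmod +x setup_venv.sh
./setup_venv.sh
source ./venv/bin/activate
ANSIBLE_CONFIG=./ansible.cfg ansible-galaxy collection list community.docker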