Compare commits

22 Commits

eea4c61537 Quick A record for testing static website migration 2026-01-31 12:29:49 -08:00
ee860c6e1f Common names now line up with hostnames in certificate through the 1 ingress 🔥 2026-01-13 23:18:41 -08:00
1c11410c2d More resource refactors, upgrades and fixes for future work 2026-01-07 00:53:11 -08:00
Housekeeping but the wiki got hosed :((
4d71994b85 Upgrading provider versions 2026-01-07 00:21:12 -08:00
79cb4eb1a6 Cleaning up unused code 2026-01-07 00:02:11 -08:00
e8817fe093 Adding wiki to DNS and opening it up on the ingress for public read access 2026-01-06 19:12:31 -08:00
97bffd2042 Adding note regarding git.shockrah.xyz & code.shockrah.xyz 2026-01-06 19:06:23 -08:00
37305fd74e Exposing 2222 in gitea service however ingress still needs configuration 2026-01-06 00:06:47 -08:00
555124bf2f Shortening ingress definition 2026-01-03 23:07:33 -08:00
e209da949b Adding wiki service with a basic page for now 2026-01-03 21:43:16 -08:00
caa2eba639 Removing unused helm charts 2025-12-28 19:30:13 -08:00
982669ed4a Cleaning up the logging namespace and resource as they are not providing value 2025-12-12 14:41:29 -08:00
4446ef813f Fixing auto_scaler issue with root node pool in athens cluster 2025-12-12 14:40:54 -08:00
9dc2f1d769 Adding sample files and fluent bit configs which still need some work 2025-11-10 14:18:05 -08:00
01b7b4ced8 Moving logging related things to the new logging namespace 2025-11-05 21:55:40 -08:00
29cdfcb695 OpenObserve minimal setup running now with its own namespace and volumes 2025-11-04 23:24:16 -08:00
bbbc9ed477 Upsizing the singular node to accommodate the new observability stack 2025-11-04 23:20:03 -08:00
d64c5526e6 Creating namespace for OpenObserve 2025-11-04 23:18:39 -08:00
469b3d08ce Adding hashicorp/random provider 2025-11-04 23:16:58 -08:00
7f5b3205d0 Ingress functional however this is all in a cooked af namespace 2025-11-03 02:14:06 -08:00
67ff5ce729 Gitea appearing functional with the service in place, now waiting on LB setup 2025-11-03 01:48:29 -08:00
6aadb47c61 Adding code.shockrah.xyz to DNS member list 2025-11-03 01:48:09 -08:00
13 changed files with 202 additions and 81 deletions

View File

@@ -39,6 +39,8 @@ locals {
{ name = "git.shockrah.xyz", records = [ var.vultr_host ] }, { name = "git.shockrah.xyz", records = [ var.vultr_host ] },
{ name = "sanity.shockrah.xyz", records = [ var.vke_lb ] }, { name = "sanity.shockrah.xyz", records = [ var.vke_lb ] },
{ name = "uptime.shockrah.xyz", records = [ var.vke_lb ] }, { name = "uptime.shockrah.xyz", records = [ var.vke_lb ] },
{ name = "code.shockrah.xyz", records = [ var.vke_lb ] },
{ name = "wiki.shockrah.xyz", records = [ var.vke_lb ] },
] ]
} }
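A note on how this list is consumed: the hunk only shows entries being appended to the locals block, not the resource that reads them. Presumably a single aws_route53_record with for_each fans these out into individual A records; the sketch below is an assumption about that pattern, and the local name (local.records) and the zone reference are hypothetical, not taken from this compare.

# Hypothetical consumer of the entries above: one A record per list item.
# local.records and aws_route53_zone.shockrah are assumed names, not shown in this compare.
resource "aws_route53_record" "service" {
  for_each = { for rec in local.records : rec.name => rec }

  zone_id = aws_route53_zone.shockrah.id
  name    = each.value.name
  type    = "A"
  ttl     = 300
  records = each.value.records
}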

View File

@@ -33,3 +33,11 @@ resource "aws_route53_record" "temper-tv-mx" {
"50 fb.mail.gandi.net.", "50 fb.mail.gandi.net.",
] ]
} }
resource "aws_route53_record" "temper-tv-test" {
zone_id = aws_route53_zone.temper-tv.id
name = "test.temper.tv"
type = "A"
ttl = 300
records = [ var.vke_lb ]
}

View File

@@ -9,7 +9,7 @@ terraform {
   required_providers {
     aws = {
       source  = "hashicorp/aws"
-      version = "5.98.0"
+      version = "6.27.0"
     }
     vultr = {
       source = "vultr/vultr"
@@ -17,7 +17,7 @@ terraform {
     }
     kubernetes = {
       source  = "hashicorp/kubernetes"
-      version = "2.37.1"
+      version = "3.0.1"
     }
     kubectl = {
       source = "gavinbunney/kubectl"
@@ -27,11 +27,14 @@ terraform {
source = "hashicorp/helm" source = "hashicorp/helm"
version = "3.0.2" version = "3.0.2"
} }
tls = { tls = {
source = "hashicorp/tls" source = "hashicorp/tls"
version = "4.1.0" version = "4.1.0"
} }
random = {
source = "hashicorp/random"
version = "3.7.2"
}
} }
} }
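The hashicorp/random provider is registered here (matching commit 469b3d08ce), but nothing else in this compare uses it yet. A typical first use on a stack like this would be generating a credential for one of the playground services; the resource below is purely illustrative and is not part of the diff.

# Illustrative only: a possible use of the newly pinned random provider.
# No such resource exists in this compare.
resource "random_password" "gitea_admin" {
  length  = 24
  special = true
}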

View File

@@ -10,6 +10,7 @@ resource vultr_kubernetes athens {
     label       = var.cluster.pools["main"].label
     min_nodes   = var.cluster.pools["main"].min_nodes
     max_nodes   = var.cluster.pools["main"].max_nodes
+    auto_scaler = true
   }
 }

View File

@@ -8,16 +8,3 @@
 # port = each.value
 # }
-resource vultr_firewall_group bastion {
-  description = "For connections into and out of the bastion host"
-}
-resource vultr_firewall_rule bastion_inbound {
-  firewall_group_id = vultr_firewall_group.bastion.id
-  protocol          = "tcp"
-  ip_type           = "v4"
-  subnet            = "0.0.0.0"
-  subnet_size       = 0
-  port              = 22
-}

View File

@@ -0,0 +1,74 @@
# NOTE: this is a simple deployment for demo purposes only.
# Currently it does not support SSH access and lacks Gitea runners.
# However a fully working setup can be found at: https://git.shockrah.xyz
resource kubernetes_deployment gitea {
  metadata {
    name      = "gitea"
    namespace = var.playground.namespace
    labels = {
      "app" = "gitea"
    }
  }
  spec {
    replicas = 1
    selector {
      match_labels = {
        "app" = "gitea"
      }
    }
    template {
      metadata {
        labels = {
          "app" = "gitea"
        }
      }
      spec {
        container {
          name  = "gitea"
          image = "gitea/gitea:latest"
          port {
            container_port = 3000
            name           = "gitea-main"
          }
          port {
            container_port = 2222
            name           = "gitea-ssh"
          }
          volume_mount {
            name       = "gitea"
            mount_path = "/data"
          }
        }
        volume {
          name = "gitea"
          persistent_volume_claim {
            claim_name = kubernetes_persistent_volume_claim_v1.gitea.metadata[0].name
          }
        }
      }
    }
  }
}

resource kubernetes_service gitea {
  metadata {
    name      = "gitea"
    namespace = var.playground.namespace
  }
  spec {
    selector = {
      "app" = "gitea"
    }
    port {
      target_port = "gitea-main"
      port        = 3000
      name        = "http"
    }
    port {
      target_port = "gitea-ssh"
      port        = 2222
      name        = "ssh"
    }
  }
}

View File

@@ -1,4 +1,4 @@
-resource kubernetes_deployment health {
+resource kubernetes_deployment_v1 health {
   metadata {
     name      = "health"
     namespace = var.playground.namespace
@@ -29,7 +29,7 @@ resource kubernetes_deployment health {
   }
 }
-resource kubernetes_service health {
+resource kubernetes_service_v1 health {
   metadata {
     name      = "health"
     namespace = var.playground.namespace

View File

@@ -1,3 +1,11 @@
+locals {
+  services = {
+    "code.shockrah.xyz"   = kubernetes_service.gitea
+    "sanity.shockrah.xyz" = kubernetes_service_v1.health
+    "uptime.shockrah.xyz" = kubernetes_service.kuma
+    "wiki.shockrah.xyz"   = kubernetes_service.otterwiki
+  }
+}
 resource kubernetes_ingress_v1 health {
   metadata {
     name = "health-ingress"
@@ -9,44 +17,31 @@ resource kubernetes_ingress_v1 health {
   }
   spec {
     ingress_class_name = "nginx"
-    tls {
-      hosts = [
-        "sanity.shockrah.xyz",
-        "uptime.shockrah.xyz"
-      ]
-      secret_name = "shockrah"
-    }
-    rule {
-      host = "sanity.shockrah.xyz"
-      http {
-        path {
-          path = "/"
-          backend {
-            service {
-              name = kubernetes_service.health.metadata[0].name
-              port {
-                number = kubernetes_service.health.spec[0].port[0].port
-              }
-            }
-          }
-        }
-      }
-    }
-    rule {
-      host = "uptime.shockrah.xyz"
-      http {
-        path {
-          path = "/"
-          backend {
-            service {
-              name = kubernetes_service.kuma.metadata[0].name
-              port {
-                number = kubernetes_service.kuma.spec[0].port[0].port
-              }
-            }
-          }
-        }
-      }
-    }
+    dynamic tls {
+      for_each = local.services
+      content {
+        hosts       = [tls.key]
+        secret_name = "${tls.value.metadata[0].name}-secret"
+      }
+    }
+    dynamic "rule" {
+      for_each = local.services
+      content {
+        host = "${rule.key}"
+        http {
+          path {
+            path = "/"
+            backend {
+              service {
+                name = rule.value.metadata[0].name
+                port {
+                  number = rule.value.spec[0].port[0].port
+                }
+              }
+            }
+          }
+        }
+      }
+    }
   }
 }
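For readers less familiar with dynamic blocks: the refactor above replaces the hand-written tls and rule blocks with one dynamic block each, driven by the local.services map, so every hostname gets its own TLS entry and routing rule, and the block iterators (tls.key/tls.value, rule.key/rule.value) carry the hostname and the referenced service. The standalone sketch below mirrors that map-driven expansion with placeholder hostnames so it can run on its own; it illustrates the pattern and is not code from this repository.

# Standalone sketch of the map-driven expansion used by the dynamic blocks above.
# Hostnames and the output name are placeholders; runnable with `terraform apply` in an empty directory.
locals {
  demo_services = {
    "a.example.com" = { port = 3000 }
    "b.example.com" = { port = 8080 }
  }
}

output "demo_rules" {
  # One element per hostname, analogous to one rule block per entry of local.services.
  value = [
    for host, svc in local.demo_services : {
      host = host
      port = svc.port
    }
  ]
}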

View File

@@ -7,11 +7,4 @@ resource kubernetes_namespace playground {
   }
 }
-resource kubernetes_namespace dev {
-  metadata {
-    annotations = {
-      names = "dev"
-    }
-    name = "dev"
-  }
-}

View File

@@ -40,22 +40,10 @@ variable cluster {
 variable playground {
   type = object({
     namespace = string
-    health = object({
-      dns = string
-    })
+    # TODO: Re-incorporate this var for templating later
     tls = object({
       email = string
     })
   })
 }
-variable bastion {
-  type = object({
-    plan  = string
-    os    = string
-    label = string
-  })
-}

View File

@@ -1,7 +1,7 @@
 cluster = {
   region  = "lax"
   label   = "athens-cluster"
-  version = "v1.33.0+3"
+  version = "v1.34.1+2"
   pools = {
     main = {
       node_quantity = 1
@@ -18,17 +18,7 @@ playground = {
namespace = "playground" namespace = "playground"
# Sanity check service that is used purely for the sake of ensuring # Sanity check service that is used purely for the sake of ensuring
# things are ( at a basic level ) functional # things are ( at a basic level ) functional
health = {
dns = "health"
}
tls = { tls = {
email = "dev@shockrah.xyz" email = "dev@shockrah.xyz"
} }
} }
bastion = {
plan = "vc2-1c-2gb"
label = "bastion"
os = "1743"
}

View File

@@ -18,7 +18,24 @@ resource kubernetes_persistent_volume_claim_v1 kuma {
 resource kubernetes_persistent_volume_claim_v1 gitea {
   metadata {
     name      = "gitea-data"
-    namespace = kubernetes_namespace.dev.metadata[0].name
+    namespace = var.playground.namespace
+  }
+  spec {
+    volume_mode  = "Filesystem"
+    access_modes = [ "ReadWriteOnce" ]
+    resources {
+      requests = {
+        storage = "10Gi"
+      }
+    }
+  }
+}
+
+resource kubernetes_persistent_volume_claim_v1 otterwiki {
+  metadata {
+    name      = "otterwiki-data"
+    namespace = var.playground.namespace
   }
   spec {
     volume_mode = "Filesystem"

View File

@@ -0,0 +1,63 @@
resource kubernetes_deployment otterwiki {
  metadata {
    name      = "otterwiki"
    namespace = var.playground.namespace
    labels = {
      "app" = "otterwiki"
    }
  }
  spec {
    replicas = 1
    selector {
      match_labels = {
        "app" = "otterwiki"
      }
    }
    template {
      metadata {
        labels = {
          "app" = "otterwiki"
        }
      }
      spec {
        container {
          name  = "otterwiki"
          image = "redimp/otterwiki:2"
          port {
            container_port = 8080
            name           = "otterwiki-main"
          }
          volume_mount {
            name       = "otterwiki-data"
            mount_path = "/var/lib/otterwiki"
          }
        }
        volume {
          name = "otterwiki-data"
          persistent_volume_claim {
            claim_name = kubernetes_persistent_volume_claim_v1.otterwiki.metadata[0].name
          }
        }
      }
    }
  }
}

resource kubernetes_service otterwiki {
  metadata {
    name      = "otterwiki"
    namespace = var.playground.namespace
  }
  spec {
    selector = {
      "app" = "otterwiki"
    }
    port {
      port        = 80
      target_port = "otterwiki-main"
      protocol    = "TCP"
      name        = "http"
    }
  }
}