Compare commits

3 commits: 029a3c80d5 ... ee2d502ca6

  ee2d502ca6
  88059a5e0f
  4024809cc4
@@ -9,15 +9,15 @@ terraform {
  required_providers {
    aws = {
      source = "hashicorp/aws"
      version = "~> 5.0"
      version = "5.98.0"
    }
    vultr = {
      source = "vultr/vultr"
      version = "2.22.1"
      version = "2.26.0"
    }
    kubernetes = {
      source = "hashicorp/kubernetes"
      version = "2.34.0"
      version = "2.37.1"
    }
  }
}
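Read together, the three pairs of version lines are the before/after pins. After this change the provider requirements read roughly as follows (a reconstruction from the hunk above, not copied from the new file):

terraform {
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "5.98.0"
    }
    vultr = {
      source  = "vultr/vultr"
      version = "2.26.0"
    }
    kubernetes = {
      source  = "hashicorp/kubernetes"
      version = "2.37.1"
    }
  }
}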
@@ -2,27 +2,14 @@ resource vultr_kubernetes athens {
  region = var.cluster.region
  version = var.cluster.version
  label = var.cluster.label
  # BUG: only have this set when creating the resource for the first time
  # once the cluster is up, we should comment this out again
  # enable_firewall = true
  node_pools {
    node_quantity = 1
    plan = var.cluster.pools["meta"].plan
    label = var.admin_services.namespace
    min_nodes = var.cluster.pools["meta"].min
    max_nodes = var.cluster.pools["meta"].max
    # tag = var.admin_services.namespace
  }
}

resource vultr_kubernetes_node_pools games {
  cluster_id = vultr_kubernetes.athens.id
  node_quantity = var.cluster.pools["games"].min
  plan = var.cluster.pools["games"].plan
  label = var.game_servers.namespace
  min_nodes = var.cluster.pools["games"].min
  max_nodes = var.cluster.pools["games"].max
  tag = var.game_servers.namespace
  node_pools {
    node_quantity = var.cluster.pools["main"].min_nodes
    plan = var.cluster.pools["main"].plan
    label = var.cluster.pools["main"].label
    min_nodes = var.cluster.pools["main"].min_nodes
    max_nodes = var.cluster.pools["main"].max_nodes
  }
}

output k8s_config {
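Removed and added lines are interleaved in the hunk above. A plausible reading of the consolidated resource after this change, assuming the separate games node-pool resource is dropped and the single node_pools block now tracks the renamed "main" pool (a sketch reconstructed from the added lines, not the verbatim new file):

resource vultr_kubernetes athens {
  region  = var.cluster.region
  version = var.cluster.version
  label   = var.cluster.label

  node_pools {
    node_quantity = var.cluster.pools["main"].min_nodes
    plan          = var.cluster.pools["main"].plan
    label         = var.cluster.pools["main"].label
    min_nodes     = var.cluster.pools["main"].min_nodes
    max_nodes     = var.cluster.pools["main"].max_nodes
  }
}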
infra/vultr-kubernetes/dev/.gitignore (vendored)

@@ -1,4 +0,0 @@
# created by virtualenv automatically
bin/
lib/
@@ -1,59 +0,0 @@
from argparse import ArgumentParser
from argparse import Namespace
from kubernetes import client, config
import re


def get_args() -> Namespace:
    parser = ArgumentParser(
        prog="Cluster Search Thing",
        description="General utility for finding resources for game server bot"
    )
    games = {"health", "reflex", "minecraft"}
    parser.add_argument('-g', '--game', required=False, choices=games)

    admin = {"health"}
    parser.add_argument('-a', '--admin', required=False, choices=admin)
    return parser.parse_args()


def k8s_api(config_path: str) -> client.api.core_v1_api.CoreV1Api:
    config.load_kube_config("../config.yaml")
    return client.CoreV1Api()


def get_admin_service_details(args: ArgumentParser, api: client.api.core_v1_api.CoreV1Api):
    print('admin thing requested', args.admin)
    services = api.list_service_for_all_namespaces(label_selector=f'app={args.admin}')
    if len(services.items) == 0:
        print(f'Unable to find {args.admin} amongst the admin-services')
        return

    port = services.items[0].spec.ports[0].port
    node_ips = list(filter(lambda a: a.type == 'ExternalIP', api.list_node().items[0].status.addresses))
    ipv4 = list(filter(lambda item: not re.match('[\d\.]{3}\d', item.address), node_ips))[0].address
    ipv6 = list(filter(lambda item: re.match('[\d\.]{3}\d', item.address), node_ips))[0].address

    print(f'{args.admin} --> {ipv4}:{port} ~~> {ipv6}:{port}')


def get_game_server_ip(args: ArgumentParser, api: client.api.core_v1_api.CoreV1Api):
    services = api.list_service_for_all_namespaces(label_selector=f'app={args.game}')
    port = services.items[0].spec.ports[0].port

    # Collecting the IPV4 of the node that contains the pod(container)
    # we actually care about. Since these pods only have 1 container
    # Now we collect specific data about the game server we requested
    node_ips = list(filter(lambda a: a.type == 'ExternalIP', api.list_node().items[0].status.addresses))
    ipv4 = list(filter(lambda item: not re.match('[\d\.]{3}\d', item.address), node_ips))[0].address
    ipv6 = list(filter(lambda item: re.match('[\d\.]{3}\d', item.address), node_ips))[0].address

    print(f'{args.game} --> {ipv4}:{port} ~~> {ipv6}:{port}')



if __name__ == '__main__':
    args = get_args()
    api = k8s_api('../config.yaml')

    if args.game:
        get_game_server_ip(args, api)

    if args.admin:
        get_admin_service_details(args, api)
@@ -1,8 +0,0 @@
home = /usr
implementation = CPython
version_info = 3.10.12.final.0
virtualenv = 20.13.0+ds
include-system-site-packages = false
base-prefix = /usr
base-exec-prefix = /usr
base-executable = /usr/bin/python3
@@ -1,18 +0,0 @@
cachetools==5.5.0
certifi==2024.8.30
charset-normalizer==3.4.0
durationpy==0.9
google-auth==2.36.0
idna==3.10
kubernetes==31.0.0
oauthlib==3.2.2
pyasn1==0.6.1
pyasn1_modules==0.4.1
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
requests-oauthlib==2.0.0
rsa==4.9
six==1.17.0
urllib3==2.2.3
websocket-client==1.8.0
@@ -1,55 +0,0 @@
resource kubernetes_namespace game-servers {
  count = length(var.game_servers.configs) > 0 ? 1 : 0
  metadata {
    name = var.game_servers.namespace
  }
}

resource kubernetes_pod game {
  for_each = var.game_servers.configs

  metadata {
    name = each.key
    namespace = var.game_servers.namespace
    labels = {
      app = each.key
    }
  }
  spec {
    container {
      image = each.value.image
      name = coalesce(each.value.name, each.key)
      resources {
        limits = {
          cpu = each.value.cpu
          memory = each.value.mem
        }
      }
      port {
        container_port = each.value.port.internal
        protocol = coalesce(each.value.proto, "TCP")
      }
    }
  }
}

resource kubernetes_service game {
  for_each = var.game_servers.configs
  metadata {
    name = each.key
    namespace = var.game_servers.namespace
    labels = {
      app = each.key
    }
  }
  spec {
    selector = {
      app = each.key
    }
    port {
      target_port = each.value.port.internal
      port = each.value.port.expose
    }
    type = "NodePort"
  }
}
@@ -26,46 +26,31 @@ variable cluster {
    label = string
    version = string
    pools = map(object({
      node_quantity = number
      plan = string
      autoscale = bool
      min = number
      max = number
      label = string
      min_nodes = number
      max_nodes = number
      tag = string
    }))
  })
}

variable game_servers {
  type = object({
    namespace = string
    configs = map(object({
      name = optional(string)
      image = string
      cpu = string
      mem = string
      port = object({
        internal = number
        expose = number
      })
      proto = optional(string)
    }))
  })
}

variable admin_services {
  type = object({
    namespace = string
    configs = map(object({
      name = string
      image = string
      cpu = string
      mem = string
      port = object({
        notes = optional(string)
        internal = number
        expose = number
      })
      proto = optional(string)
    }))
  })
}

# variable admin_services {
#   type = object({
#     namespace = string
#     configs = map(object({
#       name = string
#       image = string
#       cpu = string
#       mem = string
#       port = object({
#         notes = optional(string)
#         internal = number
#         expose = number
#       })
#       proto = optional(string)
#     }))
#   })
# }
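Old and new attributes of the pool object are interleaved above. The updated cluster resource only dereferences plan, label, min_nodes and max_nodes on a pool entry, and the tfvars below appear to set a tag, so the reworked pool type presumably includes at least the following (a sketch; other attributes such as autoscale may also survive):

pools = map(object({
  plan      = string
  label     = string
  min_nodes = number
  max_nodes = number
  tag       = string  # "athens-main" in the tfvars below
}))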
@@ -1,42 +1,15 @@
cluster = {
  region = "lax"
  label = "athens-cluster"
  version = "v1.31.2+1"
  version = "v1.33.0+1"
  pools = {
    meta = {
    main = {
      node_quantity = 1
      plan = "vc2-1c-2gb"
      autoscale = true
      min = 1
      max = 2
    }
    games = {
      plan = "vc2-1c-2gb"
      autoscale = true
      min = 1
      max = 3
    }
  }
}

game_servers = {
  namespace = "games"
  configs = {
  }
}

admin_services = {
  namespace = "admin-services"
  configs = {
    health = {
      image = "nginx:latest"
      name = "health"
      cpu = "200m"
      mem = "64Mi"
      port = {
        notes = "Basic nginx sanity check service"
        expose = 30800
        internal = 80
      }
      label = "main"
      min_nodes = 1
      max_nodes = 2
      tag = "athens-main"
    }
  }
}
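Old and new values are interleaved here as well (42 lines before, 15 after). Pieced together from the added lines (the version bump, the meta pool apparently renamed to main, and the label / min_nodes / max_nodes / tag values near the end of the hunk), the new cluster value plausibly looks something like this sketch rather than the verbatim file:

cluster = {
  region  = "lax"
  label   = "athens-cluster"
  version = "v1.33.0+1"

  pools = {
    main = {
      plan      = "vc2-1c-2gb"
      label     = "main"
      min_nodes = 1
      max_nodes = 2
      tag       = "athens-main"
    }
  }
}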