mirror of
https://ak-git.vectorsigma.ru/terghalin/metalcheck.git
synced 2025-10-26 07:15:53 +09:00
init(app): initial version
This commit is contained in:
2
.gitignore
vendored
2
.gitignore
vendored
@@ -160,3 +160,5 @@ cython_debug/
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
|
||||
app/resources.db
|
||||
|
||||
32
Dockerfile
Normal file
32
Dockerfile
Normal file
@@ -0,0 +1,32 @@
|
||||
FROM debian:bookworm-slim

# Build prerequisites for compiling CPython from source via pyenv.
RUN apt-get update && apt-get install -y \
    curl \
    build-essential \
    libssl-dev \
    zlib1g-dev \
    libbz2-dev \
    libreadline-dev \
    libsqlite3-dev \
    wget \
    git \
    libncurses5-dev \
    libffi-dev \
    liblzma-dev \
    && rm -rf /var/lib/apt/lists/*

# Install pyenv and the interpreter pinned by app/.python-version.
RUN curl https://pyenv.run | bash
ENV PYENV_ROOT="/root/.pyenv"
# Fix: `python`/`pip` are provided by pyenv *shims*, not $PYENV_ROOT/bin
# (which only contains the `pyenv` executable). Without the shims dir on
# PATH, the `python --version` and `pip install` steps below cannot resolve.
ENV PATH="$PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH"
RUN pyenv install 3.10.12 && pyenv global 3.10.12
RUN python --version

WORKDIR /app

# Copy only the dependency manifest first so the pip layer is cached
# across source-code changes.
COPY requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

COPY . /app

EXPOSE 8000

CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
1
app/.python-version
Normal file
1
app/.python-version
Normal file
@@ -0,0 +1 @@
|
||||
3.10.12
|
||||
0
app/__init__.py
Normal file
0
app/__init__.py
Normal file
79
app/database.py
Normal file
79
app/database.py
Normal file
@@ -0,0 +1,79 @@
|
||||
import sqlite3
|
||||
import json
|
||||
from contextlib import closing
|
||||
|
||||
# SQLite database file, resolved relative to the process working directory
# (the repo's .gitignore lists app/resources.db, so the app is presumably
# launched from inside app/ -- TODO confirm).
DB_PATH = "resources.db"
# Basic schema for the database
def init_db():
    """Create the three inventory tables if they do not already exist.

    Idempotent: every statement uses CREATE TABLE IF NOT EXISTS, so calling
    this on every startup is safe.
    """
    schema = (
        """
        CREATE TABLE IF NOT EXISTS metal_nodes (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL,
            location TEXT,
            vendor TEXT,
            cpu INTEGER,
            memory TEXT,
            storage TEXT,
            time_on_duty INTEGER,
            initial_cost REAL,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
        """,
        """
        CREATE TABLE IF NOT EXISTS virtual_machines (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL,
            location TEXT,
            cpu INTEGER,
            memory TEXT,
            storage TEXT,
            type TEXT,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
        """,
        """
        CREATE TABLE IF NOT EXISTS kubernetes_nodes (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            cluster_name TEXT NOT NULL,
            node_name TEXT NOT NULL,
            cpu INTEGER,
            memory TEXT,
            storage TEXT,
            type TEXT,
            namespaces TEXT,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
        """,
    )
    # One connection for all DDL; `with conn` commits on success,
    # `closing(...)` guarantees the connection is released.
    with closing(sqlite3.connect(DB_PATH)) as conn, conn:
        for ddl in schema:
            conn.execute(ddl)
def insert_metal_node(name, location, vendor, cpu, memory, storage, time_on_duty, initial_cost):
    """Persist one bare-metal node row into metal_nodes."""
    sql = (
        "INSERT INTO metal_nodes "
        "(name, location, vendor, cpu, memory, storage, time_on_duty, initial_cost) "
        "VALUES (?, ?, ?, ?, ?, ?, ?, ?)"
    )
    row = (name, location, vendor, cpu, memory, storage, time_on_duty, initial_cost)
    # `with conn` wraps the insert in a committed transaction.
    with closing(sqlite3.connect(DB_PATH)) as conn, conn:
        conn.execute(sql, row)
def insert_virtual_machine(name, location, cpu, memory, storage, vm_type):
    """Persist one virtual-machine row into virtual_machines."""
    sql = (
        "INSERT INTO virtual_machines "
        "(name, location, cpu, memory, storage, type) "
        "VALUES (?, ?, ?, ?, ?, ?)"
    )
    row = (name, location, cpu, memory, storage, vm_type)
    with closing(sqlite3.connect(DB_PATH)) as conn, conn:
        conn.execute(sql, row)
def insert_kubernetes_node(cluster_name, node_name, cpu, memory, storage, node_type, namespaces):
    """Persist one Kubernetes node row.

    `namespaces` (a list) is JSON-encoded into the TEXT column.
    """
    sql = (
        "INSERT INTO kubernetes_nodes "
        "(cluster_name, node_name, cpu, memory, storage, type, namespaces) "
        "VALUES (?, ?, ?, ?, ?, ?, ?)"
    )
    row = (cluster_name, node_name, cpu, memory, storage, node_type, json.dumps(namespaces))
    with closing(sqlite3.connect(DB_PATH)) as conn, conn:
        conn.execute(sql, row)
# Only these tables exist (created by init_db); used to validate fetch_all input.
_ALLOWED_TABLES = frozenset({"metal_nodes", "virtual_machines", "kubernetes_nodes"})


def fetch_all(table):
    """Return every row of *table* as a list of tuples.

    A table name cannot be bound as a SQL parameter, so the previous
    f-string interpolation was an SQL-injection vector (the name reaches
    this function from an HTTP query string via routes/export.py). The
    name is now validated against the fixed schema whitelist first.

    Raises:
        ValueError: if *table* is not one of the known inventory tables.
    """
    if table not in _ALLOWED_TABLES:
        raise ValueError(f"unknown table: {table!r}")
    with closing(sqlite3.connect(DB_PATH)) as conn:
        with conn:
            cursor = conn.execute(f"SELECT * FROM {table}")
            return cursor.fetchall()
70
app/extras/pseudographic.py
Normal file
70
app/extras/pseudographic.py
Normal file
@@ -0,0 +1,70 @@
|
||||
# Show pseudo-graphic tables using rich library, if there is no frontend
|
||||
from rich.console import Console
|
||||
from rich.table import Table
|
||||
from database import fetch_all
|
||||
|
||||
def display_metal_nodes():
    """Render all metal_nodes rows as a rich table on the terminal."""
    columns = (
        ("ID", "right"),
        ("Name", None),
        ("Location", None),
        ("Vendor", None),
        ("CPU", "right"),
        ("Memory", None),
        ("Storage", None),
    )
    table = Table(title="Metal Nodes")
    for header, justify in columns:
        if justify:
            table.add_column(header, justify=justify)
        else:
            table.add_column(header)

    # Row layout follows the metal_nodes column order from database.py;
    # numeric cells are stringified because rich expects text.
    for row in fetch_all("metal_nodes"):
        table.add_row(str(row[0]), row[1], row[2], row[3], str(row[4]), row[5], row[6])

    Console().print(table)
def display_virtual_machines():
    """Render all virtual_machines rows as a rich table on the terminal."""
    columns = (
        ("ID", "right"),
        ("Name", None),
        ("Location", None),
        ("CPU", "right"),
        ("Memory", None),
        ("Storage", None),
        ("Type", None),
    )
    table = Table(title="Virtual Machines")
    for header, justify in columns:
        if justify:
            table.add_column(header, justify=justify)
        else:
            table.add_column(header)

    for record in fetch_all("virtual_machines"):
        table.add_row(
            str(record[0]), record[1], record[2],
            str(record[3]), record[4], record[5], record[6],
        )

    Console().print(table)
def display_kubernetes_nodes():
    """Render all kubernetes_nodes rows as a rich table on the terminal."""
    columns = (
        ("ID", "right"),
        ("Cluster Name", None),
        ("Node Name", None),
        ("CPU", "right"),
        ("Memory", None),
        ("Storage", None),
        ("Type", None),
        ("Namespaces", None),
    )
    table = Table(title="Kubernetes Nodes")
    for header, justify in columns:
        if justify:
            table.add_column(header, justify=justify)
        else:
            table.add_column(header)

    # record[7] is the namespaces column, stored as JSON text by
    # insert_kubernetes_node; it is displayed raw here.
    for record in fetch_all("kubernetes_nodes"):
        table.add_row(
            str(record[0]), record[1], record[2],
            str(record[3]), record[4], record[5],
            record[6], record[7],
        )

    Console().print(table)
if __name__ == "__main__":
    # Show every inventory table when run as a script.
    display_metal_nodes()
    display_virtual_machines()
    # Fix: this function was defined above but never invoked, so the
    # Kubernetes inventory was silently omitted from the CLI output.
    display_kubernetes_nodes()
17
app/main.py
Normal file
17
app/main.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from fastapi import FastAPI
from database import init_db
from routes import metal, vm, k8s, export


app = FastAPI()

# Ensure the SQLite schema exists before any request is served
# (runs once, at import time).
init_db()

# Register each resource router with the application.
for module in (metal, vm, k8s, export):
    app.include_router(module.router)


@app.get("/")
def root():
    """Landing endpoint; confirms the API is up."""
    return {"message": "Welcome to Metal Check API"}
5
app/requirements.txt
Normal file
5
app/requirements.txt
Normal file
@@ -0,0 +1,5 @@
|
||||
# Runtime dependencies for the Metal Check API
|
||||
fastapi
|
||||
uvicorn
|
||||
pyyaml
|
||||
rich
|
||||
0
app/routes/__init__.py
Normal file
0
app/routes/__init__.py
Normal file
19
app/routes/export.py
Normal file
19
app/routes/export.py
Normal file
@@ -0,0 +1,19 @@
|
||||
# Export data in YAML or JSON format
|
||||
from fastapi import APIRouter
|
||||
from database import fetch_all
|
||||
import yaml
|
||||
import json
|
||||
|
||||
# Router for the /export endpoint; registered by app/main.py.
router = APIRouter()
@router.get("/export")
def export_data(format: str = "yaml"):
    """Export all inventory tables in YAML (default) or JSON.

    Query parameter `format`: "yaml" (case-insensitive) returns a YAML
    document as a string; anything else returns the data as JSON.
    """
    data = {
        "metal_nodes": fetch_all("metal_nodes"),
        "virtual_machines": fetch_all("virtual_machines"),
        "kubernetes_nodes": fetch_all("kubernetes_nodes"),
    }
    if format.lower() == "yaml":
        # YAML has no native FastAPI renderer, so the serialized text is
        # returned as a string.
        return yaml.safe_dump(data)
    # Fix: previously returned json.dumps(data), which FastAPI then
    # JSON-encoded *again*, so clients received a JSON string containing
    # escaped JSON. Returning the dict lets FastAPI serialize it once.
    return data
32
app/routes/k8s.py
Normal file
32
app/routes/k8s.py
Normal file
@@ -0,0 +1,32 @@
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
from typing import List
|
||||
from database import insert_kubernetes_node, fetch_all
|
||||
|
||||
# Router for the /k8s endpoints; registered by app/main.py.
router = APIRouter()
class KubernetesNode(BaseModel):
    """Request body for POST /k8s/data.

    Mirrors the kubernetes_nodes table columns (see app/database.py).
    """

    cluster_name: str
    node_name: str
    cpu: int  # INTEGER column; NOTE(review): examples/on-premise.json uses strings like "4 cores" -- verify
    memory: str  # free-form text, e.g. "16 GB"
    storage: str
    type: str  # field name kept for the API; shadows builtin `type` only inside the class body
    namespaces: List[str]  # JSON-encoded into a TEXT column by insert_kubernetes_node
@router.get("/k8s/data")
def get_k8s_data():
    """List every stored Kubernetes node row."""
    rows = fetch_all("kubernetes_nodes")
    return {"kubernetes_nodes": rows}
@router.post("/k8s/data")
def add_k8s_data(node: KubernetesNode):
    """Store a new Kubernetes node and return a confirmation message."""
    # Argument order matches insert_kubernetes_node's signature.
    insert_kubernetes_node(
        node.cluster_name,
        node.node_name,
        node.cpu,
        node.memory,
        node.storage,
        node.type,
        node.namespaces,
    )
    return {"message": f"Kubernetes node '{node.node_name}' in cluster '{node.cluster_name}' added successfully."}
33
app/routes/metal.py
Normal file
33
app/routes/metal.py
Normal file
@@ -0,0 +1,33 @@
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
from database import insert_metal_node, fetch_all
|
||||
|
||||
# Router for the /metal endpoints; registered by app/main.py.
router = APIRouter()
class MetalNode(BaseModel):
    """Request body for POST /metal/data.

    Mirrors the metal_nodes table columns (see app/database.py).
    """

    name: str
    location: str
    vendor: str
    cpu: int  # INTEGER column; NOTE(review): examples/on-premise.json uses strings like "16 cores" -- verify
    memory: str  # free-form text, e.g. "128 GB"
    storage: str
    time_on_duty: int  # INTEGER column; NOTE(review): examples use "24/7" strings -- verify
    initial_cost: float  # REAL column; NOTE(review): examples use "$10,000" strings -- verify
@router.get("/metal/data")
def get_metal_data():
    """List every stored bare-metal node row."""
    rows = fetch_all("metal_nodes")
    return {"metal_nodes": rows}
@router.post("/metal/data")
def add_metal_data(node: MetalNode):
    """Store a new bare-metal node and return a confirmation message."""
    # Argument order matches insert_metal_node's signature.
    insert_metal_node(
        node.name,
        node.location,
        node.vendor,
        node.cpu,
        node.memory,
        node.storage,
        node.time_on_duty,
        node.initial_cost,
    )
    return {"message": f"Metal node '{node.name}' added successfully."}
29
app/routes/vm.py
Normal file
29
app/routes/vm.py
Normal file
@@ -0,0 +1,29 @@
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
from database import insert_virtual_machine, fetch_all
|
||||
|
||||
# Router for the /vm endpoints; registered by app/main.py.
router = APIRouter()
class VirtualMachine(BaseModel):
    """Request body for POST /vm/data.

    Mirrors the virtual_machines table columns (see app/database.py).
    """

    name: str
    location: str
    cpu: int  # INTEGER column; NOTE(review): examples/on-premise.json uses strings like "4 cores" -- verify
    memory: str  # free-form text, e.g. "8 GB"
    storage: str
    type: str  # field name kept for the API; shadows builtin `type` only inside the class body
@router.get("/vm/data")
def get_vm_data():
    """List every stored virtual machine row."""
    rows = fetch_all("virtual_machines")
    return {"virtual_machines": rows}
@router.post("/vm/data")
def add_vm_data(vm: VirtualMachine):
    """Store a new virtual machine and return a confirmation message."""
    # Argument order matches insert_virtual_machine's signature
    # (vm.type maps to its vm_type parameter).
    insert_virtual_machine(
        vm.name,
        vm.location,
        vm.cpu,
        vm.memory,
        vm.storage,
        vm.type,
    )
    return {"message": f"Virtual machine '{vm.name}' added successfully."}
0
app/services/k8s_service.py
Normal file
0
app/services/k8s_service.py
Normal file
0
app/services/metal_service.py
Normal file
0
app/services/metal_service.py
Normal file
0
app/services/vm_service.py
Normal file
0
app/services/vm_service.py
Normal file
61
examples/on-premise.json
Normal file
61
examples/on-premise.json
Normal file
@@ -0,0 +1,61 @@
|
||||
[
  {
    "cluster_name": "production-cluster",
    "nodes": [
      {
        "name": "node-1",
        "cpu": "4 cores",
        "memory": "16 GB",
        "storage": "500 GB",
        "type": "m6.xlarge",
        "namespaces": ["default", "dev", "test"]
      },
      {
        "name": "node-2",
        "cpu": "8 cores",
        "memory": "32 GB",
        "storage": "1 TB",
        "type": "c6.2xlarge",
        "namespaces": ["default", "prod"]
      }
    ]
  },
  {
    "region": "Nuremberg",
    "instances": [
      {
        "cpu": "4 cores",
        "memory": "8 GB",
        "storage": "120 GB SSD",
        "type": "cx21"
      },
      {
        "cpu": "8 cores",
        "memory": "16 GB",
        "storage": "240 GB SSD",
        "type": "cx31"
      }
    ]
  },
  {
    "location": "New York Data Center",
    "machines": [
      {
        "vendor": "Dell PowerEdge R740",
        "cpu": "16 cores",
        "memory": "128 GB",
        "storage": "2 TB HDD",
        "time_on_duty": "24/7",
        "initial_cost": "$10,000"
      },
      {
        "vendor": "HPE ProLiant DL380 Gen10",
        "cpu": "32 cores",
        "memory": "256 GB",
        "storage": "4 TB SSD",
        "time_on_duty": "9/5",
        "initial_cost": "$15,000"
      }
    ]
  }
]
|
||||
Reference in New Issue
Block a user