working nice. docker uses bun

This commit is contained in:
2025-08-12 23:40:08 +02:00
parent f0922f485d
commit ea9cdb7692
83 changed files with 3005 additions and 422 deletions

135
src/lib/server/database.ts Normal file
View File

@@ -0,0 +1,135 @@
import { currentFilament } from './filament';
import { open } from 'sqlite';
import sqlite3 from 'sqlite3';
import { resolve } from 'path';
import { fileURLToPath } from 'url';
import { dirname } from 'path';
import type { Filament } from '$lib/interfaces/printer';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const dbPath = resolve(__dirname, '../../../db.sqlite');
let db;
async function initDb() {
const db = await open({
filename: dbPath,
driver: sqlite3.Database
});
// Transaction to run schemas
await db.exec('BEGIN TRANSACTION');
try {
for (const stmt of schemas) {
await db.run(stmt);
}
await db.exec('COMMIT');
} catch (err) {
console.error('Failed to create tables:', err.message);
await db.exec('ROLLBACK');
}
return db;
}
const schemas = [
`
CREATE TABLE IF NOT EXISTS filament (
id INTEGER PRIMARY KEY AUTOINCREMENT,
hex TEXT NOT NULL,
color TEXT NOT NULL,
material TEXT,
weight REAL,
link TEXT,
added INTEGER, -- epoch seconds
updated INTEGER -- epoch seconds
)
`
];
async function seedData(db) {
const baseTimestamp = Math.floor(new Date('2025-04-01T05:47:01+00:00').getTime() / 1000);
const filaments = currentFilament();
const stmt = await db.prepare(`
INSERT OR IGNORE INTO filament (hex, color, material, weight, link, added, updated)
VALUES (?, ?, ?, ?, ?, ?, ?)
`);
await db.exec('BEGIN TRANSACTION');
try {
for (const f of filaments) {
const existing = await db.get('SELECT 1 FROM filament WHERE hex = ? AND updated = ?', [
f.hex,
baseTimestamp
]);
if (!existing) {
// Reuse the prepared INSERT OR IGNORE statement instead of re-parsing the SQL each iteration
await stmt.run(f.hex, f.color, f.material, f.weight, f.link, baseTimestamp, baseTimestamp);
}
}
await db.exec('COMMIT');
} catch (err) {
console.error('Failed to seed data:', err.message);
await db.exec('ROLLBACK');
} finally {
await stmt.finalize();
}
}
// Export helper to use db elsewhere
async function getDb() {
if (db !== undefined) return db;
db = await initDb();
await seedData(db);
console.log('Database setup and seeding complete!');
return db;
}
export async function getAllFilament(): Promise<Array<Filament>> {
const db = await getDb();
const result = await db?.all('SELECT * FROM filament');
return result || [];
}
export async function getFilamentByColor(name: string) {
const db = await getDb();
// Lower-case the lookup value so it matches the LOWER(color) comparison
const result = await db?.get('SELECT * FROM filament WHERE LOWER(color) = ?', [name.toLowerCase()]);
return result || undefined;
}
export async function addFilament(
hex: string,
color: string,
material: string,
weight: number,
link: string
) {
const timestamp = Math.floor(new Date().getTime() / 1000);
const db = await getDb();
const result = await db.run(
`INSERT INTO filament (hex, color, material, weight, link, added, updated)
VALUES (?, ?, ?, ?, ?, ?, ?)`,
[hex, color, material, weight, link, timestamp, timestamp]
);
return { id: result.lastID };
}
export async function updatefilament({ id, hex, color, material, weight, link }) {
const timestamp = Math.floor(new Date().getTime() / 1000);
const db = await getDb();
// Update the columns that actually exist in the filament table and bump the `updated`
// timestamp (the previous body targeted a non-existent `filaments` table with make/model/year columns).
await db.run(
'UPDATE filament SET hex = ?, color = ?, material = ?, weight = ?, link = ?, updated = ? WHERE id = ?',
[hex, color, material, weight, link, timestamp, id]
);
}
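
A minimal sketch of how these helpers might be consumed from a SvelteKit endpoint; the route path and handlers below are hypothetical and not part of this commit:

// hypothetical src/routes/api/filament/+server.ts (illustration only)
import { json } from '@sveltejs/kit';
import { getAllFilament, addFilament } from '$lib/server/database';

export async function GET() {
	return json(await getAllFilament());
}

export async function POST({ request }: { request: Request }) {
	const { hex, color, material, weight, link } = await request.json();
	const created = await addFilament(hex, color, material, weight, link);
	return json(created, { status: 201 });
}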

View File

@@ -0,0 +1,54 @@
import { request, Agent } from 'https';
import tls, { type PeerCertificate } from 'tls';
const SSL_WEBSERVER = '10.0.0.53';
export async function getSSLInfo(url: string, port = 443) {
const parsed = new URL(url);
if (parsed.protocol !== 'https:') return { raw: 'none' };
const hostname = parsed.hostname;
return new Promise((resolve, reject) => {
// Connect to the local reverse proxy and rely on SNI (servername) to get the certificate it serves for this hostname
const socket = tls.connect(port, SSL_WEBSERVER, { servername: hostname }, () => {
const cert = socket.getPeerCertificate(true);
if (!cert || Object.keys(cert).length === 0) {
reject(new Error('No certificate found'));
return;
}
resolve({
subject: cert.subject,
issuer: cert.issuer,
valid_from: cert.valid_from,
valid_to: cert.valid_to,
fingerprint: cert.fingerprint,
fingerprint256: cert.fingerprint256,
ca: cert.ca,
nistCurve: cert.nistCurve,
asn1Curve: cert.asn1Curve,
serialNumber: cert.serialNumber,
altNames: cert.subjectaltname,
publicKey: cert?.pubkey?.toString('base64') || '',
infoAccess: cert?.infoAccess || ''
});
socket.end();
});
socket.on('error', (err) => {
reject(err);
});
});
}
export async function healthOk(url: string): Promise<number> {
return fetch(url, { signal: AbortSignal.timeout(400) })
.then((resp) => {
return resp.status;
})
.catch((error) => {
console.log('got error from health endpoint for url:', url);
console.log(error);
return 550;
});
}
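
A rough usage sketch combining the two helpers; the target URL is a made-up example, not part of this commit:

// illustration only
const target = 'https://grafana.example.lan';
const status = await healthOk(target);
const cert = (await getSSLInfo(target)) as { valid_to?: string };
console.log('health status:', status, '- certificate valid until:', cert.valid_to);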

View File

@@ -16,19 +16,70 @@ function buildHomeassistantRequest() {
return { url, options };
}
const attributes = {
current_stage: null,
print_status: null,
bed_temperature: null,
nozzle_temperature: null,
total_usage: null,
nozzle_type: null,
nozzle_size: null,
print_bed_type: null,
current_layer: null,
total_layer_count: null,
print_progress: null,
print_length: null,
print_weight: null,
sd_card_status: null,
speed_profile: null,
wi_fi_signal: null,
end_time: null,
cover_image: null,
pick_image: null,
camera: null
};
interface PrinterState {
[key: string]: {
value: string;
unit?: string;
picture?: string;
};
}
function printerState(data: Entity[]) {
const state: PrinterState = {};
const keys = Object.keys(attributes);
for (let i = 0; i < keys.length; i++) {
const k = keys[i];
// Match the Home Assistant entity whose entity_id contains this attribute key
const value = data.find((el) => el.entity_id.includes(k));
if (!value) continue;
state[k] = { value: value.state };
if (value?.attributes?.unit_of_measurement)
state[k]['unit'] = value.attributes.unit_of_measurement;
if (value?.attributes?.entity_picture) state[k]['picture'] = value.attributes.entity_picture;
}
return state;
}
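// Example of the shape printerState() returns (editor sketch; keys come from `attributes`,
// the values here are invented):
// {
//   bed_temperature: { value: '55', unit: '°C' },
//   print_progress: { value: '42', unit: '%' },
//   cover_image: { value: 'idle', picture: '/api/image_proxy/...' }
// }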
async function fetchHassStates() {
const { url, options } = buildHomeassistantRequest();
return fetch(url, options).then((resp) => resp.json());
}
export async function fetchP1P(): Promise<Entity[]> {
export async function fetchP1P(): Promise<PrinterState> {
try {
let hassStates = await fetchHassStates();
hassStates = hassStates.filter(
(el: Entity) => el.attributes.friendly_name?.includes('P1P') === true
);
return hassStates;
return printerState(hassStates);
} catch (error) {
console.log('ERROR! from fetchP1P:', error);
return Promise.reject(null);

View File

@@ -1,9 +1,46 @@
import * as k8s from '@kubernetes/client-node';
import stream from 'stream';
import { writable } from 'svelte/store';
import fs from 'fs';
import { env } from '$env/dynamic/private';
/*
const kubeCaPath =
env.KUBERNETES_CA_CERT_PATH || '/var/run/secrets/kubernetes.io/serviceaccount/ca.crt';
const kubeCaCert = fs.readFileSync(kubeCaPath, 'utf8');
// const kubeSaTokenPath = env.KUBERNETES_SA_TOKEN_PATH || '/var/run/secrets/kubernetes.io/serviceaccount/token';
const token = fs.readFileSync(kubeSaTokenPath, 'utf8');
*/
// Plain options object for kc.loadFromOptions() below (not an actual k8s.KubeConfig instance)
const kubeConfig = {
clusters: [
{
name: 'kazan',
server: env.KUBERNETES_SERVICE_HOST || 'https://kubernetes.default.svc',
// caData: kubeCaCert,
// skipTLSVerify: true
skipTLSVerify: true
}
],
users: [
{
name: 'pod-user',
token: env.KUBERNETES_SA_TOKEN
}
],
contexts: [
{
name: 'default-context',
user: 'pod-user',
cluster: 'kazan'
}
],
currentContext: 'default-context'
};
const kc = new k8s.KubeConfig();
kc.loadFromDefault();
// loadFromOptions() replaces whatever loadFromDefault() picked up, so the explicit in-cluster options above win
kc.loadFromOptions(kubeConfig);
const k8sApi = kc.makeApiClient(k8s.CoreV1Api);
const appsV1Api = kc.makeApiClient(k8s.AppsV1Api);
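// Editor sketch of using the typed clients; assumes the pre-1.0 @kubernetes/client-node API
// (positional arguments, responses wrapped in { body }), matching the k8sLog.log() call below:
// const pods = await k8sApi.listNamespacedPod('default');
// console.log(pods.body.items.map((p) => p.metadata?.name));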
@@ -92,12 +129,16 @@ export function createLogStream(podName: string, namespace: string, containerNam
});
console.log('setting logAbortController, prev:', logAbortController);
logAbortController = await k8sLog.log(namespace, podName, containerName, liveStream, {
follow: true,
timestamps: false,
pretty: false,
tailLines: maxLines
});
try {
logAbortController = await k8sLog.log(namespace, podName, containerName, liveStream, {
follow: true,
timestamps: false,
pretty: false,
tailLines: maxLines
});
} catch (error) {
console.log('ERROR SETTING UP WS', error);
}
}
function stop() {

View File

@@ -2,8 +2,8 @@ import { env } from '$env/dynamic/private';
import type { Cluster, Node } from '$lib/interfaces/proxmox';
function buildProxmoxRequest() {
const url = env.PROXMOX_URL || 'https://10.0.0.50:8006/api2/json/';
const token = env.PROXMOX_TOKEN || 'REPLACE_WITH_PROXMOX_TOKEN';
const url = env.PROXMOX_URL;
const token = env.PROXMOX_TOKEN;
const options = {
method: 'GET',
headers: {

View File

@@ -3,7 +3,7 @@ import { env } from '$env/dynamic/private';
const TRAEFIK_HTTP_URL = '/api/http';
function buildTraefikRequest(path: string) {
const baseURL = env.TRAEFIK_URL || 'http://localhost:9000';
const baseURL = env.TRAEFIK_URL;
const url = `${baseURL}${TRAEFIK_HTTP_URL}/${path}`;
const options = {
method: 'GET',
@@ -12,6 +12,7 @@ function buildTraefikRequest(path: string) {
}
};
console.log('making traefik request', url);
return { url, options };
}
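// Editor sketch of consuming this builder; 'routers' maps to Traefik's /api/http/routers
// endpoint, and the fetch call itself is illustrative:
// const { url, options } = buildTraefikRequest('routers');
// const routers = await fetch(url, options).then((r) => r.json());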