diff --git a/chronos_npm_package/chronos.ts b/chronos_npm_package/chronos.ts index 21eb39799..5afe06234 100644 --- a/chronos_npm_package/chronos.ts +++ b/chronos_npm_package/chronos.ts @@ -1,375 +1,4 @@ -// // import hpropagate from 'hpropagate'; -// // import postgres from './controllers/postgres'; -// // import mongo from './controllers/mongo'; -// // import MongoClientWrapper from './wrappers/MongoClientWrapper'; -// // import MongoServerWrapper from './wrappers/MongoServerWrapper'; -// // import PostgresClientWrapper from './wrappers/PostgresClientWrapper'; -// // import PostgresServerWrapper from './wrappers/PostgresServerWrapper'; -// // import utilities from './controllers/utilities'; - -// // /** -// // * ********************************** -// // * CMD CONFIG FILE SETUP -// // * -// // * @field microservice {string} REQUIRED -// // * The user specified name for the microservice being tracked -// // * -// // * @field interval {number} DEFAULT 60000 -// // * The interval for every microservice health check in milliseconds -// // * This defaults to 60000 ms or 1 minute -// // * -// // * @field database {Object} -// // * Takes two properties -// // * - type {string} Either PostgreSQL or MongoDB -// // * - URI {string} Database uri -// // * -// // * @field notifications {array} OPTIONAL -// // * Varies per notification method -// // * ********************************** -// // */ - -// // class Chronos { -// // constructor(config) { -// // if (config === undefined) { -// // throw new Error('Chronos config is undefined'); -// // } - -// // // Validate all input fields exist and setup notifications -// // config = utilities.validateInput(config); -// // config = utilities.addNotifications(config); -// // this.config = config; -// // } - -// // propagate() { -// // /** -// // * Places an unique x-correlating-id into the headers of each request/response. -// // * This is used for tracking the life cycle of the request until the response -// // */ -// // hpropagate({ propagateInResponses: true }); -// // } - -// // track() { -// // /** -// // * ********************************************** -// // * MAIN CONTROLLER -// // * Only supports MongoDB and PostgreSQL for now! -// // * ********************************************** -// // */ -// // const { database, dockerized } = this.config; - -// // /** -// // * If the provided database is Mongo -// // * - Connection is made to MongoDB via the provided URI by the user. -// // * -// // * - 'services' collection will be created if not already and stores every microservice -// // * that is apart of the application. -// // * -// // * - Information is collected if the microservice is containerized -// // * -// // * - 'communications' collection will be created which creates a new document for every -// // * endpoint that the user Request travels through (tracked with hpropograte) for express routes -// // */ -// // if (database.type === 'MongoDB') { -// // //mongo is referring to controller function defined locally, not the actual mongodb object -// // mongo.connect(this.config); -// // mongo.services(this.config); -// // dockerized ? mongo.docker(this.config) : mongo.health(this.config); - -// // if (database.connection === 'REST') { -// // return mongo.communications(this.config); -// // } -// // } else if (database.type === 'PostgreSQL') { -// // /** -// // * If the provided database is PostgreSQL -// // * - Connection is made to the postgres client via the provided URI by the user. 
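For context, a minimal sketch of how a consumer might configure and start Chronos, based only on the config fields and methods documented in the commented-out code above (microservice, interval, database, notifications, dockerized, REST connection). The package name, URIs, and notification values below are placeholders, not values taken from this repo, and the public API is assumed to match the commented-out class being removed here.

// Usage sketch only — field names follow the config comments above; values are placeholders.
import Chronos from 'chronos-tracker'; // package name assumed for illustration

const config = {
  microservice: 'orders-service',            // REQUIRED: name of the microservice being tracked
  interval: 60000,                           // health-check interval in ms (defaults to 60000)
  dockerized: false,                         // true if the service runs in a container
  database: {
    type: 'MongoDB',                         // 'MongoDB' or 'PostgreSQL'
    URI: 'mongodb://localhost:27017/chronos',// placeholder connection string
    connection: 'REST',                      // enables communications tracking for REST routes
  },
  notifications: [],                         // optional; shape varies per notification method
};

const chronos = new Chronos(config);
chronos.propagate(); // adds an x-correlating-id header to each request/response
chronos.track();     // connects to the database and begins health/communications logging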
-// // * -// // * - 'services' table will be created if not already and stores every microservice -// // * that is apart of the application. -// // * -// // * - Information is collected if the microservice is containerized -// // * -// // * - 'communications' table will be created which creates a new row entry for every -// // * endpoint that the user Request travels through (tracked with hpropograte) -// // */ -// // postgres.connect(this.config); -// // postgres.services(this.config); -// // dockerized ? postgres.docker(this.config) : postgres.health(this.config); -// // if (database.connection === 'REST') { -// // return postgres.communications(this.config); -// // } -// // } else { -// // throw new Error('The only allowed database types are MongoDB and PostgreSQL'); -// // } -// // } - -// // async kafka() { -// // // Test metrics server connection -// // await utilities.testMetricsQuery(this.config); - -// // if (this.config.database.type === 'MongoDB') { -// // mongo.connect(this.config); -// // mongo.serverQuery(this.config); -// // } - -// // else if (this.config.database.type === 'PostgreSQL') { -// // postgres.connect(this.config); -// // postgres.serverQuery(this.config); -// // } else { -// // throw new Error('The only allowed database types are MongoDB and PostgreSQL'); -// // } -// // } - -// // async kubernetes() { -// // // Test metrics server connection -// // await utilities.testMetricsQuery(this.config); - -// // if (this.config.database.type === 'MongoDB') { -// // await mongo.connect(this.config); -// // await mongo.storeGrafanaAPIKey(this.config); -// // //await mongo.createGrafanaDashboards(this.config); -// // mongo.serverQuery(this.config); -// // // return mongo.modifyMetrics(this.config); -// // } - -// // else if (this.config.database.type === 'PostgreSQL') { -// // postgres.connect(this.config); -// // postgres.serverQuery(this.config); -// // } else { -// // throw new Error('The only allowed database types are MongoDB and PostgreSQL'); -// // } -// // } - -// // async docker () { -// // await utilities.testMetricsQuery(this.config); -// // if (this.config.database.type === 'MongoDB') { -// // await mongo.connect(this.config); -// // await mongo.storeGrafanaAPIKey(this.config); -// // mongo.serverQuery(this.config); -// // // return mongo.modifyMetrics(this.config); -// // } else if (this.config.database.type === 'PostgreSQL') { -// // postgres.connect(this.config); -// // postgres.serverQuery(this.config); -// // } else { -// // throw new Error('The only allowed database types are MongoDB and PostgreSQL'); -// // } -// // } - -// // ServerWrapper(server, proto, methods) { -// // /** -// // * Wraps the gRPC server object to automatically write logs to provided DB -// // * @param {*} server -// // * @param {*} proto -// // * @param {*} methods -// // */ -// // const { database } = this.config; -// // if (database.type === 'MongoDB') { -// // return new MongoServerWrapper(server, proto, methods, this.config); -// // } -// // if (database.type === 'PostgreSQL') { -// // return new PostgresServerWrapper(server, proto, methods, this.config); -// // } -// // return null; -// // } - -// // ClientWrapper(client, service) { -// // /** -// // * Wraps the gRPC client to automatically write logs to provided DB -// // * -// // * @param {*} client -// // * @param {*} service -// // */ -// // const { database } = this.config; -// // if (database.type === 'MongoDB') { -// // return new MongoClientWrapper(client, service, this.config); -// // } -// // if (database.type === 
'PostgreSQL') { -// // return new PostgresClientWrapper(client, service, this.config); -// // } -// // return null; -// // } - -// // link(client, server) { -// // /** -// // * Allows the passthrough of metadata from gRPC server to gRPC client -// // * -// // * @param {*} client -// // * @param {*} servere -// // */ -// // client.metadata = server.metadataHolder; -// // } -// // } - -// // export default Chronos; -// // chronos.ts - -// import hpropagate from 'hpropagate'; -// import postgres from './controllers/postgres'; -// import mongo from './controllers/mongo'; -// import MongoClientWrapper from './wrappers/MongoClientWrapper'; -// import MongoServerWrapper from './wrappers/MongoServerWrapper'; -// import PostgresClientWrapper from './wrappers/PostgresClientWrapper'; -// import PostgresServerWrapper from './wrappers/PostgresServerWrapper'; -// import utilities from './controllers/utilities'; - -// // Optional: Define interfaces for your configuration. -// interface DatabaseConfig { -// type: 'MongoDB' | 'PostgreSQL'; -// URI: string; -// connection?: string; -// } - -// interface ChronosConfig { -// microservice: string; -// interval: number; -// dockerized?: boolean; -// database: DatabaseConfig; -// notifications?: any; // Update this type as needed. -// } - -// class Chronos { -// public config: ChronosConfig; - -// constructor(config: ChronosConfig) { -// if (!config) { -// throw new Error('Chronos config is undefined'); -// } -// // Validate all input fields exist and setup notifications. -// config = utilities.validateInput(config); -// config = utilities.addNotifications(config); -// this.config = config; -// } - -// /** -// * Inserts a unique x-correlating-id into the headers of each request/response. -// */ -// propagate(): void { -// hpropagate({ propagateInResponses: true }); -// } - -// /** -// * Main controller for tracking the microservice. -// */ -// track(): any { -// const { database, dockerized } = this.config; - -// if (database.type === 'MongoDB') { -// mongo.connect(this.config); -// mongo.services(this.config); -// dockerized ? mongo.docker(this.config) : mongo.health(this.config); - -// if (database.connection === 'REST') { -// return mongo.communications(this.config); -// } -// } else if (database.type === 'PostgreSQL') { -// postgres.connect(this.config); -// postgres.services(this.config); -// dockerized ? postgres.docker(this.config) : postgres.health(this.config); - -// if (database.connection === 'REST') { -// return postgres.communications(this.config); -// } -// } else { -// throw new Error('The only allowed database types are MongoDB and PostgreSQL'); -// } -// } - -// /** -// * Kafka-related tasks. -// */ -// async kafka(): Promise { -// // If testMetricsQuery is nested under helpers, use utilities.helpers.testMetricsQuery. -// await utilities.helpers.testMetricsQuery(this.config); - -// if (this.config.database.type === 'MongoDB') { -// mongo.connect(this.config); -// mongo.serverQuery(this.config); -// } else if (this.config.database.type === 'PostgreSQL') { -// postgres.connect(this.config); -// postgres.serverQuery(this.config); -// } else { -// throw new Error('The only allowed database types are MongoDB and PostgreSQL'); -// } -// } - -// /** -// * Kubernetes-related tasks. 
-// */ -// async kubernetes(): Promise { -// await utilities.helpers.testMetricsQuery(this.config); - -// if (this.config.database.type === 'MongoDB') { -// await mongo.connect(this.config); -// await mongo.storeGrafanaAPIKey(this.config); -// mongo.serverQuery(this.config); -// } else if (this.config.database.type === 'PostgreSQL') { -// postgres.connect(this.config); -// postgres.serverQuery(this.config); -// } else { -// throw new Error('The only allowed database types are MongoDB and PostgreSQL'); -// } -// } - -// /** -// * Docker-related tasks. -// */ -// async docker(): Promise { -// await utilities.helpers.testMetricsQuery(this.config); - -// if (this.config.database.type === 'MongoDB') { -// await mongo.connect(this.config); -// await mongo.storeGrafanaAPIKey(this.config); -// mongo.serverQuery(this.config); -// } else if (this.config.database.type === 'PostgreSQL') { -// postgres.connect(this.config); -// postgres.serverQuery(this.config); -// } else { -// throw new Error('The only allowed database types are MongoDB and PostgreSQL'); -// } -// } - -// /** -// * Wraps the gRPC server to automatically write logs to the provided DB. -// */ -// ServerWrapper(server: any, proto: any, methods: any): MongoServerWrapper | PostgresServerWrapper | null { -// const { database } = this.config; -// if (database.type === 'MongoDB') { -// return new MongoServerWrapper(server, proto, methods, this.config); -// } -// if (database.type === 'PostgreSQL') { -// return new PostgresServerWrapper(server, proto, methods, this.config); -// } -// return null; -// } - -// /** -// * Wraps the gRPC client to automatically write logs to the provided DB. -// */ -// ClientWrapper(client: any, service: any): MongoClientWrapper | PostgresClientWrapper | null { -// const { database } = this.config; -// if (database.type === 'MongoDB') { -// return new MongoClientWrapper(client, service, this.config); -// } -// if (database.type === 'PostgreSQL') { -// return new PostgresClientWrapper(client, service, this.config); -// } -// return null; -// } - -// /** -// * Links gRPC client and server metadata. -// */ -// link(client: any, server: any): void { -// client.metadata = server.metadataHolder; -// } -// } - -// export default Chronos; -// chronos.ts - -// import hpropagate from 'hpropagate'; - - import postgres from './controllers/postgres.js'; import mongo from './controllers/mongo.js'; import MongoClientWrapper from './wrappers/MongoClientWrapper.js'; diff --git a/chronos_npm_package/controllers/GrafanaPanel.ts b/chronos_npm_package/controllers/GrafanaPanel.ts index fba1932e6..7d79a26d6 100644 --- a/chronos_npm_package/controllers/GrafanaPanel.ts +++ b/chronos_npm_package/controllers/GrafanaPanel.ts @@ -1,310 +1,76 @@ -// function createGrafanaPanelObject( -// metric, -// datasource, -// graphType -// ) { -// // Create a panel object to be used within dashboards. 
-// const panel = { -// "datasource": datasource, -// "fieldConfig": { -// "defaults": { -// "color": { -// "mode": "palette-classic" -// }, -// "custom": { -// "axisCenteredZero": false, -// "axisColorMode": "text", -// "axisLabel": "", -// "axisPlacement": "auto", -// "barAlignment": 0, -// "drawStyle": "line", -// "fillOpacity": 40, -// "gradientMode": "opacity", -// "hideFrom": { -// "legend": false, -// "tooltip": false, -// "viz": false -// }, -// "lineInterpolation": "smooth", -// "lineWidth": 2, -// "pointSize": 5, -// "scaleDistribution": { -// "type": "linear" -// }, -// "showPoints": "auto", -// "spanNulls": false, -// "stacking": { -// "group": "A", -// "mode": "none" -// }, -// "thresholdsStyle": { -// "mode": "off" -// } -// }, -// "mappings": [], -// "thresholds": { -// "mode": "absolute", -// "steps": [ -// { -// "color": "green", -// "value": null -// }, -// { -// "color": "red", -// "value": 80 -// } -// ] -// }, -// "min": 0 -// }, -// "overrides": [] -// }, -// "gridPos": { -// "h": 8, -// "w": 12, -// "x": 0, -// "y": 0 -// }, -// "options": { -// "legend": { -// "calcs": [], -// "displayMode": "list", -// "placement": "bottom", -// "showLegend": true -// }, -// "tooltip": { -// "mode": "single", -// "sort": "none" -// }, -// // "displayLabels": [ -// // "percent" -// // ] - -// }, -// "id": 1, -// "targets": [{ -// "datasource": datasource, -// "editorMode": "builder", -// "expr": metric.metric.replace(/.*\/.*\//g, ''), -// "instant": false, -// "range": true, -// "refId": "A" -// }], -// "title": metric.metric.replace(/.*\/.*\//g, ''), -// "type": graphType, -// "interval": "2s" -// } -// return panel; -// } - -// function updateGrafanaPanelObject( -// metric, -// datasource, -// graphType -// ) { -// // Create a panel object to be used within dashboards. 
-// const panel = { -// "datasource": datasource, -// "fieldConfig": { -// "defaults": { -// "color": { -// "mode": "palette-classic" -// }, -// "custom": { -// "axisCenteredZero": false, -// "axisColorMode": "text", -// "axisLabel": "", -// "axisPlacement": "auto", -// "barAlignment": 0, -// "drawStyle": "line", -// "fillOpacity": 40, -// "gradientMode": "opacity", -// "hideFrom": { -// "legend": false, -// "tooltip": false, -// "viz": false -// }, -// "lineInterpolation": "smooth", -// "lineWidth": 2, -// "pointSize": 5, -// "scaleDistribution": { -// "type": "linear" -// }, -// "showPoints": "auto", -// "spanNulls": false, -// "stacking": { -// "group": "A", -// "mode": "none" -// }, -// "thresholdsStyle": { -// "mode": "off" -// } -// }, -// "mappings": [], -// "thresholds": { -// "mode": "absolute", -// "steps": [ -// { -// "color": "green", -// "value": null -// }, -// { -// "color": "red", -// "value": 80 -// } -// ] -// }, -// "min": 0 -// }, -// "overrides": [] -// }, -// "gridPos": { -// "h": 8, -// "w": 12, -// "x": 0, -// "y": 0 -// }, -// "options": { -// "legend": { -// "calcs": [], -// "displayMode": "list", -// "placement": "bottom", -// "showLegend": true -// }, -// "tooltip": { -// "mode": "single", -// "sort": "none" -// }, -// "displayLabels": [ -// "percent" -// ] -// }, -// "id": 1, -// "targets": [{ -// "datasource": datasource, -// "editorMode": "builder", -// "expr": metric.replace(/.*\/.*\//g, ''), -// "instant": false, -// "range": true, -// "refId": "A" -// }], -// "title": metric.replace(/.*\/.*\//g, ''), -// "type": graphType, -// "interval": "2s" -// } -// return panel; -// } - -// export default { createGrafanaPanelObject, updateGrafanaPanelObject }; - -// GrafanaPanel.ts - -// function createGrafanaPanelObject(metric: any, datasource: any, graphType?: any) { -// // ... Your implementation here ... -// // For example, you might use a default graph type if none is provided: -// const resolvedGraphType = graphType || 'defaultGraphType'; - -// return { -// datasource: datasource, -// fieldConfig: { -// defaults: { -// color: { mode: "palette-classic" }, -// custom: { -// axisCenteredZero: false, -// axisColorMode: "text", -// // ... other custom settings ... -// }, -// // ... other default settings ... -// }, -// mappings: [], -// thresholds: { -// mode: "absolute", -// steps: [ -// { color: "green", value: null }, -// { color: "red", value: 80 }, -// ], -// }, -// min: 0, -// }, -// // You can include the graphType in the returned object if needed: -// graphType: resolvedGraphType, -// // ... additional panel settings ... -// }; -// } - -// function updateGrafanaPanelObject(metric: any, datasource: any, graphType?: any) { -// // ... Your implementation here ... -// return { -// // ... update logic ... -// }; -// } - -// export default { createGrafanaPanelObject, updateGrafanaPanelObject }; -/** - * createGrafanaPanelObject - * Creates a new panel object for Grafana with optional 'graphType'. +/* + Creates a new panel object for a Grafana dashboard + - Takes in a **metric**, **datasource**, and an optional graphType. + - If no `graphType` is provided, it defaults to `'defaultGraphType'`. + * + * @param metric - Data point or metric to be visualized. + * @param datasource - The Grafana datasource to fetch data from. + * @param graphType - *(Optional)* The type of visualization (e.g., 'timeseries', 'gauge'). + * @returns A Grafana panel configuration object. 
*/ export function createGrafanaPanelObject( metric: any, datasource: any, graphType?: any ) { - // For example, you might use a default graph type if none is provided: + // Sets a default graph type if none is provided const resolvedGraphType = graphType || 'defaultGraphType'; return { - datasource, + datasource, // πŸ”— Connect this panel to a Grafana datasource fieldConfig: { defaults: { - color: { mode: "palette-classic" }, + color: { mode: "palette-classic" }, custom: { axisCenteredZero: false, axisColorMode: "text", - // ... other custom settings ... + }, - // ... other default settings ... }, - mappings: [], + mappings: [], // thresholds: { mode: "absolute", steps: [ - { color: "green", value: null }, - { color: "red", value: 80 }, + { color: "green", value: null }, + { color: "red", value: 80 }, ], }, - min: 0, + min: 0, // }, - // You can include the graphType in the returned object if needed: - graphType: resolvedGraphType, - // ... additional panel settings ... + graphType: resolvedGraphType, + }; } /** - * updateGrafanaPanelObject - * Updates an existing panel object for Grafana with optional 'graphType'. + Updates an existing Grafana panel object + * - Similar to `createGrafanaPanelObject`, but for **modifying an existing panel**. + * - Uses an optional **graphType** (defaults to `'updatedGraphType'`). + * + * @param metric - The metric being updated in Grafana. + * @param datasource - The associated Grafana datasource. + * @param graphType - *(Optional)* Type of visualization. + * @returns An updated Grafana panel object. */ export function updateGrafanaPanelObject( metric: any, datasource: any, graphType?: any ) { + // Set a default graph type if none is provided const resolvedGraphType = graphType || 'updatedGraphType'; return { - datasource, - // Example update logic: + datasource, // fieldConfig: { defaults: { - color: { mode: "palette-classic" }, + color: { mode: "palette-classic" }, custom: { - // ... your update logic ... + }, - // ... other updates ... }, - // ... further updated config ... + }, - graphType: resolvedGraphType, - // ... more update logic ... + graphType: resolvedGraphType, }; } diff --git a/chronos_npm_package/controllers/alert.ts b/chronos_npm_package/controllers/alert.ts index ca130142a..187ce3bdd 100644 --- a/chronos_npm_package/controllers/alert.ts +++ b/chronos_npm_package/controllers/alert.ts @@ -1,189 +1,103 @@ -// import * as axios from 'axios'; -// import * as nodemailer from 'nodemailer'; - -// // const alert = {}; - -// interface SlackSettings { -// webhook: any; // possibly 'string'? 
-// } - -// interface EmailSettings { -// emails: string; -// emailHost: string; -// emailPort: string | number; -// user: string; -// password: string; -// } - -// interface Alert { -// sendSlack: (code: number, message: string, slackSettings: SlackSettings) => void; -// sendEmail: (code: number, message: string, emailSettings: EmailSettings) => void; -// } - -// const alert: Alert = { -// /** -// * Sends slack notifications to the provided slackurl with the status code -// * and message via an axios POST request -// // * @param {integer} code Response status code -// // * @param {string} message Response message -// // * @param {Object} slackSettings User provided slack settings -// */ - -// sendSlack : (code: number, message: string, slackSettings: any) => { -// const { webhook } = slackSettings; - -// // Data for POST request -// const data = { text: `${code}, ${message}, ${Date.now()}` }; - -// // Options for POST request -// const config = { -// method: 'post', -// headers: { -// 'Content-Type': 'application/json', -// }, -// }; - -// axios -// .post(webhook, data, config) -// .then(res => console.log('Status Code >= 400...\nError message sent')) -// .catch(error => console.log('test------>', error.message)); -// }, - -// /** -// * Sends email notifications using the provided email information with the -// * status code and message via an axios POST request -// // * @param {integer} code Response status code -// // * @param {string} message Response message -// // * @param {Object} emailSettings User provided email settings -// // */ - -// sendEmail : (code: number, message: string, emailSettings: any) => { -// const { emails, emailHost, emailPort, user, password } = emailSettings; - -// // Message object contains recipient email list and email text body -// const data = { -// to: `${emails}`, -// subject: 'Error from Middleware', -// text: `${code}, ${message}`, -// }; - -// // Configuration settings for email notifications -// const config = { -// host: `${emailHost}`, -// port: `${emailPort}`, -// auth: { -// user: `${user}`, -// pass: `${password}`, -// }, -// }; -// const transport = nodemailer.createTransport(config); - -// transport.sendMail(data, function (err, info) { -// if (err) { -// console.log(err); -// } else { -// console.log(info); -// } -// }); -// }, -// }; - -// export default alert -// alert.ts - -// Option A: Preferred if your tsconfig.json has "esModuleInterop": true +import nodemailer from 'nodemailer'; // Importing nodemailer for sending emails import axios from 'axios'; -// Option B (if you can’t enable esModuleInterop): -// import * as axiosImport from 'axios'; -// const axios = axiosImport.default; - -import nodemailer from 'nodemailer'; - +// Defined a interface for Slack settings +// This describes the structure of the `slackSettings` object. +// There is a property of `webhook` with a string value, which is the URL to send Slack messages. interface SlackSettings { - webhook: string; // assuming webhook is a URL string + webhook: string; // The Slack webhook URL } +// Defined a TypeScript interface for Email settings +// This describes the structure of `emailSettings` used for sending emails. 
interface EmailSettings { - emails: string; - emailHost: string; - emailPort: string | number; - user: string; - password: string; + emails: string; // A comma-separated list of email addresses + emailHost: string; // SMTP server (e.g., "smtp.gmail.com") + emailPort: string | number; // SMTP port + user: string; // Email username (e.g., your email address) + password: string; // Email password } +// Defined an interface for the Alert object +// This helps TypeScript make sure our `alert` object has two methods: sendSlack & sendEmail. interface Alert { sendSlack: (code: number, message: string, slackSettings: SlackSettings) => void; sendEmail: (code: number, message: string, emailSettings: EmailSettings) => void; } +// Creates an Alert object with 2 functions const alert: Alert = { /** - * Sends slack notifications to the provided slack webhook URL with the status code - * and message via an Axios POST request. + * Sends a notification to a Slack channel when an error occurs. + * This function makes an HTTP POST request to the Slack webhook URL. * - * @param code Response status code - * @param message Response message - * @param slackSettings User provided slack settings + * @param code - The HTTP status code + * @param message - The error message to send + * @param slackSettings - The Slack webhook URL provided by the user */ sendSlack: (code: number, message: string, slackSettings: SlackSettings) => { - const { webhook } = slackSettings; - // Data for POST request + const { webhook } = slackSettings; // Declare a const destructuring slackSettings for the webhook URL + + //Prepares the message payload const data = { text: `${code}, ${message}, ${Date.now()}` }; - // Options for POST request + + // Sets up the request headers const config = { headers: { - 'Content-Type': 'application/json', + 'Content-Type': 'application/json', // Ensures proper formatting for Slack }, }; + // Send the POST request using axios axios .post(webhook, data, config) - .then((res) => console.log('Status Code >= 400...\nError message sent')) - .catch((error) => console.log('Error sending Slack message:', error.message)); + .then(() => console.log('βœ… Slack alert sent successfully!')) + .catch(error => console.log('❌ Error sending Slack message:', error.message)); }, /** - * Sends email notifications using the provided email information with the - * status code and message via Nodemailer. + * Sends an email notification when an error occurs. + * Uses nodemailer to send the email through an SMTP server. * - * @param code Response status code - * @param message Response message - * @param emailSettings User provided email settings + * @param code - The HTTP status code + * @param message - The error message to send + * @param emailSettings - The SMTP configuration and recipient emails */ sendEmail: (code: number, message: string, emailSettings: EmailSettings) => { - const { emails, emailHost, emailPort, user, password } = emailSettings; + const { emails, emailHost, emailPort, user, password } = emailSettings; // Extract email settings - // Message object for the email + // Format for the email message const mailOptions = { - to: emails, - subject: 'Error from Middleware', - text: `${code}, ${message}`, + to: emails, // Recipient(s) + subject: '🚨 Error Alert from Middleware', + text: `${code}, ${message}`, // Email content }; - // Convert port to number if necessary - const portNumber = - typeof emailPort === 'string' ? 
parseInt(emailPort, 10) : emailPort; + // Ensure emailPort is always a number + const portNumber = typeof emailPort === 'string' ? parseInt(emailPort, 10) : emailPort; - // Configuration settings for Nodemailer + // Set up the SMTP transport configuration const transportConfig = { - host: emailHost, - port: portNumber, + host: emailHost, // SMTP server (e.g., "smtp.gmail.com") + port: portNumber, // Convert to number if needed auth: { - user: user, - pass: password, + user: user, // Email username + pass: password, // Email password or app password }, }; + // Create an email transporter using nodemailer const transport = nodemailer.createTransport(transportConfig); + // Send the email transport.sendMail(mailOptions, (err, info) => { if (err) { - console.log('Error sending email:', err); + console.log('❌ Error sending email:', err); } else { - console.log('Email sent:', info); + console.log('βœ… Email sent successfully:', info); } }); }, }; +// Export the alert object for use in other files export default alert; diff --git a/chronos_npm_package/controllers/createGrafanaDashboard.ts b/chronos_npm_package/controllers/createGrafanaDashboard.ts index 9a45b8d20..8ec06b0d5 100644 --- a/chronos_npm_package/controllers/createGrafanaDashboard.ts +++ b/chronos_npm_package/controllers/createGrafanaDashboard.ts @@ -1,281 +1,70 @@ -// const axios = require('axios'); -// const createGrafanaPanelObject = require('./createGrafanaPanelObject,js'); - -// async function createGrafanaDashboard( -// metrix, -// datasource, -// ) { -// // create dashboard object boilerplate -// const dashboard = { -// "dashboard": { -// "id": null, -// "uid": metrix.meric.replace(/.*\/.*\//g, ''), -// "title": metrix.meric.replace(/.*\/.*\//g, ''), -// "tags": ["templated"], -// "timezone": "browser", -// "schemaVersion": 16, -// "version": 0, -// "refresh": "10s", -// panels: [], -// }, -// folderId: 0, -// overwrite: true, -// }; - - -// // push panel into dashboard object with a line for each metric in promQLQueries object -// dashboard.dashboard.panels.push(createGrafanaPanelObject(metrix, datasource)); - -// try { -// // POST request to Grafana Dashboard API to create a dashboard -// const dashboardResponse = await axios.post( -// 'http://localhost:32000/api/dashboards/db', -// JSON.stringify(dashboard), -// { -// headers: { -// 'Content-Type': 'application/json', -// 'Authorization': 'Bearer glsa_pITqM0BIfNHNKL4PsXJqmTYQl0D9QGxF_486f63e1' -// }, -// } -// ); - -// // Descriptive error log for developers -// if (dashboardResponse.status >= 400) { -// console.log( -// 'Error with POST request to Grafana Dashboards API. In createGrafanaDashboardObject.' -// ); -// } else { -// // A simple console log to show when graphs are done being posted to Grafana. -// console.log(`πŸ“Š Grafana graphs πŸ“Š for the ${containerName} container are ready!!`); -// } -// } catch (err) { -// console.log(err); -// } -// } - -// module.exports = createGrafanaDashboard; - -// -// import axios from 'axios'; -// import createGrafanaPanelObject from './GrafanaPanel'; // omit the '.ts' extension - -// // Example interfaces – adjust to your actual data shape -// interface Metrix { -// meric: string; // Use "meric" if that’s the actual field, otherwise consider renaming to "metric" -// containerName?: string; // Optional field for container name -// // add other fields as needed -// } - -// interface Datasource { -// // Define fields for your datasource object (e.g., name, type, url, etc.) 
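To illustrate the alert controller finalized above, here is a minimal usage sketch against its exported sendSlack and sendEmail methods. The webhook URL, SMTP host, recipients, and credentials are placeholders, not real values.

// Usage sketch for the alert controller above (placeholder values only).
import alert from './controllers/alert.js';

// Post a Slack notification for a failing response.
alert.sendSlack(503, 'Service unavailable', {
  webhook: 'https://hooks.slack.com/services/T000/B000/XXXX', // placeholder webhook URL
});

// Send an email notification through an SMTP server.
alert.sendEmail(503, 'Service unavailable', {
  emails: 'oncall@example.com',   // comma-separated recipient list
  emailHost: 'smtp.example.com',  // placeholder SMTP host
  emailPort: 465,
  user: 'alerts@example.com',     // placeholder credentials
  password: 'app-password',
});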
-// } - -// interface DashboardPanel { -// // Define the structure returned by createGrafanaPanelObject if needed. -// // For example: -// // id: number; -// // title: string; -// // type: string; -// // targets: any[]; -// } - -// export async function createGrafanaDashboard( -// metrix: Metrix, -// datasource: Datasource, -// ): Promise { -// // Create the dashboard payload -// const dashboard = { -// dashboard: { -// id: null, -// uid: metrix.meric.replace(/.*\/.*\//g, ''), -// title: metrix.meric.replace(/.*\/.*\//g, ''), -// tags: ['templated'], -// timezone: 'browser', -// schemaVersion: 16, -// version: 0, -// refresh: '10s', -// panels: [] as DashboardPanel[], // Type the panels array if you know the panel's structure -// }, -// folderId: 0, -// overwrite: true, -// }; - -// // Create a panel using the imported function and add it to the dashboard -// const panel = createGrafanaPanelObject(metrix, datasource); -// dashboard.dashboard.panels.push(panel); - -// try { -// // POST request to Grafana Dashboard API to create or update a dashboard -// const dashboardResponse = await axios.post( -// 'http://localhost:32000/api/dashboards/db', -// JSON.stringify(dashboard), // You can also pass 'dashboard' directly; axios handles JSON conversion. -// { -// headers: { -// 'Content-Type': 'application/json', -// 'Authorization': 'Bearer glsa_pITqM0BIfNHNKL4PsXJqmTYQl0D9QGxF_486f63e1', -// }, -// }, -// ); - -// // Check the response status (axios throws for 4xx/5xx errors by default) -// if (dashboardResponse.status >= 400) { -// console.error('Error with POST request to Grafana Dashboards API.'); -// } else { -// // Log success message, using containerName if available -// if (metrix.containerName) { -// console.log(`πŸ“Š Grafana graphs for the "${metrix.containerName}" container are ready!`); -// } else { -// console.log('πŸ“Š Grafana graphs are ready!'); -// } -// } -// } catch (err) { -// console.error('Error creating Grafana dashboard:', err); -// } -// } -// import axios from 'axios'; -// // Import the default export from GrafanaPanel.ts as an object. -// import GrafanaPanel from './GrafanaPanel'; - -// // Example interfaces (adjust to suit your actual data shape) -// interface Metrix { -// meric: string; // e.g., "some/metric/string" -// containerName?: string; // optional container name field -// // add other fields as needed -// } - -// interface Datasource { -// // define relevant fields for your datasource object, e.g., name, type, url, etc. -// } - -// interface DashboardPanel { -// // define the structure of a dashboard panel as returned by createGrafanaPanelObject if needed -// // e.g., id?: number; title?: string; type?: string; targets?: any[]; -// } - -// export async function createGrafanaDashboard( -// metrix: Metrix, -// datasource: Datasource, -// ): Promise { - -// // Create the dashboard payload -// const dashboard = { -// dashboard: { -// id: null, -// uid: metrix.meric.replace(/.*\/.*\//g, ''), -// title: metrix.meric.replace(/.*\/.*\//g, ''), -// tags: ['templated'], -// timezone: 'browser', -// schemaVersion: 16, -// version: 0, -// refresh: '10s', -// panels: [] as DashboardPanel[], -// }, -// folderId: 0, -// overwrite: true, -// }; - -// // Use the createGrafanaPanelObject function from the imported GrafanaPanel object. 
-// const panel = GrafanaPanel.createGrafanaPanelObject(metrix, datasource); -// dashboard.dashboard.panels.push(panel); - -// try { -// // POST request to Grafana Dashboard API to create/update a dashboard -// const dashboardResponse = await axios.post( -// 'http://localhost:32000/api/dashboards/db', -// JSON.stringify(dashboard), -// { -// headers: { -// 'Content-Type': 'application/json', -// 'Authorization': 'Bearer glsa_pITqM0BIfNHNKL4PsXJqmTYQl0D9QGxF_486f63e1', -// }, -// }, -// ); - -// // Check the response status (axios throws on error responses by default) -// if (dashboardResponse.status >= 400) { -// console.error('Error with POST request to Grafana Dashboards API.'); -// } else { -// if (metrix.containerName) { -// console.log(`πŸ“Š Grafana graphs for the "${metrix.containerName}" container are ready!`); -// } else { -// console.log('πŸ“Š Grafana graphs are ready!'); -// } -// } -// } catch (err) { -// console.error('Error creating Grafana dashboard:', err); -// } -// } import axios from 'axios'; -// Import the default export from GrafanaPanel.ts as an object. +// Importing the function that generates Grafana panel objects import { createGrafanaPanelObject } from '../controllers/GrafanaPanel.js'; -// Example interfaces (adjust to suit your actual data shape) -interface Metrix { - meric: string; // e.g., "some/metric/string" - containerName?: string; // optional container name field - // add other fields as needed -} +//Defined Interfaces to provide type safety -interface Datasource { - // define relevant fields for your datasource object, e.g., name, type, url, etc. +interface Metric { + metric: string; // βœ… Represents the metric name in Grafana } -interface DashboardPanel { - // define the structure of a dashboard panel as returned by createGrafanaPanelObject if needed - // e.g., id?: number; title?: string; type?: string; targets?: any[]; +interface Datasource { + type: string; // Defines the type of datasource + uid: string; // Unique identifier for the datasource in Grafana } +/** + * This function creates a Grafana Dashboard and adds a panel to it. + * The dashboard then gets stored inside Grafana via an HTTP request. 
+ * @param metric - The monitoring metric for which we are creating a dashboard + * @param datasource - The Grafana datasource used for this metric + */ export async function createGrafanaDashboard( - metrix: Metrix, - datasource: Datasource, + metric: Metric, // Accepts a Metric object as input + datasource: Datasource // Accepts a Datasource object as input ): Promise { - // Create the dashboard payload + const dashboard = { dashboard: { - id: null, - uid: metrix.meric.replace(/.*\/.*\//g, ''), - title: metrix.meric.replace(/.*\/.*\//g, ''), - tags: ['templated'], - timezone: 'browser', - schemaVersion: 16, - version: 0, - refresh: '10s', - panels: [] as DashboardPanel[], + id: null, // New dashboard (doesn’t exist yet, so ID is null) + uid: metric.metric.replace(/.*\/.*\//g, ''), //accesses unique identifier from the metric + title: metric.metric.replace(/.*\/.*\//g, ''), // Title of the dashboard (same as metric name) + tags: ['templated'], // Tags help categorize dashboards + timezone: 'browser', // Uses the local browser’s timezone + schemaVersion: 16, // Grafana’s API version + version: 0, // Initial version + refresh: '10s', // Auto-refresh every 10 seconds + panels: [] as any[], // Array to hold panels (empty initially) }, - folderId: 0, - overwrite: true, + folderId: 0, // Stores the dashboard in the root folder + overwrite: true, // Allows overwriting if a dashboard with the same UID exists }; - // Use the createGrafanaPanelObject function from the imported GrafanaPanel object. - // With the updated signature, the third parameter is optional. - const panel = createGrafanaPanelObject(metrix, datasource); - dashboard.dashboard.panels.push(panel); + // Creates and adds a panel to the Dashboard + const panel = createGrafanaPanelObject(metric, datasource); // generates a panel using the imported function + dashboard.dashboard.panels.push(panel); // Add the generated panel to the dashboard try { - // POST request to Grafana Dashboard API to create/update a dashboard + // Sends a POST request to Grafana to create/update the dashboard const dashboardResponse = await axios.post( - 'http://localhost:32000/api/dashboards/db', - JSON.stringify(dashboard), + 'http://localhost:32000/api/dashboards/db', // Grafana API endpoint for dashboards + JSON.stringify(dashboard), { headers: { 'Content-Type': 'application/json', - 'Authorization': 'Bearer glsa_pITqM0BIfNHNKL4PsXJqmTYQl0D9QGxF_486f63e1', + 'Authorization': 'Bearer glsa_pITqM0BIfNHNKL4PsXJqmTYQl0D9QGxF_486f63e1', // Grafana API Key }, }, ); - // Check the response status (axios throws on error responses by default) + // Error Handler if (dashboardResponse.status >= 400) { - console.error('Error with POST request to Grafana Dashboards API.'); + console.error('⚠️ Error creating Grafana Dashboard: Check API or JSON request.'); } else { - if (metrix.containerName) { - console.log(`πŸ“Š Grafana graphs for the "${metrix.containerName}" container are ready!`); - } else { - console.log('πŸ“Š Grafana graphs are ready!'); - } + console.log(`πŸ“Š Grafana dashboard for metric "${metric.metric}" is ready!`); } } catch (err) { - console.error('Error creating Grafana dashboard:', err); + console.error('🚨 Error creating Grafana dashboard:', err); } } diff --git a/chronos_npm_package/controllers/dockerHelper.ts b/chronos_npm_package/controllers/dockerHelper.ts index 3684e970d..6f1bd08e0 100644 --- a/chronos_npm_package/controllers/dockerHelper.ts +++ b/chronos_npm_package/controllers/dockerHelper.ts @@ -1,115 +1,67 @@ -// // importing systeminformation -// 
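For reference, a sketch of how the createGrafanaDashboard function above might be invoked. The metric name and datasource uid are placeholders; in practice the datasource object would come from the Grafana datasources API rather than being hard-coded.

// Usage sketch for createGrafanaDashboard above (placeholder values only).
import { createGrafanaDashboard } from './controllers/createGrafanaDashboard.js';

const metric = { metric: 'container/cpu/current_CPU_load_percent' }; // placeholder metric name
const datasource = { type: 'prometheus', uid: 'abc123' };            // placeholder datasource uid

// Builds the dashboard payload, attaches one panel, and POSTs it to the Grafana API.
createGrafanaDashboard(metric, datasource)
  .then(() => console.log('dashboard request sent'))
  .catch(console.error);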
import * as si from 'systeminformation'; - -// /** -// * Finds the data pt with containerName that matches microservice and extracts container ID, name, platform, and start time. -// * @param {*} microservice -// * @returns array of active containers (ea. container = an obj). -// */ -// async function getDockerContainer(microservice) { -// try { - -// const containers = await si.dockerContainers(); -// const out = {}; -// let found = false; -// for (let container of containers) { -// if (container.name === microservice) { -// found = true; -// out.containername = microservice; -// out.containerid = container.id; -// out.platform = container.platform; -// out.starttime = container.startedAt; -// break; -// } -// } - -// if (found) { -// return out -// } else { -// throw new Error(`Unable to find Docker container with name dockerHelper.js LN 28${microservice}`) -// } - -// } catch (e) { -// console.error(e); -// return e; -// } - -// } - - -// async function readDockerContainer(input) { -// const out = {...input}; -// try { -// const data = await si.dockerContainerStats(input.containerid); -// out.memoryusage = data[0].memUsage; -// out.memorylimit = data[0].memLimit; -// out.memorypercent = data[0].memPercent; -// out.cpupercent = data[0].cpuPercent; -// out.networkreceived = data[0].netIO.rx; -// out.networksent = data[0].netIO.wx; -// out.processcount = data[0].pids; -// out.restartcount = data[0].restartCount; -// out.time = Date.now(); -// return out; - -// } catch (e) { -// console.error(`Unable to query container id ${input.containerid}`); -// return new Error('Unable to identify active Docker containers'); -// } -// } - -// export default { getDockerContainer, readDockerContainer}; - -// importing systeminformation import * as si from 'systeminformation'; -// Remove the file extension in your import: import { createGrafanaPanelObject } from '../controllers/GrafanaPanel.js'; -// Define your interfaces +/** + Interface for a Basic Docker Container + + */ interface DockerContainer { - containername: string; - containerid: string; - platform: string; - starttime: string; + containername: string; // Name of the container + containerid: string; // Unique identifier + platform: string; // Operating system running inside the container + starttime: string; // Timestamp when the container started } +/* + Interface for Extended Docker Container Stats + - Extends DockerContainer to add real-time performance metrics. + - DockerContainerStats includes all properties of DockerContainer plus more. + */ interface DockerContainerStats extends DockerContainer { - memoryusage?: number; - memorylimit?: number; - memorypercent?: number; - cpupercent?: number; - networkreceived?: number; - networksent?: number; - processcount?: number; - restartcount?: number; - time?: number; + memoryusage?: number; // RAM used by the container + memorylimit?: number; // Max allowed RAM for the container + memorypercent?: number; // Percentage of memory used + cpupercent?: number; // CPU usage as a percentage + networkreceived?: number; // Bytes received over the network + networksent?: number; // Bytes sent over the network + processcount?: number; // Number of active processes inside the container + restartcount?: number; // Number of times the container has restarted + time?: number; // Timestamp of when the data was collected } -/** - * Finds the data point with containerName that matches microservice and extracts container ID, name, platform, and start time. - * @param microservice - The name of the microservice. 
- * @returns A Promise resolving to a DockerContainer object. +/* +Finds and Returns Basic Docker Container Information + + - Uses `systeminformation.dockerContainers()` to get a list of all running containers. + - It **searches for a container matching microservice and extracts its core info. + - If no matching container is found, it throws an error**. + + @param microservice - The **name of the microservice (container) to find. + @returns A **Promise** resolving to a `DockerContainer` object. */ async function getDockerContainer(microservice: string): Promise { try { + // Fetch all running Docker containers const containers = await si.dockerContainers(); - const out = {} as DockerContainer; + const out = {} as DockerContainer; // Creating an empty object following the interface structure let found = false; - for (let container of containers) { + + // Search for the container with the microservice name + for (const container of containers) { if (container.name === microservice) { found = true; out.containername = microservice; out.containerid = container.id; out.platform = container.platform; out.starttime = container.startedAt; - break; + break; // Exit loop after finding the correct container } } if (found) { - return out; + return out; // Return container info } else { - throw new Error(`Unable to find Docker container with name dockerHelper.js LN 28 ${microservice}`); + throw new Error(`❌ Unable to find Docker container with name: ${microservice}`); } } catch (e) { console.error(e); @@ -117,29 +69,40 @@ async function getDockerContainer(microservice: string): Promise { + // Create a new object based on the original container details const out = { ...input } as DockerContainerStats; + try { + // Fetch real-time stats for the given container ID const data = await si.dockerContainerStats(input.containerid); - out.memoryusage = data[0].memUsage; - out.memorylimit = data[0].memLimit; - out.memorypercent = data[0].memPercent; - out.cpupercent = data[0].cpuPercent; - out.networkreceived = data[0].netIO.rx; - out.networksent = data[0].netIO.wx; - out.processcount = data[0].pids; - out.restartcount = data[0].restartCount; - out.time = Date.now(); - return out; + + // Extract and map system data to our TypeScript interface + out.memoryusage = data[0].memUsage; // RAM used + out.memorylimit = data[0].memLimit; // Max RAM allowed + out.memorypercent = data[0].memPercent; // Memory usage percentage + out.cpupercent = data[0].cpuPercent; // CPU usage percentage + out.networkreceived = data[0].netIO.rx; // Bytes received + out.networksent = data[0].netIO.wx; // Bytes sent + out.processcount = data[0].pids; // Number of processes + out.restartcount = data[0].restartCount; // Number of restarts + out.time = Date.now(); // Timestamp of data retrieval + + return out; // Return the updated object } catch (e) { - console.error(`Unable to query container id ${input.containerid}`); + console.error(`❌ Unable to query container ID: ${input.containerid}`); throw new Error('Unable to identify active Docker containers'); } } + export default { getDockerContainer, readDockerContainer }; diff --git a/chronos_npm_package/controllers/getDataSource.ts b/chronos_npm_package/controllers/getDataSource.ts index 02ec92636..21c5b1473 100644 --- a/chronos_npm_package/controllers/getDataSource.ts +++ b/chronos_npm_package/controllers/getDataSource.ts @@ -1,17 +1,18 @@ -import axios from 'axios'; +import axios from 'axios'; // Imports Axios for making HTTP requests async function getGrafanaDatasource() { - // Fetch datasource 
information from grafana API. - // This datasource is PRECONFIGURED on launch using grafana config. + // Make an HTTP request to get the list of datasources from Grafana. + // Grafana uses datasources like Prometheus, InfluxDB, etc., to store and retrieve monitoring metrics. const datasourceResponse = await axios.get('http://localhost:32000/api/datasources'); - // Create a datasource object to be used within panels. + // Access the first datasource from the response. + // Grafana API returns an array of datasources, so we are assuming the first one is the one we need. const datasource = { - type: datasourceResponse[0].type, - uid: datasourceResponse[0].uid, + type: datasourceResponse[0].type, // This is the type of datasource (i.e., Prometheus) + uid: datasourceResponse[0].uid, // Unique identifier for this datasource }; - return datasource; + return datasource; // Return the accessed datasource info. } -module.exports = getGrafanaDatasource; \ No newline at end of file +module.exports = getGrafanaDatasource; diff --git a/chronos_npm_package/controllers/healthHelpers.ts b/chronos_npm_package/controllers/healthHelpers.ts index 5ec186a36..e126ce6ed 100644 --- a/chronos_npm_package/controllers/healthHelpers.ts +++ b/chronos_npm_package/controllers/healthHelpers.ts @@ -1,669 +1,21 @@ -// // import * as si from 'systeminformation'; - -// // const healthHelpers = {}; - -// // /** -// // * This object contains all systeminformation methods, -// // * metric names, and corresponding data points -// // */ - -// // const collectedMetrics = { -// // cpu: { -// // speed_in_GHz: 'speed', -// // speedMax_in_GHz: 'speedMax', -// // num_of_cores: 'cores', -// // num_of_processors: 'processors', -// // 'cache.l1d in bytes': 'cache.l1d', -// // 'cache.l1i in bytes': 'cache.l1i', -// // 'cache.l2 in bytes': 'cache.l2', -// // 'cache.l3 in bytes': 'cache.l3', -// // }, -// // cpuCurrentSpeed: { -// // average_CPU_speed_in_GHz: 'avg', -// // minimum_CPU_speed_in_GHz: 'min', -// // maximum_CPU_speed_in_GHz: 'max', -// // }, -// // cpuTemperature: { -// // average_temperature: 'main', -// // max_temperature: 'max', -// // }, -// // currentLoad: { -// // average_CPU_load_percent: 'avg', -// // current_CPU_load_percent: 'currentLoad', -// // current_CPU_load_user_percent: 'currentLoadUser', -// // current_CPU_load__system_percent: 'currentLoadSystem', -// // current_CPU_load_nice_percent: 'currentLoadNice', -// // current_CPU_load_idle_percent: 'currentLoadIdle', -// // current_CPU_load_raw_ticks: 'rawCurrentLoad', -// // }, -// // mem: { -// // totalmemory_in_bytes: 'total', -// // freememory_in_bytes: 'free', -// // usedmemory_in_bytes: 'used', -// // activememory_in_bytes: 'active', -// // buffers_plus_cache_in_bytes: 'buffcache', -// // available_memory: 'available', -// // }, -// // processes: { -// // totalprocesses: 'all', -// // blockedprocesses: 'blocked', -// // runningprocesses: 'running', -// // sleepingprocesses: 'sleeping', -// // }, -// // inetLatency: 'all data collected', -// // }; - -// // /** -// // * collectHealthData scrapes metrics for microservices -// // * @returns Promise array with each metric in an object -// // */ - -// // healthHelpers.collectHealthData = async() => { - -// // const healthDataCollection = []; -// // const time = Date.now(); - -// // /** obtains core CPU metrics and creates and pushes object with -// // * metric name and value to the healthDataCollection array -// // */ -// // await si.cpu() -// // .then(data => { -// // // console.log(data) -// // const siMethodName = 
'cpu'; -// // for (let metricName in collectedMetrics[siMethodName]) { -// // healthDataCollection.push({ -// // metric: metricName, -// // value: data[collectedMetrics[siMethodName][metricName]], -// // category: 'CPU', -// // time, -// // }); -// // } -// // // console.log('CPU HEALTH METRICS',healthDataCollection) -// // }) -// // .catch(err => { -// // if (err) { -// // throw err; -// // } -// // }); - -// // /** obtains CPU speed metrics and creates and pushes object with -// // * metric name and value to the healthDataCollection array -// // */ -// // await si.cpuCurrentSpeed() -// // .then(data => { -// // const siMethodName = 'cpuCurrentSpeed'; -// // for (let metricName in collectedMetrics[siMethodName]) { -// // healthDataCollection.push({ -// // metric: metricName, -// // value: data[collectedMetrics[siMethodName][metricName]], -// // category: 'CPU', -// // time, -// // }); -// // } -// // }) -// // .catch(err => { -// // if (err) { -// // throw err; -// // } -// // }); - -// // /** obtains CPU temperature metrics and creates and pushes object with -// // * metric name and value to the healthDataCollection array -// // */ - -// // await si.cpuTemperature() -// // .then(data => { -// // const siMethodName = 'cpuTemperature'; -// // for (let metricName in collectedMetrics[siMethodName]) { -// // healthDataCollection.push({ -// // metric: metricName, -// // value: data[collectedMetrics[siMethodName][metricName]], -// // category: 'CPU', -// // time, -// // }); -// // } -// // }) -// // .catch(err => { -// // if (err) { -// // throw err; -// // } -// // }); - -// // /** -// // * obtains metrics relating to current load and creates and pushes object with -// // * metric name and value to the healthDataCollection array -// // */ -// // await si.currentLoad() -// // .then(data => { -// // const siMethodName = 'currentLoad'; -// // for (let metricName in collectedMetrics[siMethodName]) { -// // healthDataCollection.push({ -// // metric: metricName, -// // value: data[collectedMetrics[siMethodName][metricName]], -// // category: 'CPU', -// // time, -// // }); -// // } -// // }) -// // .catch(err => { -// // if (err) { -// // throw err; -// // } -// // }); - -// // /** -// // * obtains metrics relating to memory and creates and pushes object with -// // * metric name and value to the healthDataCollection array -// // */ -// // await si.mem() -// // .then(data => { -// // const siMethodName = 'mem'; -// // for (let metricName in collectedMetrics[siMethodName]) { -// // healthDataCollection.push({ -// // metric: metricName, -// // value: data[collectedMetrics[siMethodName][metricName]], -// // category: 'Memory', -// // time, -// // }); -// // } -// // }) -// // .catch(err => { -// // if (err) { -// // throw err; -// // } -// // }); - -// // /** obtains metrics relating to current processes and creates and pushes object with -// // * metric name and value to the healthDataCollection array -// // */ -// // await si.processes() -// // .then(data => { -// // const siMethodName = 'processes'; -// // for (let metricName in collectedMetrics[siMethodName]) { -// // healthDataCollection.push({ -// // metric: metricName, -// // value: data[collectedMetrics[siMethodName][metricName]], -// // category: 'Processes', -// // time, -// // }); -// // } -// // }) -// // .catch(err => { -// // if (err) { -// // throw err; -// // } -// // }); - -// // /** obtains latency and creates and pushes object with -// // * metric name and value to the healthDataCollection array -// // */ -// // await si.inetLatency() 
-// // .then(data => { -// // healthDataCollection.push({ -// // metric: 'latency', -// // value: data, -// // category: 'Latency', -// // time, -// // }); - -// // }) -// // .catch(err => { -// // if (err) { -// // throw err; -// // } -// // }); - -// // /** Return a promise that resolves to an array of all of the data points -// // * and removes any empty strings, NaN, or "NaN" from values prevent database errors -// // */ -// // return Promise.all(healthDataCollection).then(array => { -// // // console.log("PROMISE ARRAY",array) -// // return array.filter(metric => { -// // if (isNaN(metric.value) || metric.value === 'NaN' || metric.value === '' || metric.value === null) return false; -// // else return true; -// // }) -// // } -// // ); -// // }; - -// // export default healthHelpers; - -// import * as si from 'systeminformation'; - -// // Define an interface for the metric objects -// interface HealthMetric { -// metric: string; -// value: number; -// category: string; -// time: number; -// } - -// // Create a helpers object. You could also define an interface for it if desired. -// const healthHelpers: { collectHealthData?: () => Promise } = {}; - -// /** -// * This object contains all systeminformation methods, -// * metric names, and corresponding data points -// */ -// const collectedMetrics = { -// cpu: { -// speed_in_GHz: 'speed', -// speedMax_in_GHz: 'speedMax', -// num_of_cores: 'cores', -// num_of_processors: 'processors', -// 'cache.l1d in bytes': 'cache.l1d', -// 'cache.l1i in bytes': 'cache.l1i', -// 'cache.l2 in bytes': 'cache.l2', -// 'cache.l3 in bytes': 'cache.l3', -// }, -// cpuCurrentSpeed: { -// average_CPU_speed_in_GHz: 'avg', -// minimum_CPU_speed_in_GHz: 'min', -// maximum_CPU_speed_in_GHz: 'max', -// }, -// cpuTemperature: { -// average_temperature: 'main', -// max_temperature: 'max', -// }, -// currentLoad: { -// average_CPU_load_percent: 'avg', -// current_CPU_load_percent: 'currentLoad', -// current_CPU_load_user_percent: 'currentLoadUser', -// current_CPU_load__system_percent: 'currentLoadSystem', -// current_CPU_load_nice_percent: 'currentLoadNice', -// current_CPU_load_idle_percent: 'currentLoadIdle', -// current_CPU_load_raw_ticks: 'rawCurrentLoad', -// }, -// mem: { -// totalmemory_in_bytes: 'total', -// freememory_in_bytes: 'free', -// usedmemory_in_bytes: 'used', -// activememory_in_bytes: 'active', -// buffers_plus_cache_in_bytes: 'buffcache', -// available_memory: 'available', -// }, -// processes: { -// totalprocesses: 'all', -// blockedprocesses: 'blocked', -// runningprocesses: 'running', -// sleepingprocesses: 'sleeping', -// }, -// inetLatency: 'all data collected', -// }; - -// /** -// * collectHealthData scrapes metrics for microservices -// * @returns Promise array with each metric in an object -// */ -// healthHelpers.collectHealthData = async (): Promise => { -// // Annotate the array so TypeScript knows what it contains -// const healthDataCollection: HealthMetric[] = []; -// const time = Date.now(); - -// // Obtain core CPU metrics -// await si.cpu() -// .then(data => { -// const siMethodName = 'cpu'; -// for (let metricName in collectedMetrics[siMethodName]) { -// healthDataCollection.push({ -// metric: metricName, -// value: data[collectedMetrics[siMethodName][metricName]], -// category: 'CPU', -// time, -// }); -// } -// }) -// .catch(err => { -// if (err) { -// throw err; -// } -// }); - -// // Obtain CPU speed metrics -// await si.cpuCurrentSpeed() -// .then(data => { -// const siMethodName = 'cpuCurrentSpeed'; -// for (let 
metricName in collectedMetrics[siMethodName]) { -// healthDataCollection.push({ -// metric: metricName, -// value: data[collectedMetrics[siMethodName][metricName]], -// category: 'CPU', -// time, -// }); -// } -// }) -// .catch(err => { -// if (err) { -// throw err; -// } -// }); - -// // Obtain CPU temperature metrics -// await si.cpuTemperature() -// .then(data => { -// const siMethodName = 'cpuTemperature'; -// for (let metricName in collectedMetrics[siMethodName]) { -// healthDataCollection.push({ -// metric: metricName, -// value: data[collectedMetrics[siMethodName][metricName]], -// category: 'CPU', -// time, -// }); -// } -// }) -// .catch(err => { -// if (err) { -// throw err; -// } -// }); - -// // Obtain current load metrics -// await si.currentLoad() -// .then(data => { -// const siMethodName = 'currentLoad'; -// for (let metricName in collectedMetrics[siMethodName]) { -// healthDataCollection.push({ -// metric: metricName, -// value: data[collectedMetrics[siMethodName][metricName]], -// category: 'CPU', -// time, -// }); -// } -// }) -// .catch(err => { -// if (err) { -// throw err; -// } -// }); - -// // Obtain memory metrics -// await si.mem() -// .then(data => { -// const siMethodName = 'mem'; -// for (let metricName in collectedMetrics[siMethodName]) { -// healthDataCollection.push({ -// metric: metricName, -// value: data[collectedMetrics[siMethodName][metricName]], -// category: 'Memory', -// time, -// }); -// } -// }) -// .catch(err => { -// if (err) { -// throw err; -// } -// }); - -// // Obtain process metrics -// await si.processes() -// .then(data => { -// const siMethodName = 'processes'; -// for (let metricName in collectedMetrics[siMethodName]) { -// healthDataCollection.push({ -// metric: metricName, -// value: data[collectedMetrics[siMethodName][metricName]], -// category: 'Processes', -// time, -// }); -// } -// }) -// .catch(err => { -// if (err) { -// throw err; -// } -// }); - -// // Obtain latency metric -// await si.inetLatency() -// .then(data => { -// healthDataCollection.push({ -// metric: 'latency', -// value: data, -// category: 'Latency', -// time, -// }); -// }) -// .catch(err => { -// if (err) { -// throw err; -// } -// }); - -// // Return the filtered collection. Since all elements are objects, -// // Promise.all isn’t necessary here (unless you had promises inside the array), -// // but it works fine. -// return Promise.all(healthDataCollection).then(array => -// array.filter(metric => { -// // Remove any metrics with invalid values -// if (isNaN(metric.value) || metric.value === 'NaN' || metric.value === '' || metric.value === null) { -// return false; -// } -// return true; -// }) -// ); -// }; - -// export default healthHelpers; - -// import * as si from 'systeminformation'; - -// // Update the HealthMetric interface to allow value to be a number, string, or null. -// interface HealthMetric { -// metric: string; -// value: number | string | null; -// category: string; -// time: number; -// } - -// // Create the helpers object. -// const healthHelpers: { collectHealthData?: () => Promise } = {}; - -// /** -// * This object contains all systeminformation methods, -// * metric names, and corresponding data points. 
-// */ -// const collectedMetrics = { -// cpu: { -// speed_in_GHz: 'speed', -// speedMax_in_GHz: 'speedMax', -// num_of_cores: 'cores', -// num_of_processors: 'processors', -// 'cache.l1d in bytes': 'cache.l1d', -// 'cache.l1i in bytes': 'cache.l1i', -// 'cache.l2 in bytes': 'cache.l2', -// 'cache.l3 in bytes': 'cache.l3', -// }, -// cpuCurrentSpeed: { -// average_CPU_speed_in_GHz: 'avg', -// minimum_CPU_speed_in_GHz: 'min', -// maximum_CPU_speed_in_GHz: 'max', -// }, -// cpuTemperature: { -// average_temperature: 'main', -// max_temperature: 'max', -// }, -// currentLoad: { -// average_CPU_load_percent: 'avg', -// current_CPU_load_percent: 'currentLoad', -// current_CPU_load_user_percent: 'currentLoadUser', -// current_CPU_load__system_percent: 'currentLoadSystem', -// current_CPU_load_nice_percent: 'currentLoadNice', -// current_CPU_load_idle_percent: 'currentLoadIdle', -// current_CPU_load_raw_ticks: 'rawCurrentLoad', -// }, -// mem: { -// totalmemory_in_bytes: 'total', -// freememory_in_bytes: 'free', -// usedmemory_in_bytes: 'used', -// activememory_in_bytes: 'active', -// buffers_plus_cache_in_bytes: 'buffcache', -// available_memory: 'available', -// }, -// processes: { -// totalprocesses: 'all', -// blockedprocesses: 'blocked', -// runningprocesses: 'running', -// sleepingprocesses: 'sleeping', -// }, -// inetLatency: 'all data collected', -// }; - -// /** -// * collectHealthData scrapes metrics for microservices. -// * @returns Promise that resolves to an array of health metric objects. -// */ -// healthHelpers.collectHealthData = async (): Promise => { -// const healthDataCollection: HealthMetric[] = []; -// const time = Date.now(); - -// // Obtain core CPU metrics -// await si.cpu() -// .then(data => { -// const siMethodName = 'cpu'; -// for (let metricName in collectedMetrics[siMethodName]) { -// healthDataCollection.push({ -// metric: metricName, -// value: data[collectedMetrics[siMethodName][metricName]], -// category: 'CPU', -// time, -// }); -// } -// }) -// .catch(err => { -// if (err) throw err; -// }); - -// // Obtain CPU speed metrics -// await si.cpuCurrentSpeed() -// .then(data => { -// const siMethodName = 'cpuCurrentSpeed'; -// for (let metricName in collectedMetrics[siMethodName]) { -// healthDataCollection.push({ -// metric: metricName, -// value: data[collectedMetrics[siMethodName][metricName]], -// category: 'CPU', -// time, -// }); -// } -// }) -// .catch(err => { -// if (err) throw err; -// }); - -// // Obtain CPU temperature metrics -// await si.cpuTemperature() -// .then(data => { -// const siMethodName = 'cpuTemperature'; -// for (let metricName in collectedMetrics[siMethodName]) { -// healthDataCollection.push({ -// metric: metricName, -// value: data[collectedMetrics[siMethodName][metricName]], -// category: 'CPU', -// time, -// }); -// } -// }) -// .catch(err => { -// if (err) throw err; -// }); - -// // Obtain current load metrics -// await si.currentLoad() -// .then(data => { -// const siMethodName = 'currentLoad'; -// for (let metricName in collectedMetrics[siMethodName]) { -// healthDataCollection.push({ -// metric: metricName, -// value: data[collectedMetrics[siMethodName][metricName]], -// category: 'CPU', -// time, -// }); -// } -// }) -// .catch(err => { -// if (err) throw err; -// }); - -// // Obtain memory metrics -// await si.mem() -// .then(data => { -// const siMethodName = 'mem'; -// for (let metricName in collectedMetrics[siMethodName]) { -// healthDataCollection.push({ -// metric: metricName, -// value: 
data[collectedMetrics[siMethodName][metricName]], -// category: 'Memory', -// time, -// }); -// } -// }) -// .catch(err => { -// if (err) throw err; -// }); - -// // Obtain process metrics -// await si.processes() -// .then(data => { -// const siMethodName = 'processes'; -// for (let metricName in collectedMetrics[siMethodName]) { -// healthDataCollection.push({ -// metric: metricName, -// value: data[collectedMetrics[siMethodName][metricName]], -// category: 'Processes', -// time, -// }); -// } -// }) -// .catch(err => { -// if (err) throw err; -// }); - -// // Obtain latency metric -// await si.inetLatency() -// .then(data => { -// healthDataCollection.push({ -// metric: 'latency', -// value: data, -// category: 'Latency', -// time, -// }); -// }) -// .catch(err => { -// if (err) throw err; -// }); - -// // Return the filtered collection. -// // For each metric, if value is null or an empty string, or if converting it to a number results in NaN, -// // filter it out. -// return Promise.all(healthDataCollection).then(array => -// array.filter(metric => { -// if (metric.value === null || metric.value === '') { -// return false; -// } -// const numericValue = -// typeof metric.value === 'number' -// ? metric.value -// : Number(metric.value); -// return !isNaN(numericValue); -// }) -// ); -// }; - -// export default healthHelpers; - import * as si from 'systeminformation'; +/** + * Defines the structure of a health metric. + * + */ interface HealthMetric { - metric: string; - value: number | string | null; - category: string; - time: number; + metric: string; // Name of the metric (e.g., 'CPU Load') + value: number | string | null; // The actual measured value (can be a number or string, or null if unavailable) + category: string; // The general category (e.g., 'CPU', 'Memory', 'Processes') + time: number; // Timestamp when the metric was recorded } /** - * This object contains all systeminformation methods, - * metric names, and corresponding data points. + * Mapping of systeminformation methods to specific metrics. + * - Each property in this object corresponds to a method in `systeminformation`. + * - Inside each category (e.g., `cpu`, `mem`), we map human-friendly metric names to the `systeminformation` fields. + * - This mapping allows us to **avoid hardcoding metric keys later**. */ const collectedMetrics = { cpu: { @@ -689,7 +41,7 @@ const collectedMetrics = { average_CPU_load_percent: 'avg', current_CPU_load_percent: 'currentLoad', current_CPU_load_user_percent: 'currentLoadUser', - current_CPU_load__system_percent: 'currentLoadSystem', + current_CPU_load_system_percent: 'currentLoadSystem', current_CPU_load_nice_percent: 'currentLoadNice', current_CPU_load_idle_percent: 'currentLoadIdle', current_CPU_load_raw_ticks: 'rawCurrentLoad', @@ -712,25 +64,29 @@ const collectedMetrics = { }; /** - * The `healthHelpers` object with a guaranteed `.collectHealthData()` method. + * healthHelpers: A module for collecting system health metrics. + * - Uses async/await to ensure non-blocking data collection. */ const healthHelpers = { /** - * Scrapes metrics for microservices. - * @returns Promise that resolves to an array of health metric objects. + * Gathers system health metrics asynchronously. + * - Fetches CPU, memory, and process data using `systeminformation`. + * - Returns a filtered list of valid health metric objects. + * + * @returns A Promise resolving to an array of `HealthMetric` objects.
*/ async collectHealthData(): Promise { const healthDataCollection: HealthMetric[] = []; - const time = Date.now(); + const time = Date.now(); // Capture timestamp for all metrics - // Obtain core CPU metrics + // **CPU Metrics** await si.cpu() .then(data => { const siMethodName = 'cpu'; for (let metricName in collectedMetrics[siMethodName]) { healthDataCollection.push({ metric: metricName, - value: data[collectedMetrics[siMethodName][metricName]], + value: data[collectedMetrics[siMethodName][metricName]], // Map to `systeminformation` fields category: 'CPU', time, }); @@ -740,7 +96,7 @@ const healthHelpers = { if (err) throw err; }); - // Obtain CPU speed metrics + // **CPU Speed Metrics** await si.cpuCurrentSpeed() .then(data => { const siMethodName = 'cpuCurrentSpeed'; @@ -757,7 +113,7 @@ const healthHelpers = { if (err) throw err; }); - // Obtain CPU temperature metrics + // **CPU Temperature Metrics** await si.cpuTemperature() .then(data => { const siMethodName = 'cpuTemperature'; @@ -774,7 +130,7 @@ const healthHelpers = { if (err) throw err; }); - // Obtain current load metrics + // **CPU Load Metrics** await si.currentLoad() .then(data => { const siMethodName = 'currentLoad'; @@ -791,7 +147,7 @@ const healthHelpers = { if (err) throw err; }); - // Obtain memory metrics + // **Memory Metrics** await si.mem() .then(data => { const siMethodName = 'mem'; @@ -808,7 +164,7 @@ const healthHelpers = { if (err) throw err; }); - // Obtain process metrics + // **Process Metrics** await si.processes() .then(data => { const siMethodName = 'processes'; @@ -825,7 +181,7 @@ const healthHelpers = { if (err) throw err; }); - // Obtain latency metric + // **Network Latency Metrics** await si.inetLatency() .then(data => { healthDataCollection.push({ @@ -839,18 +195,12 @@ const healthHelpers = { if (err) throw err; }); - // Return the filtered collection. - // For each metric, filter out null/empty values or NaN after converting to number. - return Promise.all(healthDataCollection).then(array => - array.filter(metric => { - if (metric.value === null || metric.value === '') { - return false; - } - const numericValue = - typeof metric.value === 'number' ? metric.value : Number(metric.value); - return !isNaN(numericValue); - }) - ); + // **Filter out invalid values before returning the final array** + return healthDataCollection.filter(metric => { + if (metric.value === null || metric.value === '') return false; + const numericValue = typeof metric.value === 'number' ? 
metric.value : Number(metric.value); + return !isNaN(numericValue); + }); }, }; diff --git a/chronos_npm_package/controllers/mongo.ts b/chronos_npm_package/controllers/mongo.ts index b12a6af81..ce1d0d9a0 100644 --- a/chronos_npm_package/controllers/mongo.ts +++ b/chronos_npm_package/controllers/mongo.ts @@ -1,1536 +1,3 @@ -// // import mongoose from 'mongoose'; -// // import alert from './alert'; -// // import CommunicationModel from '../models/CommunicationModel'; -// // import ServicesModel from '../models/ServicesModel'; -// // import HealthModelFunc from'../models/HealthModel'; -// // import ContainerInfoFunc from'../models/ContainerInfo'; -// // import KafkaModel from'../models/KafkaModel'; -// // import KubernetesModel from'../models/KubernetesModel'; -// // import collectHealthData from'./healthHelpers'; -// // import MetricsModel from '../models/MetricsModel'; -// // import dockerHelper from './dockerHelper'; -// // import utilities from './utilities.js'; -// // import GrafanaAPIKeyModel from '../models/GrafanaAPIKeyModel'; - -// // mongoose.set('strictQuery', true); - -// // const mongo = {}; - -// // // This object is used to determine if metrics that are received from setInterval queries should be saved to the db or not. - -// // /** -// // * Initializes connection to MongoDB database using provided URI -// // * @param {Object} database Contains DB type and DB URI -// // */ -// // mongo.connect = async ({ database }) => { -// // console.log('Attempting to connect to database...'); -// // try { -// // await mongoose.connect(`${database.URI}`); -// // console.log(`MongoDB database connected at ${database.URI.slice(0, 20)}...`); -// // } catch ({ message }) { -// // console.log('Error connecting to MongoDB:', message); -// // } -// // }; - -// // /** -// // * Create services collection with each entry representing a microservice -// // * @param {string} microservice Microservice name -// // * @param {number} interval Interval to collect data -// // */ - -// // mongo.services = ({ microservice, interval }) => { -// // console.log(`Saving "${microservice}" to services...`); -// // const newService = { microservice, interval }; -// // const service = new ServicesModel(newService); - -// // service -// // .save() -// // .then(() => console.log(`Added new service "${microservice}"`)) -// // .catch(err => console.log(`Error saving service "${microservice}": `, err.message)); -// // }; - -// // /** -// // * Creates a communications collection if one does not yet exist and -// // * traces the request throughout its life cycle. 
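For reference, a minimal usage sketch of the rewritten healthHelpers module from the hunk above (illustrative only, not part of this patch). It assumes the module is imported from './healthHelpers', as the mongo controller below does, and that the caller simply logs whatever metrics survive the validity filter; the 60000 ms polling interval and the console output are arbitrary choices for the example.

import healthHelpers from './healthHelpers';

// Collect one snapshot of host metrics and print it.
// Entries with null, empty-string, or non-numeric values have already been
// filtered out by collectHealthData, so everything here should be chartable.
async function logHealthSnapshot(): Promise<void> {
  const metrics = await healthHelpers.collectHealthData();
  for (const { metric, value, category, time } of metrics) {
    console.log(`[${new Date(time).toISOString()}] ${category} / ${metric}: ${value}`);
  }
}

// Hypothetical polling loop; in Chronos itself the mongo/postgres controllers
// drive collection on their own setInterval timers.
setInterval(() => {
  logHealthSnapshot().catch(err => console.error('Error collecting health data:', err));
}, 60000);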
Will send a notification -// // * to the user if contact information is provided -// // * @param {string} microservice Microservice name -// // * @param {Object|undefined} slack Slack settings -// // * @param {Object|undefined} email Email settings -// // */ -// // mongo.communications = ({ microservice, slack, email }) => { -// // console.log('Recording request cycle...'); - -// // return function (req, res, next) { -// // // Setup newComms object to store data from each request -// // const newComms = { -// // microservice: microservice, -// // endpoint: req.originalUrl, -// // request: req.method, -// // correlatingid: res.getHeaders()['x-correlation-id'], -// // }; -// // // console.log("NEW COMMS",newComms) - -// // res.on('finish', () => { -// // /** -// // * OPTIONAL FEATURE -// // * If user provides contact information, send an alert if the -// // * status code is over or equal to 400 -// // */ -// // if (res.statusCode >= 400) { -// // if (slack) alert.sendSlack(res.statusCode, res.statusMessage, slack); -// // if (email) alert.sendEmail(res.statusCode, res.statusMessage, email); -// // } - -// // /** Add status code and message to newComms */ -// // newComms.responsestatus = res.statusCode; -// // newComms.responsemessage = res.statusMessage; - -// // const communication = new CommunicationModel(newComms); -// // communication -// // .save() -// // .then(() => { -// // console.log('Request cycle saved'); -// // }) -// // .catch(err => console.log(`Error saving communications: `, err.message)); -// // }); - -// // // Call next middleware -// // next(); -// // }; -// // }; - -// // /** -// // * Creates a new table per microservice which records all health data -// // * @param {string} microservice Microservice name -// // * @param {number} interval Interval for continuous data collection -// // */ -// // mongo.health = async ({ microservice, interval, mode }) => { -// // //MetricsModel tracks which metrics are selected in the MetricsContainer component -// // //HealthModel tracks all the cpu health data in each of the services databases - -// // setInterval(() => { -// // collectHealthData() -// // .then(async healthMetrics => { -// // const currentMetrics = await MetricsModel.find({mode}) - -// // if (currentMetrics.length !== healthMetrics.length) { -// // await mongo.addMetrics(healthMetrics, mode, currentMetrics); -// // } -// // const HealthModel = HealthModelFunc(`${microservice}`); -// // await HealthModel.insertMany(healthMetrics); -// // return -// // }) -// // .then(() => { -// // console.log('Health data recorded in MongoDB'); -// // }) -// // .catch(err => console.log('Error inserting health documents: ', err)); -// // }, 10000); -// // }; - -// // /** -// // * !Runs instead of health if dockerized is true -// // * Collects information on the docker container -// // */ -// // mongo.docker = ({ microservice, interval, mode }) => { //:config file, interval of calls, nada -// // // Create collection using name of microservice -// // const containerInfo = ContainerInfoFunc(`${microservice}`); -// // dockerHelper -// // .getDockerContainer(microservice) -// // .then(containerData => { -// // setInterval(() => { -// // dockerHelper -// // .readDockerContainer(containerData) -// // .then(data => { -// // return containerInfo.create(data); -// // }) -// // .catch(err => { -// // throw new Error(err); -// // }); -// // }, interval); -// // }) - -// // .catch(error => { -// // if (error.constructor.name === 'Error') throw error; -// // else throw new Error(error); -// // }); -// 
// }; - -// // /* -// // This function takes as a parameter the promise returned from the kafkaFetch(). -// // It then takes the returned array of metrics, turns them into documents based on -// // KafkaModel.js, and inserts them into the db at the provided uri with insertMany() -// // */ -// // mongo.serverQuery = async config => { -// // await mongo.saveService(config); -// // await mongo.setQueryOnInterval(config); -// // }; - -// // mongo.saveService = config => { -// // let microservice; -// // if (config.mode === 'kafka') { -// // microservice = 'kafkametrics'; -// // } else if (config.mode === 'kubernetes') { -// // microservice = 'kubernetesmetrics'; -// // } else if (config.mode === 'docker') { -// // microservice = `${config.containerName}`; -// // } else { -// // throw new Error('Unrecongnized mode'); -// // } - -// // const service = new ServicesModel({ -// // microservice: microservice, -// // interval: config.interval, -// // }); - -// // service -// // .save() -// // .then(() => console.log(`Adding "${microservice}" to the services table`)) -// // .catch(err => -// // console.log(`Error saving "${microservice}" to the services table: `, err.message) -// // ); -// // }; - -// // mongo.setQueryOnInterval = async config => { -// // let model; -// // let metricsQuery; - -// // let length = 0; -// // const currentMetricNames = {}; - -// // if (config.mode === 'kafka') { -// // model = KafkaModel; -// // metricsQuery = await utilities.kafkaMetricsQuery; -// // } else if (config.mode === 'kubernetes') { -// // model = KubernetesModel; -// // metricsQuery = await utilities.promMetricsQuery; -// // } else if (config.mode === 'docker') { -// // model = ContainerInfoFunc(`${config.containerName}`); -// // //console.log('setQueryOnInterval line 212 dockerModel:', ContainerInfoFunc(`${config.containerName}`)); -// // metricsQuery = utilities.promMetricsQuery; -// // //console.log('setQueryOnInterval line 214 metricsQuery:', metricsQuery); -// // } else { -// // throw new Error('Unrecognized mode'); -// // } - -// // length = await mongo.getSavedMetricsLength(config.mode, currentMetricNames); - -// // console.log('currentMetricNames.length: ', Object.keys(currentMetricNames).length); -// // // Use setInterval to send queries to metrics server and then pipe responses to database -// // setInterval(() => { -// // metricsQuery(config) -// // // This updates the Metrics Model with all chosen metrics. If there are no chosen metrics it sets all available metrics as chosen metrics within the metrics model. -// // .then(async parsedArray => { -// // //await mongo.createGrafanaDashboards(config, parsedArray); -// // console.log('parsedArray.length is: ', parsedArray.length); -// // // This conditional would be used if new metrics are available to be tracked. 
-// // if (length !== parsedArray.length) { -// // // for (let metric of parsedArray) { -// // // if (!(metric.metric in currentMetricNames)) { -// // // await model.create(metric); -// // // //currentMetricNames[metric] = true; -// // // } -// // // } -// // /////// -// // length = await mongo.addMetrics(parsedArray, config.mode, currentMetricNames, model); -// // } - -// // if (config.mode === 'docker') { -// // const documents = []; -// // for (const metric of parsedArray) { -// // /** -// // * This will check if the current metric in the parsed array -// // * evaluates to true within the currentMetricNames object -// // * which is updated by the user when they select/deselect metrics on the electron app -// // * helping to avoid overloading the db with unnecessary data. -// // */ - -// // if (currentMetricNames[metric.metric]) documents.push(model(metric)); -// // } -// // await model.insertMany(parsedArray, err => { -// // if (err) { -// // console.error(err) -// // } else { -// // console.log(`${config.mode} metrics recorded in MongoDB`) -// // } -// // }); -// // } - -// // let allMetrics = await model.find({}); -// // console.log('allMetrics.length: ', allMetrics.length); -// // console.log("🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 start creating dashboards 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑 🟑") -// // await mongo.createGrafanaDashboards(config, allMetrics); -// // console.log("βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… finish creating dashboards βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ… βœ…") -// // }) -// // // .then(() => { -// // // console.log(`${config.mode} metrics recorded in MongoDB`) -// // // }) -// // .catch(err => console.log(`Error inserting ${config.mode} documents in MongoDB: `, err)); -// // }, 40000); -// // }; - -// // mongo.getSavedMetricsLength = async (mode, currentMetricNames) => { -// // let currentMetrics = await MetricsModel.find({ mode: mode }); -// // if (currentMetrics.length > 0) { -// // currentMetrics.forEach(el => { -// // const { metric, selected } = el; -// // currentMetricNames[metric] = selected; -// // }); -// // } -// // return currentMetrics.length ? currentMetrics.length : 0; -// // }; - -// // mongo.addMetrics = async (healthMetrics, mode, currentMetricNames) => { -// // //This function adds only the new metrics from metrics model to the metrics database -// // const newMets = []; -// // for (let healthMetric of healthMetrics) { -// // const { metric, category} = healthMetric -// // if (!(metric in currentMetricNames)) { -// // newMets.push({ metric, mode,category }); -// // currentMetricNames[metric.metric] = true; -// // } -// // }; -// // await MetricsModel.create(newMets); -// // return healthMetrics.length; -// // }; -// // // This middleware could be used if the user would like to update their chronos data in real time (immediately after updating saved metrics on the Chronos desktop app), but they would have to expose a URL/port to be queried for the Electron front end. 
-// // // -// // // mongo.modifyMetrics = (config) => { -// // // return function (req, res, next) { -// // // res.on('finish', () => { -// // // if (req.body.URI === URI && req.body.mode === config.mode) { -// // // currentMetricNames = req.body.metrics; -// // // } -// // // else return next({err: 'Modified metrics passed in to the modifyMetrics route cannot be added', log: 'It is possible that the URI is incorrect, or that you are attempting to add metrics for the incorrect mode type'}) -// // // }); -// // // return next(); -// // // }; -// // // } - -// // mongo.createGrafanaDashboards = async (config, parsedArray) => { -// // try { -// // console.log('In mongo.createGrafanaDashboards!!!') -// // console.log('Calling utilities.getGrafanaDatasource()'); -// // const datasource = await utilities.getGrafanaDatasource(config.grafanaAPIKey); -// // //console.log('Calling utilities.promMetricsQuery()'); -// // //const parsedArray = await utilities.promMetricsQuery(config); -// // //const datasource = await utilities.getGrafanaDatasource(); -// // // console.log("parsedArray is: ", parsedArray.slice(0, 5)); -// // // console.log('parsedArray.length is: ', parsedArray.length); -// // for (let metric of parsedArray) { -// // console.log(`πŸŽ‰ creating dashboard πŸŽ‰`); -// // await utilities.createGrafanaDashboard(metric, datasource, "timeseries", config.grafanaAPIKey); -// // } - -// // // await parsedArray.forEach(async (metric, i) => { -// // // //console.log("metric is: ", metric); -// // // console.log(`creating ${i}th dashboard`); -// // // await utilities.createGrafanaDashboard(metric, datasource); -// // // }); -// // } catch (err) { -// // console.error("error in mongo.createGrafanaDashboards: ", err) -// // } -// // }; - -// // mongo.storeGrafanaAPIKey = async (config) => { -// // try { -// // console.log('In mongo.storeGrafanaAPIKey!!!') -// // await GrafanaAPIKeyModel.create({ token: config.grafanaAPIKey }); -// // console.log('Grafana API Key stored in MongoDB'); -// // } catch (err) { -// // console.error("error in mongo.storeGrafanaAPIKey: ", err); -// // } -// // } - -// // module.exports = mongo; - -// import mongoose from 'mongoose'; -// import alert from './alert'; -// import CommunicationModel from '../models/CommunicationModel'; -// import ServicesModel from '../models/ServicesModel'; -// import HealthModelFunc from '../models/HealthModel'; -// import ContainerInfoFunc from '../models/ContainerInfo'; -// import KafkaModel from '../models/KafkaModel'; -// import KubernetesModel from '../models/KubernetesModel'; -// // Change the import to a named import if collectHealthData is exported that way. -// import { collectHealthData } from './healthHelpers'; -// import MetricsModel from '../models/MetricsModel'; -// import dockerHelper from './dockerHelper'; -// // If utilities exports an object with a property `helpers`, leave the import as-is. -// import utilities from './utilities.js'; -// import GrafanaAPIKeyModel from '../models/GrafanaAPIKeyModel'; - -// mongoose.set('strictQuery', true); - -// /* --------------------------------------------------------------------------- -// (Interface & mongo object initialization code omitted for brevity; assume -// they remain as in the previous update.) -// --------------------------------------------------------------------------- */ - -// interface MongoController { -// // ... 
(interface as previously declared) -// connect: ({ database }: { database: { URI: string } }) => Promise; -// services: ({ microservice, interval }: { microservice: string; interval: number }) => void; -// communications: ({ -// microservice, -// slack, -// email, -// }: { -// microservice: string; -// slack?: any; -// email?: any; -// }) => (req: any, res: any, next: any) => void; -// health: ({ microservice, interval, mode }: { microservice: string; interval: number; mode: string; }) => Promise; -// docker: ({ microservice, interval, mode }: { microservice: string; interval: number; mode: string; }) => void; -// serverQuery: (config: any) => Promise; -// saveService: (config: any) => void; -// setQueryOnInterval: (config: any) => Promise; -// getSavedMetricsLength: (mode: string, currentMetricNames: { [key: string]: any }) => Promise; -// addMetrics: (healthMetrics: any, mode: string, currentMetricNames: { [key: string]: any }) => Promise; -// createGrafanaDashboards: (config: any, parsedArray: any[]) => Promise; -// storeGrafanaAPIKey: (config: any) => Promise; -// } - -// const mongo: MongoController = {} as MongoController; - -// /* --------------------- CONNECT, SERVICES, COMMUNICATIONS --------------------- */ - -// // (connect, services, and communications remain unchanged from previous update) - -// mongo.connect = async ({ database }: { database: { URI: string } }): Promise => { -// console.log('Attempting to connect to database...'); -// try { -// await mongoose.connect(database.URI); -// console.log(`MongoDB database connected at ${database.URI.slice(0, 20)}...`); -// } catch ({ message }) { -// console.log('Error connecting to MongoDB:', message); -// } -// }; - -// mongo.services = ({ microservice, interval }: { microservice: string; interval: number }): void => { -// console.log(`Saving "${microservice}" to services...`); -// const newService = { microservice, interval }; -// const service = new ServicesModel(newService); -// service -// .save() -// .then(() => console.log(`Added new service "${microservice}"`)) -// .catch(err => console.log(`Error saving service "${microservice}": `, err.message)); -// }; - -// mongo.communications = ({ -// microservice, -// slack, -// email, -// }: { -// microservice: string; -// slack?: any; -// email?: any; -// }) => { -// console.log('Recording request cycle...'); -// return function (req: any, res: any, next: any): void { -// const newComms = { -// microservice: microservice, -// endpoint: req.originalUrl, -// request: req.method, -// correlatingid: res.getHeaders()['x-correlation-id'], -// }; -// res.on('finish', () => { -// if (res.statusCode >= 400) { -// if (slack) alert.sendSlack(res.statusCode, res.statusMessage, slack); -// if (email) alert.sendEmail(res.statusCode, res.statusMessage, email); -// } -// newComms['responsestatus'] = res.statusCode; -// newComms['responsemessage'] = res.statusMessage; -// const communication = new CommunicationModel(newComms); -// communication -// .save() -// .then(() => console.log('Request cycle saved')) -// .catch(err => console.log(`Error saving communications: `, err.message)); -// }); -// next(); -// }; -// }; - -// /* ---------------------------- HEALTH FUNCTION ---------------------------- */ - -// mongo.health = async ({ -// microservice, -// interval, -// mode, -// }: { -// microservice: string; -// interval: number; -// mode: string; -// }): Promise => { -// // <-- Error at line 503: collectHealthData() not callable. -// // After updating the import, we now call the function. 
-// setInterval(() => { -// collectHealthData() // Now callable because we import it as a named function. -// .then(async (healthMetrics: any) => { -// const currentMetrics = await MetricsModel.find({ mode }); -// if (currentMetrics.length !== healthMetrics.length) { -// await mongo.addMetrics(healthMetrics, mode, currentMetrics); -// } -// const HealthModel = HealthModelFunc(`${microservice}`); -// await HealthModel.insertMany(healthMetrics); -// }) -// .then(() => console.log('Health data recorded in MongoDB')) -// .catch(err => console.log('Error inserting health documents: ', err)); -// }, 10000); -// }; - -// /* ----------------------------- DOCKER FUNCTION ----------------------------- */ - -// mongo.docker = ({ -// microservice, -// interval, -// mode, -// }: { -// microservice: string; -// interval: number; -// mode: string; -// }): void => { -// const containerInfo = ContainerInfoFunc(`${microservice}`); -// dockerHelper -// .getDockerContainer(microservice) -// .then(containerData => { -// setInterval(() => { -// dockerHelper -// .readDockerContainer(containerData) -// .then(data => containerInfo.create(data)) -// .catch(err => { throw new Error(err); }); -// }, interval); -// }) -// .catch(error => { -// if (error.constructor.name === 'Error') throw error; -// else throw new Error(error); -// }); -// }; - -// /* ---------------- SERVER QUERY & SAVE SERVICE ---------------- */ - -// mongo.serverQuery = async (config: any): Promise => { -// await mongo.saveService(config); -// await mongo.setQueryOnInterval(config); -// }; - -// mongo.saveService = (config: any): void => { -// let microservice: string; -// if (config.mode === 'kafka') { -// microservice = 'kafkametrics'; -// } else if (config.mode === 'kubernetes') { -// microservice = 'kubernetesmetrics'; -// } else if (config.mode === 'docker') { -// microservice = `${config.containerName}`; -// } else { -// throw new Error('Unrecognized mode'); -// } -// const service = new ServicesModel({ -// microservice, -// interval: config.interval, -// }); -// service -// .save() -// .then(() => console.log(`Adding "${microservice}" to the services table`)) -// .catch(err => -// console.log(`Error saving "${microservice}" to the services table: `, err.message) -// ); -// }; - -// /* ---------------- SET QUERY ON INTERVAL ---------------- */ - -// mongo.setQueryOnInterval = async (config: any): Promise => { -// let model: any; -// let metricsQuery: any; -// let length = 0; -// const currentMetricNames: { [key: string]: any } = {}; -// if (config.mode === 'kafka') { -// model = KafkaModel; -// // Update: use utilities.helpers.kafkaMetricsQuery instead of utilities.kafkaMetricsQuery -// metricsQuery = await utilities.helpers.kafkaMetricsQuery; -// } else if (config.mode === 'kubernetes') { -// model = KubernetesModel; -// metricsQuery = await utilities.helpers.promMetricsQuery; -// } else if (config.mode === 'docker') { -// model = ContainerInfoFunc(`${config.containerName}`); -// metricsQuery = utilities.helpers.promMetricsQuery; -// } else { -// throw new Error('Unrecognized mode'); -// } -// length = await mongo.getSavedMetricsLength(config.mode, currentMetricNames); -// console.log('currentMetricNames.length: ', Object.keys(currentMetricNames).length); -// setInterval(() => { -// metricsQuery(config) -// .then(async (parsedArray: any[]) => { -// console.log('parsedArray.length is: ', parsedArray.length); -// if (length !== parsedArray.length) { -// length = await mongo.addMetrics(parsedArray, config.mode, currentMetricNames); -// } -// 
if (config.mode === 'docker') { -// const documents: any[] = []; -// for (const metric of parsedArray) { -// if (currentMetricNames[metric.metric]) { -// documents.push(model(metric)); -// } -// } -// await model.insertMany(parsedArray, (err: any) => { -// if (err) console.error(err); -// else console.log(`${config.mode} metrics recorded in MongoDB`); -// }); -// } -// const allMetrics = await model.find({}); -// console.log('allMetrics.length: ', allMetrics.length); -// console.log("🟑 start creating dashboards 🟑"); -// await mongo.createGrafanaDashboards(config, allMetrics); -// console.log("βœ… finish creating dashboards βœ…"); -// }) -// // .catch(err => console.log(`Error inserting ${config.mode} documents in MongoDB: `, err)); -// // }, 40000); -// // }; - -// // /* ---------------- GET SAVED METRICS LENGTH ---------------- */ - -// // mongo.getSavedMetricsLength = async ( -// // mode: string, -// // currentMetricNames: { [key: string]: any } -// // ): Promise => { -// // const currentMetrics = await MetricsModel.find({ mode }); -// // if (currentMetrics.length > 0) { -// // currentMetrics.forEach(el => { -// // const { metric, selected } = el; -// // currentMetricNames[metric] = selected; -// // }); -// // } -// // return currentMetrics.length; -// // }; - -// // /* ---------------- ADD METRICS ---------------- */ - -// // mongo.addMetrics = async ( -// // healthMetrics: any, -// // mode: string, -// // currentMetricNames: { [key: string]: any } -// // ): Promise => { -// // const newMets: any[] = []; -// // for (let healthMetric of healthMetrics) { -// // const { metric, category } = healthMetric; -// // if (!(metric in currentMetricNames)) { -// // newMets.push({ metric, mode, category }); -// // currentMetricNames[metric] = true; -// // } -// // } -// // await MetricsModel.create(newMets); -// // return healthMetrics.length; -// // }; - -// // /* ---------------- CREATE GRAFANA DASHBOARDS ---------------- */ - -// // mongo.createGrafanaDashboards = async ( -// // config: any, -// // parsedArray: any[] -// // ): Promise => { -// // try { -// // console.log('In mongo.createGrafanaDashboards!!!'); -// // // Update: call utilities.helpers.getGrafanaDatasource -// // const datasource = await utilities.helpers.getGrafanaDatasource(config.grafanaAPIKey); -// // for (let metric of parsedArray) { -// // console.log(`πŸŽ‰ creating dashboard πŸŽ‰`); -// // // Update: call utilities.helpers.createGrafanaDashboard -// // await utilities.helpers.createGrafanaDashboard(metric, datasource, "timeseries", config.grafanaAPIKey); -// // } -// // } catch (err) { -// // console.error("error in mongo.createGrafanaDashboards: ", err); -// // } -// // }; - -// // /* ---------------- STORE GRAFANA API KEY ---------------- */ - -// // mongo.storeGrafanaAPIKey = async (config: any): Promise => { -// // try { -// // console.log('In mongo.storeGrafanaAPIKey!!!'); -// // await GrafanaAPIKeyModel.create({ token: config.grafanaAPIKey }); -// // console.log('Grafana API Key stored in MongoDB'); -// // } catch (err) { -// // console.error("error in mongo.storeGrafanaAPIKey: ", err); -// // } -// // }; - -// // /* ============================================================================= -// // 4. 
Export the Updated Mongo Controller -// // ============================================================================= */ -// // export default mongo; -// import mongoose from 'mongoose'; -// import alert from './alert'; -// import CommunicationModel from '../models/CommunicationModel'; -// import ServicesModel from '../models/ServicesModel'; -// import HealthModelFunc from '../models/HealthModel'; -// import ContainerInfoFunc from '../models/ContainerInfo'; -// import KafkaModel from '../models/KafkaModel'; -// import KubernetesModel from '../models/KubernetesModel'; -// // Updated import: use the default import since healthHelpers is exported as default. -// import healthHelpers from './healthHelpers'; -// import MetricsModel from '../models/MetricsModel'; -// import dockerHelper from './dockerHelper'; -// // If utilities exports an object with a property `helpers`, leave the import as-is. -// import utilities from './utilities.js'; -// import GrafanaAPIKeyModel from '../models/GrafanaAPIKeyModel'; - -// mongoose.set('strictQuery', true); - -// /* ----------------------------------------------------------------------------- -// Interface & mongo object initialization code (omitted for brevity) -// ----------------------------------------------------------------------------- */ - -// interface MongoController { -// connect: ({ database }: { database: { URI: string } }) => Promise; -// services: ({ microservice, interval }: { microservice: string; interval: number }) => void; -// communications: ({ -// microservice, -// slack, -// email, -// }: { -// microservice: string; -// slack?: any; -// email?: any; -// }) => (req: any, res: any, next: any) => void; -// health: ({ microservice, interval, mode }: { microservice: string; interval: number; mode: string; }) => Promise; -// docker: ({ microservice, interval, mode }: { microservice: string; interval: number; mode: string; }) => void; -// serverQuery: (config: any) => Promise; -// saveService: (config: any) => void; -// setQueryOnInterval: (config: any) => Promise; -// getSavedMetricsLength: (mode: string, currentMetricNames: { [key: string]: any }) => Promise; -// addMetrics: (healthMetrics: any, mode: string, currentMetricNames: { [key: string]: any }) => Promise; -// createGrafanaDashboards: (config: any, parsedArray: any[]) => Promise; -// storeGrafanaAPIKey: (config: any) => Promise; -// } - -// const mongo: MongoController = {} as MongoController; - -// /* --------------------- CONNECT, SERVICES, COMMUNICATIONS --------------------- */ - -// mongo.connect = async ({ database }: { database: { URI: string } }): Promise => { -// console.log('Attempting to connect to database...'); -// try { -// await mongoose.connect(database.URI); -// console.log(`MongoDB database connected at ${database.URI.slice(0, 20)}...`); -// } catch ({ message }) { -// console.log('Error connecting to MongoDB:', message); -// } -// }; - -// mongo.services = ({ microservice, interval }: { microservice: string; interval: number }): void => { -// console.log(`Saving "${microservice}" to services...`); -// const newService = { microservice, interval }; -// const service = new ServicesModel(newService); -// service -// .save() -// .then(() => console.log(`Added new service "${microservice}"`)) -// .catch(err => console.log(`Error saving service "${microservice}": `, err.message)); -// }; - -// mongo.communications = ({ -// microservice, -// slack, -// email, -// }: { -// microservice: string; -// slack?: any; -// email?: any; -// }) => { -// console.log('Recording 
request cycle...'); -// return function (req: any, res: any, next: any): void { -// const newComms = { -// microservice: microservice, -// endpoint: req.originalUrl, -// request: req.method, -// correlatingid: res.getHeaders()['x-correlation-id'], -// }; -// res.on('finish', () => { -// if (res.statusCode >= 400) { -// if (slack) alert.sendSlack(res.statusCode, res.statusMessage, slack); -// if (email) alert.sendEmail(res.statusCode, res.statusMessage, email); -// } -// newComms['responsestatus'] = res.statusCode; -// newComms['responsemessage'] = res.statusMessage; -// const communication = new CommunicationModel(newComms); -// communication -// .save() -// .then(() => console.log('Request cycle saved')) -// .catch(err => console.log(`Error saving communications: `, err.message)); -// }); -// next(); -// }; -// }; - -// /* ---------------------------- HEALTH FUNCTION ---------------------------- */ - -// mongo.health = async ({ -// microservice, -// interval, -// mode, -// }: { -// microservice: string; -// interval: number; -// mode: string; -// }): Promise => { -// setInterval(() => { -// // Call collectHealthData as a property of the default imported healthHelpers object. -// healthHelpers.collectHealthData() -// .then(async (healthMetrics: any) => { -// const currentMetrics = await MetricsModel.find({ mode }); -// if (currentMetrics.length !== healthMetrics.length) { -// await mongo.addMetrics(healthMetrics, mode, currentMetrics); -// } -// const HealthModel = HealthModelFunc(`${microservice}`); -// await HealthModel.insertMany(healthMetrics); -// }) -// .then(() => console.log('Health data recorded in MongoDB')) -// .catch(err => console.log('Error inserting health documents: ', err)); -// }, 10000); -// }; - -// /* ----------------------------- DOCKER FUNCTION ----------------------------- */ - -// mongo.docker = ({ -// microservice, -// interval, -// mode, -// }: { -// microservice: string; -// interval: number; -// mode: string; -// }): void => { -// const containerInfo = ContainerInfoFunc(`${microservice}`); -// dockerHelper -// .getDockerContainer(microservice) -// .then(containerData => { -// setInterval(() => { -// dockerHelper -// .readDockerContainer(containerData) -// .then(data => containerInfo.create(data)) -// .catch(err => { -// throw new Error(err); -// }); -// }, interval); -// }) -// .catch(error => { -// if (error.constructor.name === 'Error') throw error; -// else throw new Error(error); -// }); -// }; - -// /* ---------------- SERVER QUERY & SAVE SERVICE ---------------- */ - -// mongo.serverQuery = async (config: any): Promise => { -// await mongo.saveService(config); -// await mongo.setQueryOnInterval(config); -// }; - -// mongo.saveService = (config: any): void => { -// let microservice: string; -// if (config.mode === 'kafka') { -// microservice = 'kafkametrics'; -// } else if (config.mode === 'kubernetes') { -// microservice = 'kubernetesmetrics'; -// } else if (config.mode === 'docker') { -// microservice = `${config.containerName}`; -// } else { -// throw new Error('Unrecognized mode'); -// } -// const service = new ServicesModel({ -// microservice, -// interval: config.interval, -// }); -// service -// .save() -// .then(() => console.log(`Adding "${microservice}" to the services table`)) -// .catch(err => -// console.log(`Error saving "${microservice}" to the services table: `, err.message) -// ); -// }; - -// /* ---------------- SET QUERY ON INTERVAL ---------------- */ - -// mongo.setQueryOnInterval = async (config: any): Promise => { -// let model: any; 
-// let metricsQuery: any; -// let length = 0; -// const currentMetricNames: { [key: string]: any } = {}; -// if (config.mode === 'kafka') { -// model = KafkaModel; -// metricsQuery = await utilities.helpers.kafkaMetricsQuery; -// } else if (config.mode === 'kubernetes') { -// model = KubernetesModel; -// metricsQuery = await utilities.helpers.promMetricsQuery; -// } else if (config.mode === 'docker') { -// model = ContainerInfoFunc(`${config.containerName}`); -// metricsQuery = utilities.helpers.promMetricsQuery; -// } else { -// throw new Error('Unrecognized mode'); -// } -// length = await mongo.getSavedMetricsLength(config.mode, currentMetricNames); -// console.log('currentMetricNames.length: ', Object.keys(currentMetricNames).length); -// setInterval(() => { -// metricsQuery(config) -// .then(async (parsedArray: any[]) => { -// console.log('parsedArray.length is: ', parsedArray.length); -// if (length !== parsedArray.length) { -// length = await mongo.addMetrics(parsedArray, config.mode, currentMetricNames); -// } -// if (config.mode === 'docker') { -// const documents: any[] = []; -// for (const metric of parsedArray) { -// if (currentMetricNames[metric.metric]) { -// documents.push(model(metric)); -// } -// } -// await model.insertMany(parsedArray, (err: any) => { -// if (err) console.error(err); -// else console.log(`${config.mode} metrics recorded in MongoDB`); -// }); -// } -// const allMetrics = await model.find({}); -// console.log('allMetrics.length: ', allMetrics.length); -// console.log("🟑 start creating dashboards 🟑"); -// await mongo.createGrafanaDashboards(config, allMetrics); -// console.log("βœ… finish creating dashboards βœ…"); -// }) -// .catch(err => console.log(`Error inserting ${config.mode} documents in MongoDB: `, err)); -// }, 40000); -// }; - -// /* ---------------- GET SAVED METRICS LENGTH ---------------- */ - -// mongo.getSavedMetricsLength = async ( -// mode: string, -// currentMetricNames: { [key: string]: any } -// ): Promise => { -// const currentMetrics = await MetricsModel.find({ mode }); -// if (currentMetrics.length > 0) { -// currentMetrics.forEach(el => { -// const { metric, selected } = el; -// currentMetricNames[metric] = selected; -// }); -// } -// return currentMetrics.length; -// }; - -// /* ---------------- ADD METRICS ---------------- */ - -// mongo.addMetrics = async ( -// healthMetrics: any, -// mode: string, -// currentMetricNames: { [key: string]: any } -// ): Promise => { -// const newMets: any[] = []; -// for (let healthMetric of healthMetrics) { -// const { metric, category } = healthMetric; -// if (!(metric in currentMetricNames)) { -// newMets.push({ metric, mode, category }); -// currentMetricNames[metric] = true; -// } -// } -// await MetricsModel.create(newMets); -// return healthMetrics.length; -// }; - -// /* ---------------- CREATE GRAFANA DASHBOARDS ---------------- */ - -// mongo.createGrafanaDashboards = async ( -// config: any, -// parsedArray: any[] -// ): Promise => { -// try { -// console.log('In mongo.createGrafanaDashboards!!!'); -// const datasource = await utilities.helpers.getGrafanaDatasource(config.grafanaAPIKey); -// for (let metric of parsedArray) { -// console.log(`πŸŽ‰ creating dashboard πŸŽ‰`); -// await utilities.helpers.createGrafanaDashboard(metric, datasource, "timeseries", config.grafanaAPIKey); -// } -// } catch (err) { -// console.error("error in mongo.createGrafanaDashboards: ", err); -// } -// }; - -// /* ---------------- STORE GRAFANA API KEY ---------------- */ - -// mongo.storeGrafanaAPIKey = 
async (config: any): Promise => { -// try { -// console.log('In mongo.storeGrafanaAPIKey!!!'); -// await GrafanaAPIKeyModel.create({ token: config.grafanaAPIKey }); -// console.log('Grafana API Key stored in MongoDB'); -// } catch (err) { -// console.error("error in mongo.storeGrafanaAPIKey: ", err); -// } -// }; - -// /* ============================================================================= -// Export the Updated Mongo Controller -// ============================================================================= */ -// export default mongo; - -// import mongoose from 'mongoose'; -// import alert from './alert'; -// import CommunicationModel from '../models/CommunicationModel'; -// import ServicesModel from '../models/ServicesModel'; -// import HealthModelFunc from '../models/HealthModel'; -// import ContainerInfoFunc from '../models/ContainerInfo'; -// import KafkaModel from '../models/KafkaModel'; -// import KubernetesModel from '../models/KubernetesModel'; -// // Updated import: use the default import since healthHelpers is exported as default. -// import healthHelpers from './healthHelpers'; -// import MetricsModel from '../models/MetricsModel'; -// import dockerHelper from './dockerHelper'; -// // If utilities exports an object with a property `helpers`, leave the import as-is. -// import utilities from './utilities.js'; -// import GrafanaAPIKeyModel from '../models/GrafanaAPIKeyModel'; - -// mongoose.set('strictQuery', true); - -// /* ----------------------------------------------------------------------------- -// Interface & mongo object initialization code (omitted for brevity) -// ----------------------------------------------------------------------------- */ - -// interface MongoController { -// connect: ({ database }: { database: { URI: string } }) => Promise; -// services: ({ microservice, interval }: { microservice: string; interval: number }) => void; -// communications: ({ -// microservice, -// slack, -// email, -// }: { -// microservice: string; -// slack?: any; -// email?: any; -// }) => (req: any, res: any, next: any) => void; -// health: ({ microservice, interval, mode }: { microservice: string; interval: number; mode: string; }) => Promise; -// docker: ({ microservice, interval, mode }: { microservice: string; interval: number; mode: string; }) => void; -// serverQuery: (config: any) => Promise; -// saveService: (config: any) => void; -// setQueryOnInterval: (config: any) => Promise; -// getSavedMetricsLength: (mode: string, currentMetricNames: { [key: string]: any }) => Promise; -// addMetrics: (healthMetrics: any, mode: string, currentMetricNames: { [key: string]: any }) => Promise; -// createGrafanaDashboards: (config: any, parsedArray: any[]) => Promise; -// storeGrafanaAPIKey: (config: any) => Promise; -// } - -// const mongo: MongoController = {} as MongoController; - -// /* --------------------- CONNECT, SERVICES, COMMUNICATIONS --------------------- */ - -// mongo.connect = async ({ database }: { database: { URI: string } }): Promise => { -// console.log('Attempting to connect to database...'); -// try { -// await mongoose.connect(database.URI); -// console.log(`MongoDB database connected at ${database.URI.slice(0, 20)}...`); -// } catch ({ message }) { -// console.log('Error connecting to MongoDB:', message); -// } -// }; - -// mongo.services = ({ microservice, interval }: { microservice: string; interval: number }): void => { -// console.log(`Saving "${microservice}" to services...`); -// const newService = { microservice, interval }; -// const service 
= new ServicesModel(newService); -// service -// .save() -// .then(() => console.log(`Added new service "${microservice}"`)) -// .catch(err => console.log(`Error saving service "${microservice}": `, err.message)); -// }; - -// mongo.communications = ({ -// microservice, -// slack, -// email, -// }: { -// microservice: string; -// slack?: any; -// email?: any; -// }) => { -// console.log('Recording request cycle...'); -// return function (req: any, res: any, next: any): void { -// const newComms = { -// microservice: microservice, -// endpoint: req.originalUrl, -// request: req.method, -// correlatingid: res.getHeaders()['x-correlation-id'], -// }; -// res.on('finish', () => { -// if (res.statusCode >= 400) { -// if (slack) alert.sendSlack(res.statusCode, res.statusMessage, slack); -// if (email) alert.sendEmail(res.statusCode, res.statusMessage, email); -// } -// newComms['responsestatus'] = res.statusCode; -// newComms['responsemessage'] = res.statusMessage; -// const communication = new CommunicationModel(newComms); -// communication -// .save() -// .then(() => console.log('Request cycle saved')) -// .catch(err => console.log(`Error saving communications: `, err.message)); -// }); -// next(); -// }; -// }; - -// /* ---------------------------- HEALTH FUNCTION ---------------------------- */ - -// mongo.health = async ({ -// microservice, -// interval, -// mode, -// }: { -// microservice: string; -// interval: number; -// mode: string; -// }): Promise => { -// setInterval(() => { -// // Use non-null assertion to ensure collectHealthData is defined. -// healthHelpers.collectHealthData!() -// .then(async (healthMetrics: any) => { -// const currentMetrics = await MetricsModel.find({ mode }); -// if (currentMetrics.length !== healthMetrics.length) { -// await mongo.addMetrics(healthMetrics, mode, currentMetrics); -// } -// const HealthModel = HealthModelFunc(`${microservice}`); -// await HealthModel.insertMany(healthMetrics); -// }) -// .then(() => console.log('Health data recorded in MongoDB')) -// .catch(err => console.log('Error inserting health documents: ', err)); -// }, 10000); -// }; - -// /* ----------------------------- DOCKER FUNCTION ----------------------------- */ - -// mongo.docker = ({ -// microservice, -// interval, -// mode, -// }: { -// microservice: string; -// interval: number; -// mode: string; -// }): void => { -// const containerInfo = ContainerInfoFunc(`${microservice}`); -// dockerHelper -// .getDockerContainer(microservice) -// .then(containerData => { -// setInterval(() => { -// dockerHelper -// .readDockerContainer(containerData) -// .then(data => containerInfo.create(data)) -// .catch(err => { -// throw new Error(err); -// }); -// }, interval); -// }) -// .catch(error => { -// if (error.constructor.name === 'Error') throw error; -// else throw new Error(error); -// }); -// }; - -// /* ---------------- SERVER QUERY & SAVE SERVICE ---------------- */ - -// mongo.serverQuery = async (config: any): Promise => { -// await mongo.saveService(config); -// await mongo.setQueryOnInterval(config); -// }; - -// mongo.saveService = (config: any): void => { -// let microservice: string; -// if (config.mode === 'kafka') { -// microservice = 'kafkametrics'; -// } else if (config.mode === 'kubernetes') { -// microservice = 'kubernetesmetrics'; -// } else if (config.mode === 'docker') { -// microservice = `${config.containerName}`; -// } else { -// throw new Error('Unrecognized mode'); -// } -// const service = new ServicesModel({ -// microservice, -// interval: config.interval, 
-// }); -// service -// .save() -// .then(() => console.log(`Adding "${microservice}" to the services table`)) -// .catch(err => -// console.log(`Error saving "${microservice}" to the services table: `, err.message) -// ); -// }; - -// /* ---------------- SET QUERY ON INTERVAL ---------------- */ - -// mongo.setQueryOnInterval = async (config: any): Promise => { -// let model: any; -// let metricsQuery: any; -// let length = 0; -// const currentMetricNames: { [key: string]: any } = {}; -// if (config.mode === 'kafka') { -// model = KafkaModel; -// metricsQuery = await utilities.helpers.kafkaMetricsQuery; -// } else if (config.mode === 'kubernetes') { -// model = KubernetesModel; -// metricsQuery = await utilities.helpers.promMetricsQuery; -// } else if (config.mode === 'docker') { -// model = ContainerInfoFunc(`${config.containerName}`); -// metricsQuery = utilities.helpers.promMetricsQuery; -// } else { -// throw new Error('Unrecognized mode'); -// } -// length = await mongo.getSavedMetricsLength(config.mode, currentMetricNames); -// console.log('currentMetricNames.length: ', Object.keys(currentMetricNames).length); -// setInterval(() => { -// metricsQuery(config) -// .then(async (parsedArray: any[]) => { -// console.log('parsedArray.length is: ', parsedArray.length); -// if (length !== parsedArray.length) { -// length = await mongo.addMetrics(parsedArray, config.mode, currentMetricNames); -// } -// if (config.mode === 'docker') { -// const documents: any[] = []; -// for (const metric of parsedArray) { -// if (currentMetricNames[metric.metric]) { -// documents.push(model(metric)); -// } -// } -// await model.insertMany(parsedArray, (err: any) => { -// if (err) console.error(err); -// else console.log(`${config.mode} metrics recorded in MongoDB`); -// }); -// } -// const allMetrics = await model.find({}); -// console.log('allMetrics.length: ', allMetrics.length); -// console.log("🟑 start creating dashboards 🟑"); -// await mongo.createGrafanaDashboards(config, allMetrics); -// console.log("βœ… finish creating dashboards βœ…"); -// }) -// .catch(err => console.log(`Error inserting ${config.mode} documents in MongoDB: `, err)); -// }, 40000); -// }; - -// /* ---------------- GET SAVED METRICS LENGTH ---------------- */ - -// mongo.getSavedMetricsLength = async ( -// mode: string, -// currentMetricNames: { [key: string]: any } -// ): Promise => { -// const currentMetrics = await MetricsModel.find({ mode }); -// if (currentMetrics.length > 0) { -// currentMetrics.forEach(el => { -// const { metric, selected } = el; -// currentMetricNames[metric] = selected; -// }); -// } -// return currentMetrics.length; -// }; - -// /* ---------------- ADD METRICS ---------------- */ - -// mongo.addMetrics = async ( -// healthMetrics: any, -// mode: string, -// currentMetricNames: { [key: string]: any } -// ): Promise => { -// const newMets: any[] = []; -// for (let healthMetric of healthMetrics) { -// const { metric, category } = healthMetric; -// if (!(metric in currentMetricNames)) { -// newMets.push({ metric, mode, category }); -// currentMetricNames[metric] = true; -// } -// } -// await MetricsModel.create(newMets); -// return healthMetrics.length; -// }; - -// /* ---------------- CREATE GRAFANA DASHBOARDS ---------------- */ - -// // mongo.createGrafanaDashboards = async ( -// // config: any, -// // parsedArray: any[] -// // ): Promise => { -// // try { -// // console.log('In mongo.createGrafanaDashboards!!!'); -// // const datasource = await utilities.helpers.getGrafanaDatasource(config.grafanaAPIKey); 
-// // for (let metric of parsedArray) { -// // console.log(`πŸŽ‰ creating dashboard πŸŽ‰`); -// // await utilities.helpers.createGrafanaDashboard(metric, datasource, "timeseries", config.grafanaAPIKey); -// // } -// // } catch (err) { -// // console.error("error in mongo.createGrafanaDashboards: ", err); -// // } -// // }; - -// // /* ---------------- STORE GRAFANA API KEY ---------------- */ - -// // mongo.storeGrafanaAPIKey = async (config: any): Promise => { -// // try { -// // console.log('In mongo.storeGrafanaAPIKey!!!'); -// // await GrafanaAPIKeyModel.create({ token: config.grafanaAPIKey }); -// // console.log('Grafana API Key stored in MongoDB'); -// // } catch (err) { -// // console.error("error in mongo.storeGrafanaAPIKey: ", err); -// // } -// // }; - -// /* ============================================================================= -// Export the Updated Mongo Controller -// ============================================================================= */ -// export default mongo; -// Example: utilities.ts -// import mongoose from 'mongoose'; -// import alert from './alert'; -// import CommunicationModel from '../models/CommunicationModel'; -// import ServicesModel from '../models/ServicesModel'; -// import HealthModelFunc from '../models/HealthModel'; -// import ContainerInfoFunc from '../models/ContainerInfo'; -// import KafkaModel from '../models/KafkaModel'; -// import KubernetesModel from '../models/KubernetesModel'; -// // We assume `collectHealthData` is a default or named import from 'healthHelpers'. -// import collectHealthData from './healthHelpers'; -// import MetricsModel from '../models/MetricsModel'; -// import dockerHelper from './dockerHelper'; -// // We assume `utilities` exports an object with the missing Grafana functions: -// import utilities from './utilities'; -// import GrafanaAPIKeyModel from '../models/GrafanaAPIKeyModel'; - -// mongoose.set('strictQuery', true); - -// const mongo: any = {}; // or define a proper TypeScript interface if desired - -// /** 1) Connect to MongoDB */ -// mongo.connect = async ({ database }: { database: { URI: string } }) => { -// console.log('Attempting to connect to database...'); -// try { -// await mongoose.connect(database.URI); -// console.log(`MongoDB database connected at ${database.URI.slice(0, 20)}...`); -// } catch (error: any) { -// console.log('Error connecting to MongoDB:', error.message); -// } -// }; - -// /** 2) Create services collection */ -// mongo.services = ({ microservice, interval }: { microservice: string; interval: number }) => { -// console.log(`Saving "${microservice}" to services...`); -// const newService = { microservice, interval }; -// const service = new ServicesModel(newService); - -// service -// .save() -// .then(() => console.log(`Added new service "${microservice}"`)) -// .catch(err => console.log(`Error saving service "${microservice}": `, err.message)); -// }; - -// /** 3) Logs each request/response cycle, optionally sends Slack/Email alerts */ -// mongo.communications = ({ microservice, slack, email }: any) => { -// console.log('Recording request cycle...'); -// return function (req: any, res: any, next: any) { -// const newComms = { -// microservice, -// endpoint: req.originalUrl, -// request: req.method, -// correlatingid: res.getHeaders()['x-correlation-id'], -// }; -// res.on('finish', () => { -// if (res.statusCode >= 400) { -// if (slack) alert.sendSlack(res.statusCode, res.statusMessage, slack); -// if (email) alert.sendEmail(res.statusCode, res.statusMessage, email); -// } 
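// Usage sketch (editor's addition; hypothetical consumer code, not from this repo): the function
// returned by mongo.communications is standard Express middleware, so once mongo.connect has
// resolved it can be mounted app-wide. All config values below are placeholders.
import express from 'express';
import mongo from './controllers/mongo.js';

const app = express();
await mongo.connect({ database: { URI: process.env.MONGO_URI as string } });
app.use(
  mongo.communications({
    microservice: 'payments',         // recorded with every request cycle
    slack: process.env.SLACK_WEBHOOK, // optional: Slack alert when res.statusCode >= 400
    email: undefined,                 // optional: email alerts disabled in this sketch
  })
);
app.listen(3000);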
-// newComms['responsestatus'] = res.statusCode; -// newComms['responsemessage'] = res.statusMessage; - -// const communication = new CommunicationModel(newComms); -// communication -// .save() -// .then(() => console.log('Request cycle saved')) -// .catch(err => console.log(`Error saving communications: `, err.message)); -// }); -// next(); -// }; -// }; - -// /** 4) Collect health data in intervals if not dockerized */ -// mongo.health = async ({ microservice, interval, mode }: any) => { -// setInterval(() => { -// collectHealthData() -// .then(async (healthMetrics: any[]) => { -// const currentMetrics = await MetricsModel.find({ mode }); -// if (currentMetrics.length !== healthMetrics.length) { -// await mongo.addMetrics(healthMetrics, mode, currentMetrics); -// } -// const HealthModel = HealthModelFunc(`${microservice}`); -// await HealthModel.insertMany(healthMetrics); -// }) -// .then(() => { -// console.log('Health data recorded in MongoDB'); -// }) -// .catch((err: any) => console.log('Error inserting health documents: ', err)); -// }, interval || 10000); -// }; - -// /** 5) If dockerized, collects container info on intervals */ -// mongo.docker = ({ microservice, interval, mode }: any) => { -// const containerInfo = ContainerInfoFunc(`${microservice}`); -// dockerHelper -// .getDockerContainer(microservice) -// .then(containerData => { -// setInterval(() => { -// dockerHelper -// .readDockerContainer(containerData) -// .then(data => containerInfo.create(data)) -// .catch(err => { -// throw new Error(err); -// }); -// }, interval || 10000); -// }) -// .catch(error => { -// throw new Error(error); -// }); -// }; - -// /** 6) Called for 'kafka', 'kubernetes', or 'docker' mode to set queries on intervals */ -// mongo.serverQuery = async (config: any) => { -// await mongo.saveService(config); -// await mongo.setQueryOnInterval(config); -// }; - -// /** Save the microservice entry in 'services' table */ -// mongo.saveService = (config: any) => { -// let microservice; -// if (config.mode === 'kafka') { -// microservice = 'kafkametrics'; -// } else if (config.mode === 'kubernetes') { -// microservice = 'kubernetesmetrics'; -// } else if (config.mode === 'docker') { -// microservice = `${config.containerName}`; -// } else { -// throw new Error('Unrecognized mode'); -// } - -// const service = new ServicesModel({ -// microservice, -// interval: config.interval, -// }); -// service -// .save() -// .then(() => console.log(`Adding "${microservice}" to the services table`)) -// .catch(err => -// console.log(`Error saving "${microservice}" to the services table: `, err.message) -// ); -// }; - -// /** Schedules queries to Prometheus or Kafka at intervals, then calls createGrafanaDashboards */ -// mongo.setQueryOnInterval = async (config: any) => { -// let model: any; -// let metricsQuery: Function; -// let length = 0; -// const currentMetricNames: Record = {}; - -// if (config.mode === 'kafka') { -// model = KafkaModel; -// metricsQuery = utilities.kafkaMetricsQuery; // if it's a function, no `await` needed -// } else if (config.mode === 'kubernetes') { -// model = KubernetesModel; -// metricsQuery = utilities.promMetricsQuery; -// } else if (config.mode === 'docker') { -// model = ContainerInfoFunc(`${config.containerName}`); -// metricsQuery = utilities.promMetricsQuery; -// } else { -// throw new Error('Unrecognized mode'); -// } - -// length = await mongo.getSavedMetricsLength(config.mode, currentMetricNames); - -// setInterval(() => { -// metricsQuery(config) -// .then(async (parsedArray: 
any[]) => { -// console.log('parsedArray.length is: ', parsedArray.length); -// if (length !== parsedArray.length) { -// length = await mongo.addMetrics(parsedArray, config.mode, currentMetricNames); -// } - -// if (config.mode === 'docker') { -// const documents: any[] = []; -// for (const metric of parsedArray) { -// // Only store if user has 'selected' the metric -// if (currentMetricNames[metric.metric]) { -// documents.push(model(metric)); -// } -// } -// await model.insertMany(documents, (err: any) => { -// if (err) console.error(err); -// else console.log(`${config.mode} metrics recorded in MongoDB`); -// }); -// } - -// const allMetrics = await model.find({}); -// console.log('allMetrics.length: ', allMetrics.length); - -// console.log('🟑 start creating dashboards 🟑'); -// await mongo.createGrafanaDashboards(config, allMetrics); -// console.log('βœ… finish creating dashboards βœ…'); -// }) -// .catch(err => console.log(`Error inserting ${config.mode} documents in MongoDB: `, err)); -// }, config.interval || 40000); -// }; - -// /** Helper to map saved metrics & user selections in the DB */ -// mongo.getSavedMetricsLength = async (mode: string, currentMetricNames: Record) => { -// const currentMetrics = await MetricsModel.find({ mode }); -// if (currentMetrics.length > 0) { -// currentMetrics.forEach((el: any) => { -// const { metric, selected } = el; -// currentMetricNames[metric] = selected; -// }); -// } -// return currentMetrics.length; -// }; - -// /** Insert newly discovered metrics into the Metrics collection */ -// mongo.addMetrics = async (healthMetrics: any[], mode: string, currentMetricNames: Record) => { -// const newMets: any[] = []; -// for (let healthMetric of healthMetrics) { -// const { metric, category } = healthMetric; -// if (!(metric in currentMetricNames)) { -// newMets.push({ metric, mode, category }); -// currentMetricNames[metric] = true; -// } -// } -// await MetricsModel.create(newMets); -// return healthMetrics.length; -// }; - -// /** Auto-provisions Grafana dashboards for the newly collected metrics */ -// mongo.createGrafanaDashboards = async (config: any, parsedArray: any[]) => { -// try { -// console.log('In mongo.createGrafanaDashboards!!!'); -// console.log('Calling utilities.getGrafanaDatasource()'); - -// // -> calls the newly reintroduced function -// const datasource = await utilities.getGrafanaDatasource(config.grafanaAPIKey); - -// for (let metric of parsedArray) { -// console.log(`πŸŽ‰ creating dashboard πŸŽ‰`); -// // -> calls the newly reintroduced function -// await utilities.createGrafanaDashboard(metric, datasource, 'timeseries', config.grafanaAPIKey); -// } -// } catch (err) { -// console.error('error in mongo.createGrafanaDashboards: ', err); -// } -// }; - -// /** Optionally store the Grafana API Key in MongoDB */ -// mongo.storeGrafanaAPIKey = async (config: any) => { -// try { -// console.log('In mongo.storeGrafanaAPIKey!!!'); -// await GrafanaAPIKeyModel.create({ token: config.grafanaAPIKey }); -// console.log('Grafana API Key stored in MongoDB'); -// } catch (err) { -// console.error('error in mongo.storeGrafanaAPIKey: ', err); -// } -// }; - -// export default mongo; import mongoose from 'mongoose'; import alert from './alert.js'; import CommunicationModel from '../models/CommunicationModel.js'; @@ -1540,42 +7,52 @@ import ContainerInfoFunc from '../models/ContainerInfo.js'; import KafkaModel from '../models/KafkaModel.js'; import KubernetesModel from '../models/KubernetesModel.js'; -// πŸ‘‡ Import the *object* from 
healthHelpers, not a direct function +// Imports the object instead of a direct function import healthHelpers from './healthHelpers.js'; - import MetricsModel from '../models/MetricsModel.js'; import dockerHelper from './dockerHelper.js'; import utilities from './utilities.js'; import GrafanaAPIKeyModel from '../models/GrafanaAPIKeyModel.js'; +//Enforces strict query behavior in Mongoose mongoose.set('strictQuery', true); +// Define the mongo object to store functions const mongo: any = {}; -/** 1) Connect to MongoDB */ +/** + * Connect to MongoDB + * - Uses an async function to establish a connection. + * - Catches errors to prevent crashing on failed connection attempts. + */ mongo.connect = async ({ database }: { database: { URI: string } }) => { console.log('Attempting to connect to database...'); try { await mongoose.connect(database.URI); - console.log(`MongoDB database connected at ${database.URI.slice(0, 20)}...`); + console.log(`βœ… MongoDB database connected at ${database.URI.slice(0, 20)}...`); } catch (error: any) { - console.log('Error connecting to MongoDB:', error.message); + console.log('❌ Error connecting to MongoDB:', error.message); } }; -/** 2) Create services collection */ +/** + Create services collection + * - Stores details about microservices and their monitoring intervals. + */ mongo.services = ({ microservice, interval }: { microservice: string; interval: number }) => { console.log(`Saving "${microservice}" to services...`); - const newService = { microservice, interval }; - const service = new ServicesModel(newService); + const newService = new ServicesModel({ microservice, interval }); - service + newService .save() - .then(() => console.log(`Added new service "${microservice}"`)) - .catch(err => console.log(`Error saving service "${microservice}": `, err.message)); + .then(() => console.log(`βœ… Added new service "${microservice}"`)) + .catch(err => console.log(`❌ Error saving service "${microservice}": `, err.message)); }; -/** 3) Logs each request/response cycle, optionally sends Slack/Email alerts */ +/** + Logs each request/response cycle + * - Optionally sends Slack/Email alerts if an error (status >= 400) is encountered. + */ mongo.communications = ({ microservice, slack, email }: any) => { console.log('Recording request cycle...'); return function (req: any, res: any, next: any) { @@ -1585,6 +62,7 @@ mongo.communications = ({ microservice, slack, email }: any) => { request: req.method, correlatingid: res.getHeaders()['x-correlation-id'], }; + res.on('finish', () => { if (res.statusCode >= 400) { if (slack) alert.sendSlack(res.statusCode, res.statusMessage, slack); @@ -1593,48 +71,45 @@ mongo.communications = ({ microservice, slack, email }: any) => { newComms['responsestatus'] = res.statusCode; newComms['responsemessage'] = res.statusMessage; - const communication = new CommunicationModel(newComms); - communication + new CommunicationModel(newComms) .save() - .then(() => console.log('Request cycle saved')) - .catch(err => console.log(`Error saving communications: `, err.message)); + .then(() => console.log('βœ… Request cycle saved')) + .catch(err => console.log(`❌ Error saving communications: `, err.message)); }); + next(); }; }; -/** 4) Collect health data in intervals if not dockerized */ +/** + Collects system health data at specified intervals + * - Uses `healthHelpers.collectHealthData()` to gather CPU, memory, and process metrics. 
+ */ mongo.health = async ({ microservice, interval, mode }: any) => { - // default to 10 seconds if interval not provided - const pollInterval = interval || 10000; + const pollInterval = interval || 10000; // Default interval: 10 seconds setInterval(() => { - // πŸ‘‡ Call the object's method healthHelpers .collectHealthData() .then(async (healthMetrics: any[]) => { - // Insert your logic for storing metrics const currentMetrics = await MetricsModel.find({ mode }); if (currentMetrics.length !== healthMetrics.length) { await mongo.addMetrics(healthMetrics, mode, currentMetrics); } - // const HealthModel = HealthModelFunc(`${microservice}`); - // await HealthModel.insertMany(healthMetrics); - await HealthModelFunc.insertMany(healthMetrics); - - }) - .then(() => { - console.log('Health data recorded in MongoDB'); + await HealthModelFunc.insertMany(healthMetrics); }) - .catch((err: any) => console.log('Error inserting health documents: ', err)); + .then(() => console.log('βœ… Health data recorded in MongoDB')) + .catch((err: any) => console.log('❌ Error inserting health documents:', err)); }, pollInterval); }; -/** 5) If dockerized, collects container info on intervals */ +/** + Collects container info if running in Docker + * - Uses `dockerHelper` to fetch container stats at regular intervals. + */ mongo.docker = ({ microservice, interval, mode }: any) => { const pollInterval = interval || 10000; - // const containerInfo = ContainerInfoFunc(`${microservice}`); dockerHelper .getDockerContainer(microservice) .then(containerData => { @@ -1652,13 +127,17 @@ mongo.docker = ({ microservice, interval, mode }: any) => { }); }; -/** 6) Called for 'kafka', 'kubernetes', or 'docker' mode to set queries on intervals */ +/** + Runs queries for Kafka, Kubernetes, or Docker at intervals + */ mongo.serverQuery = async (config: any) => { await mongo.saveService(config); await mongo.setQueryOnInterval(config); }; -/** Save the microservice entry in 'services' table */ +/** + Saves microservice info in the 'services' table + */ mongo.saveService = (config: any) => { let microservice; if (config.mode === 'kafka') { @@ -1668,22 +147,18 @@ mongo.saveService = (config: any) => { } else if (config.mode === 'docker') { microservice = `${config.containerName}`; } else { - throw new Error('Unrecognized mode'); + throw new Error('❌ Unrecognized mode'); } - const service = new ServicesModel({ - microservice, - interval: config.interval, - }); - service + new ServicesModel({ microservice, interval: config.interval }) .save() - .then(() => console.log(`Adding "${microservice}" to the services table`)) - .catch(err => - console.log(`Error saving "${microservice}" to the services table: `, err.message) - ); + .then(() => console.log(`βœ… Added "${microservice}" to services`)) + .catch(err => console.log(`❌ Error saving "${microservice}": `, err.message)); }; -/** Schedules queries to Prometheus or Kafka at intervals, then calls createGrafanaDashboards */ +/** + Collects new metrics at intervals and updates dashboards + */ mongo.setQueryOnInterval = async (config: any) => { let model: any; let metricsQuery: Function; @@ -1698,10 +173,9 @@ mongo.setQueryOnInterval = async (config: any) => { metricsQuery = utilities.helpers.promMetricsQuery; } else if (config.mode === 'docker') { model = ContainerInfoFunc; - // model = ContainerInfoFunc(`${config.containerName}`); metricsQuery = utilities.helpers.promMetricsQuery; } else { - throw new Error('Unrecognized mode'); + throw new Error('❌ Unrecognized mode'); } length = await 
mongo.getSavedMetricsLength(config.mode, currentMetricNames); @@ -1709,94 +183,41 @@ mongo.setQueryOnInterval = async (config: any) => { setInterval(() => { metricsQuery(config) .then(async (parsedArray: any[]) => { - console.log('parsedArray.length is: ', parsedArray.length); + console.log('parsedArray.length:', parsedArray.length); if (length !== parsedArray.length) { length = await mongo.addMetrics(parsedArray, config.mode, currentMetricNames); } - if (config.mode === 'docker') { - const documents: any[] = []; - for (const metric of parsedArray) { - if (currentMetricNames[metric.metric]) { - documents.push(model(metric)); - } - } - await model.insertMany(documents, (err: any) => { - if (err) console.error(err); - else console.log(`${config.mode} metrics recorded in MongoDB`); - }); - } - - const allMetrics = await model.find({}); - console.log('allMetrics.length: ', allMetrics.length); - - console.log('🟑 start creating dashboards 🟑'); - await mongo.createGrafanaDashboards(config, allMetrics); - console.log('βœ… finish creating dashboards βœ…'); + await mongo.createGrafanaDashboards(config, await model.find({})); }) - .catch(err => console.log(`Error inserting ${config.mode} documents in MongoDB: `, err)); + .catch(err => console.log(`❌ Error inserting ${config.mode} documents in MongoDB:`, err)); }, config.interval || 40000); }; -/** Helper to map saved metrics & user selections in the DB */ -mongo.getSavedMetricsLength = async (mode: string, currentMetricNames: Record) => { - const currentMetrics = await MetricsModel.find({ mode }); - if (currentMetrics.length > 0) { - currentMetrics.forEach((el: any) => { - const { metric, selected } = el; - currentMetricNames[metric] = selected; - }); - } - return currentMetrics.length; -}; - -/** Insert newly discovered metrics into the Metrics collection */ -mongo.addMetrics = async ( - healthMetrics: any[], - mode: string, - currentMetricNames: Record -) => { - const newMets: any[] = []; - for (let healthMetric of healthMetrics) { - const { metric, category } = healthMetric; - if (!(metric in currentMetricNames)) { - newMets.push({ metric, mode, category }); - currentMetricNames[metric] = true; - } - } - await MetricsModel.create(newMets); - return healthMetrics.length; -}; - -/** Auto-provisions Grafana dashboards for the newly collected metrics */ +/** + Automatically creates Grafana dashboards for collected metrics + */ mongo.createGrafanaDashboards = async (config: any, parsedArray: any[]) => { try { - console.log('In mongo.createGrafanaDashboards!!!'); - console.log('Calling utilities.getGrafanaDatasource()'); - + console.log('πŸ“Š Creating Grafana Dashboards'); const datasource = await utilities.helpers.getGrafanaDatasource(config.grafanaAPIKey); for (let metric of parsedArray) { - console.log(`πŸŽ‰ creating dashboard πŸŽ‰`); - await utilities.helpers.createGrafanaDashboard( - metric, - datasource, - 'timeseries', - config.grafanaAPIKey - ); + await utilities.helpers.createGrafanaDashboard(metric, datasource, 'timeseries', config.grafanaAPIKey); } } catch (err) { - console.error('error in mongo.createGrafanaDashboards: ', err); + console.error('❌ Error in mongo.createGrafanaDashboards:', err); } }; -/** Optionally store the Grafana API Key in MongoDB */ +/** + Stores the Grafana API Key in MongoDB + */ mongo.storeGrafanaAPIKey = async (config: any) => { try { - console.log('In mongo.storeGrafanaAPIKey!!!'); await GrafanaAPIKeyModel.create({ token: config.grafanaAPIKey }); - console.log('Grafana API Key stored in MongoDB'); + console.log('βœ… 
Grafana API Key stored'); } catch (err) { - console.error('error in mongo.storeGrafanaAPIKey: ', err); + console.error('❌ Error storing Grafana API Key:', err); } }; diff --git a/chronos_npm_package/controllers/postgres.ts b/chronos_npm_package/controllers/postgres.ts index 4801e34e7..cc14b0753 100644 --- a/chronos_npm_package/controllers/postgres.ts +++ b/chronos_npm_package/controllers/postgres.ts @@ -1,2153 +1,77 @@ - -// File: postgres.ts - -// You can either keep these require() statements or convert them to ES module imports -// import { Client } from 'pg'; -// import pkg from 'pg'; -// const { Client } = pkg; -// // import * as pg from 'pg'; const { Client } = pg -// // // Rename alert to avoid conflict with the DOM/global alert function -// const alertModule = require('./alert.js'); -// const { collectHealthData } = require('./healthHelpers.js'); -// const dockerHelper = require('./dockerHelper.js'); -// // const dockerHelper = require('./.js'); -// const utilities = require('./utilities.js'); - -// let client: any; - -// // In this example we type postgres as any. Later you might define a proper interface. -// const postgres: any = {}; - -// /** -// * Initializes connection to PostgreSQL database using provided URI -// * @param database Contains DB type and DB URI -// */ -// postgres.connect = async ({ database }: { database: { URI: string } }): Promise => { -// try { -// // Connect to user's database -// client = new Client({ connectionString: database.URI }); -// await client.connect(); - -// // Print success message -// console.log('PostgreSQL database connected at ', database.URI.slice(0, 24), '...'); -// } catch (error: any) { -// // Print error message -// console.log('Error connecting to PostgreSQL DB:', error.message); -// } -// }; - -// /** -// * Create services table with each entry representing a microservice. -// * @param microservice Microservice name -// * @param interval Interval to collect data -// */ -// postgres.services = ({ microservice, interval }: { microservice: string; interval: number }): void => { -// // Create services table if it does not exist -// client.query( -// `CREATE TABLE IF NOT EXISTS services ( -// _id SERIAL PRIMARY KEY NOT NULL, -// microservice VARCHAR(248) NOT NULL UNIQUE, -// interval INTEGER NOT NULL)`, -// (err: any, results: any) => { -// if (err) throw err; -// } -// ); - -// client.query( -// `CREATE TABLE IF NOT EXISTS metrics ( -// _id SERIAL PRIMARY KEY NOT NULL, -// metric TEXT NOT NULL UNIQUE, -// selected BOOLEAN, -// mode TEXT NOT NULL)`, -// (err: any, results: any) => { -// if (err) throw err; -// } -// ); - -// // Insert microservice name and interval into services table -// const queryString = ` -// INSERT INTO services (microservice, interval) -// VALUES ($1, $2) -// ON CONFLICT (microservice) DO NOTHING;`; - -// const values = [microservice, interval]; - -// client.query(queryString, values, (err: any, result: any) => { -// if (err) throw err; -// console.log(`Microservice "${microservice}" recorded in services table`); -// }); -// }; - -// /** -// * Creates a communications table if one does not yet exist and -// * traces the request throughout its life cycle. Will send a notification -// * to the user if contact information is provided. 
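// Usage sketch (editor's addition; the connection string and service name are placeholders): because
// the services insert above uses ON CONFLICT (microservice) DO NOTHING, registering the same
// microservice again is a harmless no-op rather than a duplicate row.
import postgres from './controllers/postgres.js';

await postgres.connect({ database: { URI: 'postgresql://user:pass@localhost:5432/chronos' } });
postgres.services({ microservice: 'payments', interval: 30000 }); // inserts ('payments', 30000)
postgres.services({ microservice: 'payments', interval: 30000 }); // conflicts on microservice -> no-op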
-// * @param microservice Microservice name -// * @param slack Slack settings (optional) -// * @param email Email settings (optional) -// */ -// postgres.communications = ({ microservice, slack, email }: { microservice: string; slack?: any; email?: any }) => { -// // Create communications table if one does not exist -// client.query( -// `CREATE TABLE IF NOT EXISTS communications( -// _id serial PRIMARY KEY, -// microservice VARCHAR(248) NOT NULL, -// endpoint varchar(248) NOT NULL, -// request varchar(16) NOT NULL, -// responsestatus INTEGER NOT NULL, -// responsemessage varchar(500) NOT NULL, -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, -// correlatingId varchar(500) -// )`, -// (err: any, results: any) => { -// if (err) throw err; -// } -// ); - -// return (req: any, res: any, next: any) => { -// // ID persists throughout request lifecycle -// const correlatingId = res.getHeaders()['x-correlation-id']; - -// // Target endpoint -// const endpoint = req.originalUrl; -// // HTTP Request Method -// const request = req.method; - -// const queryString = ` -// INSERT INTO communications (microservice, endpoint, request, responsestatus, responsemessage, correlatingId) -// VALUES ($1, $2, $3, $4, $5, $6);`; - -// // Wait for the response to finish before inserting the record -// res.on('finish', () => { -// if (res.statusCode >= 400) { -// if (slack) alertModule.sendSlack(res.statusCode, res.statusMessage, slack); -// if (email) alertModule.sendEmail(res.statusCode, res.statusMessage, email); -// } -// const responsestatus = res.statusCode; -// const responsemessage = res.statusMessage; -// const values = [microservice, endpoint, request, responsestatus, responsemessage, correlatingId]; -// client.query(queryString, values, (err: any, result: any) => { -// if (err) throw err; -// console.log('Request cycle saved'); -// }); -// }); -// next(); -// }; -// }; - -// /** -// * Constructs a parameterized query string for inserting multiple data points. -// * @param numRows Number of rows to insert -// * @param serviceName Table name to insert into -// * @returns The constructed query string -// */ -// function createQueryString(numRows: number, serviceName: string): string { -// let query = ` -// INSERT INTO -// ${serviceName} (metric, value, category, time) -// VALUES -// `; -// for (let i = 0; i < numRows; i++) { -// const newRow = `($${4 * i + 1}, $${4 * i + 2}, $${4 * i + 3}, TO_TIMESTAMP($${4 * i + 4}))`; -// query = query.concat(newRow); -// if (i !== numRows - 1) query = query.concat(','); -// } -// query = query.concat(';'); -// return query; -// } - -// /** -// * Constructs an array of values to be used with the parameterized query. -// * @param dataPointsArray Array of data point objects -// * @returns Array of values -// */ -// function createQueryArray(dataPointsArray: any[]): (string | number)[] { -// const queryArray: (string | number)[] = []; -// for (const element of dataPointsArray) { -// queryArray.push(element.metric); -// queryArray.push(element.value); -// queryArray.push(element.category); -// queryArray.push(element.time / 1000); // Convert milliseconds to seconds for PostgreSQL -// } -// return queryArray; -// } - -// /** -// * Reads and stores microservice health information in the PostgreSQL database at every interval. -// * @param microservice Microservice name -// * @param interval Interval (ms) for continuous data collection -// * @param mode The mode (e.g. 
"kafka", "kubernetes") -// */ -// postgres.health = async ({ microservice, interval, mode }: { microservice: string; interval: number; mode: string }): Promise => { -// let l = 0; -// const currentMetricNames: { [key: string]: boolean } = {}; - -// l = await postgres.getSavedMetricsLength(mode, currentMetricNames); - -// // Create table for the microservice if it doesn't exist yet -// const createTableQuery = ` -// CREATE TABLE IF NOT EXISTS ${microservice} ( -// _id SERIAL PRIMARY KEY, -// metric VARCHAR(200), -// value FLOAT DEFAULT 0.0, -// category VARCHAR(200) DEFAULT 'event', -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP -// );`; - -// client.query(createTableQuery).catch((err: any) => -// console.log('Error creating health table in PostgreSQL:\n', err) -// ); - -// // Save data point at every interval (ms) -// setInterval(() => { -// collectHealthData() -// .then(async (data: any[]) => { -// if (l !== data.length) { -// l = await postgres.addMetrics(data, mode, currentMetricNames); -// } -// const documents = data.filter(el => el.metric in currentMetricNames); -// const numRows = documents.length; -// const queryString = createQueryString(numRows, microservice); -// const queryArray = createQueryArray(documents); -// return client.query(queryString, queryArray); -// }) -// .then(() => console.log('Health data recorded in PostgreSQL')) -// .catch((err: any) => console.log('Error inserting health data into PostgreSQL:\n', err)); -// }, interval); -// }; - -// /** -// * Runs instead of health when dockerized. -// * Collects container information. -// * @param microservice Microservice name -// * @param interval Interval (ms) to collect docker data -// */ -// postgres.docker = function ({ microservice, interval }: { microservice: string; interval: number }): void { -// // Create containerInfo table if it does not exist -// client.query( -// `CREATE TABLE IF NOT EXISTS containerInfo( -// _id serial PRIMARY KEY, -// microservice varchar(500) NOT NULL, -// containerName varchar(500) NOT NULL, -// containerId varchar(500) NOT NULL, -// containerPlatform varchar(500), -// containerStartTime varchar(500), -// containerMemUsage real DEFAULT 0, -// containerMemLimit real DEFAULT 0, -// containerMemPercent real DEFAULT 0, -// containerCpuPercent real DEFAULT 0, -// networkReceived real DEFAULT 0, -// networkSent real DEFAULT 0, -// containerProcessCount integer DEFAULT 0, -// containerRestartCount integer DEFAULT 0 -// )`, -// (err: any, results: any) => { -// if (err) throw err; -// } -// ); - -// dockerHelper -// .getDockerContainer(microservice) -// .then((containerData: any) => { -// setInterval(() => { -// dockerHelper -// .readDockerContainer(containerData) -// .then((data: any) => { -// const queryString = ` -// INSERT INTO containerInfo( -// microservice, -// containerName, -// containerId, -// containerPlatform, -// containerStartTime, -// containerMemUsage, -// containerMemLimit, -// containerMemPercent, -// containerCpuPercent, -// networkReceived, -// networkSent, -// containerProcessCount, -// containerRestartCount -// ) -// VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)`; -// const values = [ -// microservice, -// data.containername, -// data.containerid, -// data.platform, -// data.starttime, -// data.memoryusage, -// data.memorylimit, -// data.memorypercent, -// data.cpupercent, -// data.networkreceived, -// data.networksent, -// data.processcount, -// data.restartcount, -// ]; - -// client.query(queryString, values, (err: any, results: any) => { -// if (err) 
throw err; -// console.log(`Docker data recorded in SQL table containerInfo`); -// }); -// }) -// .catch((err: any) => console.log('Error reading docker container:', err)); -// }, interval); -// }) -// .catch((error: any) => { -// if (error.constructor.name === 'Error') throw error; -// else throw new Error(error); -// }); -// }; - -// postgres.serverQuery = (config: any): void => { -// postgres.saveService(config); -// postgres.setQueryOnInterval(config); -// }; - -// postgres.saveService = (config: any): void => { -// let service: string; -// if (config.mode === 'kakfa') service = 'kafkametrics'; -// else if (config.mode === 'kubernetes') service = 'kubernetesmetrics'; -// else throw new Error('Unrecognized mode'); - -// postgres.services({ microservice: service, interval: config.interval }); - -// // Create service table if it does not exist -// const createTableQuery = ` -// CREATE TABLE IF NOT EXISTS ${service} ( -// _id SERIAL PRIMARY KEY, -// metric VARCHAR(200), -// value FLOAT DEFAULT 0.0, -// category VARCHAR(200) DEFAULT 'event', -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP -// );`; - -// client.query(createTableQuery).catch((err: any) => -// console.log(`Error creating ${service} table in PostgreSQL:\n`, err) -// ); -// }; - -// postgres.setQueryOnInterval = async (config: any): Promise => { -// let service: string; -// let metricsQuery: any; -// let currentMetrics: any; -// let l = 0; -// const currentMetricNames: { [key: string]: boolean } = {}; - -// if (config.mode === 'kakfa') { -// service = 'kafkametrics'; -// metricsQuery = utilities.kafkaMetricsQuery; -// } else if (config.mode === 'kubernetes') { -// service = 'kubernetesmetrics'; -// metricsQuery = utilities.promMetricsQuery; -// } else { -// throw new Error('Unrecognized mode'); -// } - -// currentMetrics = await client.query(`SELECT * FROM metrics WHERE mode='${config.mode}';`); -// currentMetrics = currentMetrics.rows; -// if (currentMetrics.length > 0) { -// currentMetrics.forEach((el: any) => { -// const { metric, selected } = el; -// currentMetricNames[metric] = selected; -// l = currentMetrics.length; -// }); -// } - -// setInterval(() => { -// metricsQuery(config) -// .then(async (parsedArray: any[]) => { -// if (l !== parsedArray.length) { -// l = await postgres.addMetrics(parsedArray, config.mode, currentMetricNames); -// } -// const documents: any[] = []; -// for (const metric of parsedArray) { -// if (currentMetricNames[metric.metric]) documents.push(metric); -// } -// const numRows = documents.length; -// const queryString = createQueryString(numRows, service); -// const queryArray = createQueryArray(documents); -// return client.query(queryString, queryArray); -// }) -// .then(() => console.log(`${config.mode} metrics recorded in PostgreSQL`)) -// .catch((err: any) => -// console.log(`Error inserting ${config.mode} metrics into PostgreSQL:\n`, err) -// ); -// }, config.interval); -// }; - -// postgres.getSavedMetricsLength = async ( -// mode: string, -// currentMetricNames: { [key: string]: boolean } -// ): Promise => { -// let currentMetrics = await client.query(`SELECT * FROM metrics WHERE mode='${mode}';`); -// if (currentMetrics.rows.length > 0) { -// currentMetrics.rows.forEach((el: any) => { -// const { metric, selected } = el; -// currentMetricNames[metric] = selected; -// }); -// } -// return currentMetrics.rows.length || 0; -// }; - -// postgres.addMetrics = async ( -// arr: any[], -// mode: string, -// currentMetricNames: { [key: string]: boolean } -// ): Promise => { -// let 
metricsQueryString = 'INSERT INTO metrics (metric, selected, mode) VALUES '; -// arr.forEach((el: any) => { -// if (!(el.metric in currentMetricNames)) { -// currentMetricNames[el.metric] = true; -// metricsQueryString = metricsQueryString.concat(`('${el.metric}', true, '${mode}'), `); -// } -// }); -// metricsQueryString = metricsQueryString.slice(0, metricsQueryString.lastIndexOf(', ')).concat(';'); -// await client.query(metricsQueryString); -// return arr.length; -// }; - -// export default postgres; - -// // File: postgres.ts - -// // Use Node's createRequire to load CommonJS modules in this ES module file. -// import { createRequire } from 'module'; -// const require = createRequire(import.meta.url); - -// // Use createRequire to import the 'pg' package -// const { Client } = require('pg'); - -// // Import local modules using require -// const alertModule = require('./alert.js'); -// const { collectHealthData } = require('./healthHelpers.js'); -// const dockerHelper = require('./dockerHelper.js'); -// const utilities = require('./utilities.js'); - -// let client: any; - -// // In this example we type postgres as any. Later you might define a proper interface. -// const postgres: any = {}; - -// /** -// * Initializes connection to PostgreSQL database using provided URI -// * @param database Contains DB type and DB URI -// */ -// postgres.connect = async ({ database }: { database: { URI: string } }): Promise => { -// try { -// // Connect to user's database -// client = new Client({ connectionString: database.URI }); -// await client.connect(); - -// // Print success message -// console.log('PostgreSQL database connected at ', database.URI.slice(0, 24), '...'); -// } catch (error: any) { -// // Print error message -// console.log('Error connecting to PostgreSQL DB:', error.message); -// } -// }; - -// /** -// * Create services table with each entry representing a microservice. -// * @param microservice Microservice name -// * @param interval Interval to collect data -// */ -// postgres.services = ({ microservice, interval }: { microservice: string; interval: number }): void => { -// // Create services table if it does not exist -// client.query( -// `CREATE TABLE IF NOT EXISTS services ( -// _id SERIAL PRIMARY KEY NOT NULL, -// microservice VARCHAR(248) NOT NULL UNIQUE, -// interval INTEGER NOT NULL)`, -// (err: any, results: any) => { -// if (err) throw err; -// } -// ); - -// client.query( -// `CREATE TABLE IF NOT EXISTS metrics ( -// _id SERIAL PRIMARY KEY NOT NULL, -// metric TEXT NOT NULL UNIQUE, -// selected BOOLEAN, -// mode TEXT NOT NULL)`, -// (err: any, results: any) => { -// if (err) throw err; -// } -// ); - -// // Insert microservice name and interval into services table -// const queryString = ` -// INSERT INTO services (microservice, interval) -// VALUES ($1, $2) -// ON CONFLICT (microservice) DO NOTHING;`; - -// const values = [microservice, interval]; - -// client.query(queryString, values, (err: any, result: any) => { -// if (err) throw err; -// console.log(`Microservice "${microservice}" recorded in services table`); -// }); -// }; - -// /** -// * Creates a communications table if one does not yet exist and -// * traces the request throughout its life cycle. Will send a notification -// * to the user if contact information is provided. 
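// Worked example (editor's addition; metric names and values are hypothetical): for two data points,
// createQueryString(2, 'payments') produces, ignoring whitespace,
//   INSERT INTO payments (metric, value, category, time)
//   VALUES ($1, $2, $3, TO_TIMESTAMP($4)),($5, $6, $7, TO_TIMESTAMP($8));
// and createQueryArray flattens the matching values, converting time from milliseconds to seconds:
const points = [
  { metric: 'cpu_usage', value: 12.5, category: 'compute', time: 1700000000000 },
  { metric: 'memory_rss', value: 83.1, category: 'memory', time: 1700000001000 },
];
// createQueryArray(points) -> ['cpu_usage', 12.5, 'compute', 1700000000, 'memory_rss', 83.1, 'memory', 1700000001]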
-// * @param microservice Microservice name -// * @param slack Slack settings (optional) -// * @param email Email settings (optional) -// */ -// postgres.communications = ({ microservice, slack, email }: { microservice: string; slack?: any; email?: any }) => { -// // Create communications table if one does not exist -// client.query( -// `CREATE TABLE IF NOT EXISTS communications( -// _id serial PRIMARY KEY, -// microservice VARCHAR(248) NOT NULL, -// endpoint varchar(248) NOT NULL, -// request varchar(16) NOT NULL, -// responsestatus INTEGER NOT NULL, -// responsemessage varchar(500) NOT NULL, -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, -// correlatingId varchar(500) -// )`, -// (err: any, results: any) => { -// if (err) throw err; -// } -// ); - -// return (req: any, res: any, next: any) => { -// // ID persists throughout request lifecycle -// const correlatingId = res.getHeaders()['x-correlation-id']; - -// // Target endpoint -// const endpoint = req.originalUrl; -// // HTTP Request Method -// const request = req.method; - -// const queryString = ` -// INSERT INTO communications (microservice, endpoint, request, responsestatus, responsemessage, correlatingId) -// VALUES ($1, $2, $3, $4, $5, $6);`; - -// // Wait for the response to finish before inserting the record -// res.on('finish', () => { -// if (res.statusCode >= 400) { -// if (slack) alertModule.sendSlack(res.statusCode, res.statusMessage, slack); -// if (email) alertModule.sendEmail(res.statusCode, res.statusMessage, email); -// } -// const responsestatus = res.statusCode; -// const responsemessage = res.statusMessage; -// const values = [microservice, endpoint, request, responsestatus, responsemessage, correlatingId]; -// client.query(queryString, values, (err: any, result: any) => { -// if (err) throw err; -// console.log('Request cycle saved'); -// }); -// }); -// next(); -// }; -// }; - -// /** -// * Constructs a parameterized query string for inserting multiple data points. -// * @param numRows Number of rows to insert -// * @param serviceName Table name to insert into -// * @returns The constructed query string -// */ -// function createQueryString(numRows: number, serviceName: string): string { -// let query = ` -// INSERT INTO -// ${serviceName} (metric, value, category, time) -// VALUES -// `; -// for (let i = 0; i < numRows; i++) { -// const newRow = `($${4 * i + 1}, $${4 * i + 2}, $${4 * i + 3}, TO_TIMESTAMP($${4 * i + 4}))`; -// query = query.concat(newRow); -// if (i !== numRows - 1) query = query.concat(','); -// } -// query = query.concat(';'); -// return query; -// } - -// /** -// * Constructs an array of values to be used with the parameterized query. -// * @param dataPointsArray Array of data point objects -// * @returns Array of values -// */ -// function createQueryArray(dataPointsArray: any[]): (string | number)[] { -// const queryArray: (string | number)[] = []; -// for (const element of dataPointsArray) { -// queryArray.push(element.metric); -// queryArray.push(element.value); -// queryArray.push(element.category); -// queryArray.push(element.time / 1000); // Convert milliseconds to seconds for PostgreSQL -// } -// return queryArray; -// } - -// /** -// * Reads and stores microservice health information in the PostgreSQL database at every interval. -// * @param microservice Microservice name -// * @param interval Interval (ms) for continuous data collection -// * @param mode The mode (e.g. 
"kafka", "kubernetes") -// */ -// postgres.health = async ({ microservice, interval, mode }: { microservice: string; interval: number; mode: string }): Promise => { -// let l = 0; -// const currentMetricNames: { [key: string]: boolean } = {}; - -// l = await postgres.getSavedMetricsLength(mode, currentMetricNames); - -// // Create table for the microservice if it doesn't exist yet -// const createTableQuery = ` -// CREATE TABLE IF NOT EXISTS ${microservice} ( -// _id SERIAL PRIMARY KEY, -// metric VARCHAR(200), -// value FLOAT DEFAULT 0.0, -// category VARCHAR(200) DEFAULT 'event', -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP -// );`; - -// client.query(createTableQuery).catch((err: any) => -// console.log('Error creating health table in PostgreSQL:\n', err) -// ); - -// // Save data point at every interval (ms) -// setInterval(() => { -// collectHealthData() -// .then(async (data: any[]) => { -// if (l !== data.length) { -// l = await postgres.addMetrics(data, mode, currentMetricNames); -// } -// const documents = data.filter(el => el.metric in currentMetricNames); -// const numRows = documents.length; -// const queryString = createQueryString(numRows, microservice); -// const queryArray = createQueryArray(documents); -// return client.query(queryString, queryArray); -// }) -// .then(() => console.log('Health data recorded in PostgreSQL')) -// .catch((err: any) => console.log('Error inserting health data into PostgreSQL:\n', err)); -// }, interval); -// }; - -// /** -// * Runs instead of health when dockerized. -// * Collects container information. -// * @param microservice Microservice name -// * @param interval Interval (ms) to collect docker data -// */ -// postgres.docker = function ({ microservice, interval }: { microservice: string; interval: number }): void { -// // Create containerInfo table if it does not exist -// client.query( -// `CREATE TABLE IF NOT EXISTS containerInfo( -// _id serial PRIMARY KEY, -// microservice varchar(500) NOT NULL, -// containerName varchar(500) NOT NULL, -// containerId varchar(500) NOT NULL, -// containerPlatform varchar(500), -// containerStartTime varchar(500), -// containerMemUsage real DEFAULT 0, -// containerMemLimit real DEFAULT 0, -// containerMemPercent real DEFAULT 0, -// containerCpuPercent real DEFAULT 0, -// networkReceived real DEFAULT 0, -// networkSent real DEFAULT 0, -// containerProcessCount integer DEFAULT 0, -// containerRestartCount integer DEFAULT 0 -// )`, -// (err: any, results: any) => { -// if (err) throw err; -// } -// ); - -// dockerHelper -// .getDockerContainer(microservice) -// .then((containerData: any) => { -// setInterval(() => { -// dockerHelper -// .readDockerContainer(containerData) -// .then((data: any) => { -// const queryString = ` -// INSERT INTO containerInfo( -// microservice, -// containerName, -// containerId, -// containerPlatform, -// containerStartTime, -// containerMemUsage, -// containerMemLimit, -// containerMemPercent, -// containerCpuPercent, -// networkReceived, -// networkSent, -// containerProcessCount, -// containerRestartCount -// ) -// VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)`; -// const values = [ -// microservice, -// data.containername, -// data.containerid, -// data.platform, -// data.starttime, -// data.memoryusage, -// data.memorylimit, -// data.memorypercent, -// data.cpupercent, -// data.networkreceived, -// data.networksent, -// data.processcount, -// data.restartcount, -// ]; - -// client.query(queryString, values, (err: any, results: any) => { -// if (err) 
throw err; -// console.log(`Docker data recorded in SQL table containerInfo`); -// }); -// }) -// .catch((err: any) => console.log('Error reading docker container:', err)); -// }, interval); -// }) -// .catch((error: any) => { -// if (error.constructor.name === 'Error') throw error; -// else throw new Error(error); -// }); -// }; - -// postgres.serverQuery = (config: any): void => { -// postgres.saveService(config); -// postgres.setQueryOnInterval(config); -// }; - -// postgres.saveService = (config: any): void => { -// let service: string; -// if (config.mode === 'kakfa') service = 'kafkametrics'; -// else if (config.mode === 'kubernetes') service = 'kubernetesmetrics'; -// else throw new Error('Unrecognized mode'); - -// postgres.services({ microservice: service, interval: config.interval }); - -// // Create service table if it does not exist -// const createTableQuery = ` -// CREATE TABLE IF NOT EXISTS ${service} ( -// _id SERIAL PRIMARY KEY, -// metric VARCHAR(200), -// value FLOAT DEFAULT 0.0, -// category VARCHAR(200) DEFAULT 'event', -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP -// );`; - -// client.query(createTableQuery).catch((err: any) => -// console.log(`Error creating ${service} table in PostgreSQL:\n`, err) -// ); -// }; - -// postgres.setQueryOnInterval = async (config: any): Promise => { -// let service: string; -// let metricsQuery: any; -// let currentMetrics: any; -// let l = 0; -// const currentMetricNames: { [key: string]: boolean } = {}; - -// if (config.mode === 'kakfa') { -// service = 'kafkametrics'; -// metricsQuery = utilities.kafkaMetricsQuery; -// } else if (config.mode === 'kubernetes') { -// service = 'kubernetesmetrics'; -// metricsQuery = utilities.promMetricsQuery; -// } else { -// throw new Error('Unrecognized mode'); -// } - -// currentMetrics = await client.query(`SELECT * FROM metrics WHERE mode='${config.mode}';`); -// currentMetrics = currentMetrics.rows; -// if (currentMetrics.length > 0) { -// currentMetrics.forEach((el: any) => { -// const { metric, selected } = el; -// currentMetricNames[metric] = selected; -// l = currentMetrics.length; -// }); -// } - -// setInterval(() => { -// metricsQuery(config) -// .then(async (parsedArray: any[]) => { -// if (l !== parsedArray.length) { -// l = await postgres.addMetrics(parsedArray, config.mode, currentMetricNames); -// } -// const documents: any[] = []; -// for (const metric of parsedArray) { -// if (currentMetricNames[metric.metric]) documents.push(metric); -// } -// const numRows = documents.length; -// const queryString = createQueryString(numRows, service); -// const queryArray = createQueryArray(documents); -// return client.query(queryString, queryArray); -// }) -// .then(() => console.log(`${config.mode} metrics recorded in PostgreSQL`)) -// .catch((err: any) => -// console.log(`Error inserting ${config.mode} metrics into PostgreSQL:\n`, err) -// ); -// }, config.interval); -// }; - -// postgres.getSavedMetricsLength = async ( -// mode: string, -// currentMetricNames: { [key: string]: boolean } -// ): Promise => { -// let currentMetrics = await client.query(`SELECT * FROM metrics WHERE mode='${mode}';`); -// if (currentMetrics.rows.length > 0) { -// currentMetrics.rows.forEach((el: any) => { -// const { metric, selected } = el; -// currentMetricNames[metric] = selected; -// }); -// } -// return currentMetrics.rows.length || 0; -// }; - -// postgres.addMetrics = async ( -// arr: any[], -// mode: string, -// currentMetricNames: { [key: string]: boolean } -// ): Promise => { -// let 
metricsQueryString = 'INSERT INTO metrics (metric, selected, mode) VALUES '; -// arr.forEach((el: any) => { -// if (!(el.metric in currentMetricNames)) { -// currentMetricNames[el.metric] = true; -// metricsQueryString = metricsQueryString.concat(`('${el.metric}', true, '${mode}'), `); -// } -// }); -// metricsQueryString = metricsQueryString.slice(0, metricsQueryString.lastIndexOf(', ')).concat(';'); -// await client.query(metricsQueryString); -// return arr.length; -// }; - -// export default postgres; - - -// // // File: postgres.ts - -// // // You can either keep these require() statements or convert them to ES module imports -// // import { Client } from 'pg'; -// // // Rename alert to avoid conflict with the DOM/global alert function -// // const alertModule = require('./alert'); -// // const { collectHealthData } = require('./healthHelpers'); -// // const dockerHelper = require('./dockerHelper'); -// // const utilities = require('./utilities'); - -// // let client: any; - -// // // In this example we type postgres as any. Later you might define a proper interface. -// // const postgres: any = {}; - -// // /** -// // * Initializes connection to PostgreSQL database using provided URI -// // * @param database Contains DB type and DB URI -// // */ -// // postgres.connect = async ({ database }: { database: { URI: string } }): Promise => { -// // try { -// // // Connect to user's database -// // client = new Client({ connectionString: database.URI }); -// // await client.connect(); - -// // // Print success message -// // console.log('PostgreSQL database connected at ', database.URI.slice(0, 24), '...'); -// // } catch ({ message }: { message: string }) { -// // // Print error message -// // console.log('Error connecting to PostgreSQL DB:', message); -// // } -// // }; - -// // /** -// // * Create services table with each entry representing a microservice. -// // * @param microservice Microservice name -// // * @param interval Interval to collect data -// // */ -// // postgres.services = ({ microservice, interval }: { microservice: string; interval: number }): void => { -// // // Create services table if it does not exist -// // client.query( -// // `CREATE TABLE IF NOT EXISTS services ( -// // _id SERIAL PRIMARY KEY NOT NULL, -// // microservice VARCHAR(248) NOT NULL UNIQUE, -// // interval INTEGER NOT NULL)`, -// // (err: any, results: any) => { -// // if (err) throw err; -// // } -// // ); - -// // client.query( -// // `CREATE TABLE IF NOT EXISTS metrics ( -// // _id SERIAL PRIMARY KEY NOT NULL, -// // metric TEXT NOT NULL UNIQUE, -// // selected BOOLEAN, -// // mode TEXT NOT NULL)`, -// // (err: any, results: any) => { -// // if (err) throw err; -// // } -// // ); - -// // // Insert microservice name and interval into services table -// // const queryString = ` -// // INSERT INTO services (microservice, interval) -// // VALUES ($1, $2) -// // ON CONFLICT (microservice) DO NOTHING;`; - -// // const values = [microservice, interval]; - -// // client.query(queryString, values, (err: any, result: any) => { -// // if (err) throw err; -// // console.log(`Microservice "${microservice}" recorded in services table`); -// // }); -// // }; - -// // /** -// // * Creates a communications table if one does not yet exist and -// // * traces the request throughout its life cycle. Will send a notification -// // * to the user if contact information is provided. 
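// Worked example (editor's addition; metric names are hypothetical): given two metrics not yet in
// currentMetricNames while in 'kafka' mode, the addMetrics string builder shown above produces
//   INSERT INTO metrics (metric, selected, mode) VALUES ('bytes_in', true, 'kafka'), ('bytes_out', true, 'kafka');
// the trailing ", " is sliced off before the terminating ";" is appended, both names are marked
// true in currentMetricNames, and the function resolves to arr.length (2 in this case).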
-// // * @param microservice Microservice name -// // * @param slack Slack settings (optional) -// // * @param email Email settings (optional) -// // */ -// // postgres.communications = ({ microservice, slack, email }: { microservice: string; slack?: any; email?: any }) => { -// // // Create communications table if one does not exist -// // client.query( -// // `CREATE TABLE IF NOT EXISTS communications( -// // _id serial PRIMARY KEY, -// // microservice VARCHAR(248) NOT NULL, -// // endpoint varchar(248) NOT NULL, -// // request varchar(16) NOT NULL, -// // responsestatus INTEGER NOT NULL, -// // responsemessage varchar(500) NOT NULL, -// // time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, -// // correlatingId varchar(500) -// // )`, -// // (err: any, results: any) => { -// // if (err) throw err; -// // } -// // ); - -// // return (req: any, res: any, next: any) => { -// // // ID persists throughout request lifecycle -// // const correlatingId = res.getHeaders()['x-correlation-id']; - -// // // Target endpoint -// // const endpoint = req.originalUrl; -// // // HTTP Request Method -// // const request = req.method; - -// // const queryString = ` -// // INSERT INTO communications (microservice, endpoint, request, responsestatus, responsemessage, correlatingId) -// // VALUES ($1, $2, $3, $4, $5, $6);`; - -// // // Wait for the response to finish before inserting the record -// // res.on('finish', () => { -// // if (res.statusCode >= 400) { -// // if (slack) alertModule.sendSlack(res.statusCode, res.statusMessage, slack); -// // if (email) alertModule.sendEmail(res.statusCode, res.statusMessage, email); -// // } -// // const responsestatus = res.statusCode; -// // const responsemessage = res.statusMessage; -// // const values = [microservice, endpoint, request, responsestatus, responsemessage, correlatingId]; -// // client.query(queryString, values, (err: any, result: any) => { -// // if (err) throw err; -// // console.log('Request cycle saved'); -// // }); -// // }); -// // next(); -// // }; -// // }; - -// // /** -// // * Constructs a parameterized query string for inserting multiple data points. -// // * @param numRows Number of rows to insert -// // * @param serviceName Table name to insert into -// // * @returns The constructed query string -// // */ -// // function createQueryString(numRows: number, serviceName: string): string { -// // let query = ` -// // INSERT INTO -// // ${serviceName} (metric, value, category, time) -// // VALUES -// // `; -// // for (let i = 0; i < numRows; i++) { -// // const newRow = `($${4 * i + 1}, $${4 * i + 2}, $${4 * i + 3}, TO_TIMESTAMP($${4 * i + 4}))`; -// // query = query.concat(newRow); -// // if (i !== numRows - 1) query = query.concat(','); -// // } -// // query = query.concat(';'); -// // return query; -// // } - -// // /** -// // * Constructs an array of values to be used with the parameterized query. -// // * @param dataPointsArray Array of data point objects -// // * @returns Array of values -// // */ -// // function createQueryArray(dataPointsArray: any[]): (string | number)[] { -// // const queryArray: (string | number)[] = []; -// // for (const element of dataPointsArray) { -// // queryArray.push(element.metric); -// // queryArray.push(element.value); -// // queryArray.push(element.category); -// // queryArray.push(element.time / 1000); // Convert milliseconds to seconds for PostgreSQL -// // } -// // return queryArray; -// // } - -// // /** -// // * Reads and stores microservice health information in the PostgreSQL database at every interval. 
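// Editor's sketch (hypothetical values): inside the interval callback below, only data points whose
// metric name is already a key in currentMetricNames (loaded from the metrics table, or added via
// addMetrics when the collected count changes) are written to the per-microservice table.
const currentMetricNames: { [key: string]: boolean } = { cpu_usage: true, memory_rss: true };
const data = [
  { metric: 'cpu_usage', value: 7.2, category: 'compute', time: Date.now() },
  { metric: 'event_loop_lag', value: 1.3, category: 'event', time: Date.now() }, // not registered yet
];
const documents = data.filter(el => el.metric in currentMetricNames); // keeps only the 'cpu_usage' point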
-// // * @param microservice Microservice name -// // * @param interval Interval (ms) for continuous data collection -// // * @param mode The mode (e.g. "kafka", "kubernetes") -// // */ -// // postgres.health = async ({ microservice, interval, mode }: { microservice: string; interval: number; mode: string }): Promise => { -// // let l = 0; -// // const currentMetricNames: { [key: string]: boolean } = {}; - -// // l = await postgres.getSavedMetricsLength(mode, currentMetricNames); - -// // // Create table for the microservice if it doesn't exist yet -// // const createTableQuery = ` -// // CREATE TABLE IF NOT EXISTS ${microservice} ( -// // _id SERIAL PRIMARY KEY, -// // metric VARCHAR(200), -// // value FLOAT DEFAULT 0.0, -// // category VARCHAR(200) DEFAULT 'event', -// // time TIMESTAMP DEFAULT CURRENT_TIMESTAMP -// // );`; - -// // client.query(createTableQuery).catch((err: any) => -// // console.log('Error creating health table in PostgreSQL:\n', err) -// // ); - -// // Save data point at every interval (ms) -// setInterval(() => { -// collectHealthData() -// .then(async (data: any[]) => { -// if (l !== data.length) { -// l = await postgres.addMetrics(data, mode, currentMetricNames); -// } -// const documents = data.filter(el => el.metric in currentMetricNames); -// const numRows = documents.length; -// const queryString = createQueryString(numRows, microservice); -// const queryArray = createQueryArray(documents); -// return client.query(queryString, queryArray); -// }) -// .then(() => console.log('Health data recorded in PostgreSQL')) -// .catch((err: any) => console.log('Error inserting health data into PostgreSQL:\n', err)); -// }, interval); -// }; - -// /** -// * Runs instead of health when dockerized. -// * Collects container information. 
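// Editor's sketch: the shape postgres.docker expects back from dockerHelper.readDockerContainer,
// inferred from the fields used to build the containerInfo INSERT below; the interface name itself
// is hypothetical and is not exported by this package.
interface ContainerStats {
  containername: string;     // docker container name
  containerid: string;       // docker container id
  platform: string;          // container platform
  starttime: string;         // container start time
  memoryusage: number;       // memory currently in use
  memorylimit: number;       // configured memory limit
  memorypercent: number;     // memory usage relative to the limit
  cpupercent: number;        // cpu usage
  networkreceived: number;   // network data received
  networksent: number;       // network data sent
  processcount: number;      // number of processes in the container
  restartcount: number;      // container restart count
}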
-// * @param microservice Microservice name -// * @param interval Interval (ms) to collect docker data -// */ -// postgres.docker = function ({ microservice, interval }: { microservice: string; interval: number }): void { -// // Create containerInfo table if it does not exist -// client.query( -// `CREATE TABLE IF NOT EXISTS containerInfo( -// _id serial PRIMARY KEY, -// microservice varchar(500) NOT NULL, -// containerName varchar(500) NOT NULL, -// containerId varchar(500) NOT NULL, -// containerPlatform varchar(500), -// containerStartTime varchar(500), -// containerMemUsage real DEFAULT 0, -// containerMemLimit real DEFAULT 0, -// containerMemPercent real DEFAULT 0, -// containerCpuPercent real DEFAULT 0, -// networkReceived real DEFAULT 0, -// networkSent real DEFAULT 0, -// containerProcessCount integer DEFAULT 0, -// containerRestartCount integer DEFAULT 0 -// )`, -// (err: any, results: any) => { -// if (err) throw err; -// } -// ); - -// dockerHelper -// .getDockerContainer(microservice) -// .then((containerData: any) => { -// setInterval(() => { -// dockerHelper -// .readDockerContainer(containerData) -// .then((data: any) => { -// const queryString = ` -// INSERT INTO containerInfo( -// microservice, -// containerName, -// containerId, -// containerPlatform, -// containerStartTime, -// containerMemUsage, -// containerMemLimit, -// containerMemPercent, -// containerCpuPercent, -// networkReceived, -// networkSent, -// containerProcessCount, -// containerRestartCount -// ) -// VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)`; -// const values = [ -// microservice, -// data.containername, -// data.containerid, -// data.platform, -// data.starttime, -// data.memoryusage, -// data.memorylimit, -// data.memorypercent, -// data.cpupercent, -// data.networkreceived, -// data.networksent, -// data.processcount, -// data.restartcount, -// ]; - -// client.query(queryString, values, (err: any, results: any) => { -// if (err) throw err; -// console.log(`Docker data recorded in SQL table containerInfo`); -// }); -// }) -// .catch((err: any) => console.log('Error reading docker container:', err)); -// }, interval); -// }) -// .catch((error: any) => { -// if (error.constructor.name === 'Error') throw error; -// else throw new Error(error); -// }); -// }; - -// postgres.serverQuery = (config: any): void => { -// postgres.saveService(config); -// postgres.setQueryOnInterval(config); -// }; - -// postgres.saveService = (config: any): void => { -// let service: string; -// if (config.mode === 'kakfa') service = 'kafkametrics'; -// else if (config.mode === 'kubernetes') service = 'kubernetesmetrics'; -// else throw new Error('Unrecognized mode'); - -// postgres.services({ microservice: service, interval: config.interval }); - -// // Create service table if it does not exist -// const createTableQuery = ` -// CREATE TABLE IF NOT EXISTS ${service} ( -// _id SERIAL PRIMARY KEY, -// metric VARCHAR(200), -// value FLOAT DEFAULT 0.0, -// category VARCHAR(200) DEFAULT 'event', -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP -// );`; - -// client.query(createTableQuery).catch((err: any) => -// console.log(`Error creating ${service} table in PostgreSQL:\n`, err) -// ); -// }; - -// postgres.setQueryOnInterval = async (config: any): Promise => { -// let service: string; -// let metricsQuery: any; -// let currentMetrics: any; -// let l = 0; -// const currentMetricNames: { [key: string]: boolean } = {}; - -// if (config.mode === 'kakfa') { -// service = 'kafkametrics'; -// metricsQuery = 
utilities.kafkaMetricsQuery; -// } else if (config.mode === 'kubernetes') { -// service = 'kubernetesmetrics'; -// metricsQuery = utilities.promMetricsQuery; -// } else { -// throw new Error('Unrecognized mode'); -// } - -// currentMetrics = await client.query(`SELECT * FROM metrics WHERE mode='${config.mode}';`); -// currentMetrics = currentMetrics.rows; -// if (currentMetrics.length > 0) { -// currentMetrics.forEach((el: any) => { -// const { metric, selected } = el; -// currentMetricNames[metric] = selected; -// l = currentMetrics.length; -// }); -// } - -// setInterval(() => { -// metricsQuery(config) -// .then(async (parsedArray: any[]) => { -// if (l !== parsedArray.length) { -// l = await postgres.addMetrics(parsedArray, config.mode, currentMetricNames); -// } -// const documents: any[] = []; -// for (const metric of parsedArray) { -// if (currentMetricNames[metric.metric]) documents.push(metric); -// } -// const numDataPoints = documents.length; -// const queryString = createQueryString(numDataPoints, service); -// const queryArray = createQueryArray(documents); -// return client.query(queryString, queryArray); -// }) -// .then(() => console.log(`${config.mode} metrics recorded in PostgreSQL`)) -// .catch((err: any) => -// console.log(`Error inserting ${config.mode} metrics into PostgreSQL:\n`, err) -// ); -// }, config.interval); -// }; - -// postgres.getSavedMetricsLength = async ( -// mode: string, -// currentMetricNames: { [key: string]: boolean } -// ): Promise => { -// let currentMetrics = await client.query(`SELECT * FROM metrics WHERE mode='${mode}';`); -// if (currentMetrics.rows.length > 0) { -// currentMetrics.rows.forEach((el: any) => { -// const { metric, selected } = el; -// currentMetricNames[metric] = selected; -// }); -// } -// return currentMetrics.rows.length || 0; -// }; - -// postgres.addMetrics = async ( -// arr: any[], -// mode: string, -// currentMetricNames: { [key: string]: boolean } -// ): Promise => { -// let metricsQueryString = 'INSERT INTO metrics (metric, selected, mode) VALUES '; -// arr.forEach((el: any) => { -// if (!(el.metric in currentMetricNames)) { -// currentMetricNames[el.metric] = true; -// metricsQueryString = metricsQueryString.concat(`('${el.metric}', true, '${mode}'), `); -// } -// }); -// metricsQueryString = metricsQueryString.slice(0, metricsQueryString.lastIndexOf(', ')).concat(';'); -// await client.query(metricsQueryString); -// return arr.length; -// }; - -// export default postgres; - - - -// // NPM package that gathers health information -// const { Client } = require('pg'); -// const alert = require('./alert'); -// const { collectHealthData } = require('./healthHelpers'); -// const dockerHelper = require('./dockerHelper') -// const utilities = require('./utilities'); - -// let client; - -// const postgres = {}; - -// /** -// * Initializes connection to PostgreSQL database using provided URI -// * @param {Object} database Contains DB type and DB URI -// */ -// postgres.connect = async ({ database }) => { -// try { -// // Connect to user's database -// client = new Client({ connectionString: database.URI }); -// await client.connect(); - -// // Print success message -// console.log('PostgreSQL database connected at ', database.URI.slice(0, 24), '...'); -// } catch ({ message }) { -// // Print error message -// console.log('Error connecting to PostgreSQL DB:', message); -// } -// }; - -// /** -// * Create services table with each entry representing a microservice -// * @param {string} microservice Microservice name -// * @param 
{number} interval Interval to collect data -// */ -// postgres.services = ({ microservice, interval }) => { -// // Create services table if does not exist -// client.query( -// `CREATE TABLE IF NOT EXISTS services ( -// _id SERIAL PRIMARY KEY NOT NULL, -// microservice VARCHAR(248) NOT NULL UNIQUE, -// interval INTEGER NOT NULL)`, -// (err, results) => { -// if (err) { -// throw err; -// } -// } -// ); - -// client.query( -// `CREATE TABLE IF NOT EXISTS metrics ( -// _id SERIAL PRIMARY KEY NOT NULL, -// metric TEXT NOT NULL UNIQUE, -// selected BOOLEAN, -// mode TEXT NOT NULL)`, -// (err, results) => { -// if (err) { -// throw err; -// } -// }); - -// // Insert microservice name and interval into services table -// const queryString = ` -// INSERT INTO services (microservice, interval) -// VALUES ($1, $2) -// ON CONFLICT (microservice) DO NOTHING;`; - -// const values = [microservice, interval]; - -// client.query(queryString, values, (err, result) => { -// if (err) { -// throw err; -// } -// console.log(`Microservice "${microservice}" recorded in services table`); -// }); -// }; - -// /** -// * Creates a communications table if one does not yet exist and -// * traces the request throughout its life cycle. Will send a notification -// * to the user if contact information is provided -// * @param {string} microservice Microservice name -// * @param {Object|undefined} slack Slack settings -// * @param {Object|undefined} email Email settings -// */ -// postgres.communications = ({ microservice, slack, email }) => { -// // Create communications table if one does not exist -// client.query( -// `CREATE TABLE IF NOT EXISTS communications( -// _id serial PRIMARY KEY, -// microservice VARCHAR(248) NOT NULL, -// endpoint varchar(248) NOT NULL, -// request varchar(16) NOT NULL, -// responsestatus INTEGER NOT NULL, -// responsemessage varchar(500) NOT NULL, -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, -// correlatingId varchar(500) -// )`, -// (err, results) => { -// if (err) { -// throw err; -// } -// } -// ); -// return (req, res, next) => { -// // ID persists throughout request lifecycle -// const correlatingId = res.getHeaders()['x-correlation-id']; - -// // Target endpoint -// const endpoint = req.originalUrl; - -// // HTTP Request Method -// const request = req.method; - -// const queryString = ` -// INSERT INTO communications (microservice, endpoint, request, responsestatus, responsemessage, correlatingId) -// VALUES ($1, $2, $3, $4, $5, $6);`; - -// // Waits for response to finish before pushing information into database -// res.on('finish', () => { -// if (res.statusCode >= 400) { -// if (slack) alert.sendSlack(res.statusCode, res.statusMessage, slack); -// if (email) alert.sendEmail(res.statusCode, res.statusMessage, email); -// } -// // Grabs status code from response object -// const responsestatus = res.statusCode; -// // Grabs status message from response object -// const responsemessage = res.statusMessage; -// const values = [ -// microservice, -// endpoint, -// request, -// responsestatus, -// responsemessage, -// correlatingId, -// ]; -// client.query(queryString, values, (err, result) => { -// if (err) { -// throw err; -// } -// console.log('Request cycle saved'); -// }); -// }); -// next(); -// }; -// }; - -// // Constructs a parameterized query string for inserting multiple data points into -// // the kafkametrics db based on the number of data points; -// function createQueryString(numRows, serviceName) { -// let query = ` -// INSERT INTO -// ${serviceName} (metric, value, 
category, time) -// VALUES -// `; -// for (let i = 0; i < numRows; i++) { -// const newRow = `($${4 * i + 1}, $${4 * i + 2}, $${4 * i + 3}, TO_TIMESTAMP($${4 * i + 4}))`; -// query = query.concat(newRow); -// if (i !== numRows - 1) query = query.concat(','); -// } -// query = query.concat(';'); -// return query; -// } - -// // Places the values being inserted into postgres into an array that will eventually -// // hydrate the parameterized query -// function createQueryArray(dataPointsArray, currentMetricNames) { -// const queryArray = []; -// for (const element of dataPointsArray) { -// queryArray.push(element.metric); -// queryArray.push(element.value); -// queryArray.push(element.category); -// queryArray.push(element.time / 1000); -// // Converts milliseconds to seconds to work with postgres -// } -// return queryArray; -// } - -// /** -// * Read and store microservice health information in postgres database at every interval -// * @param {string} microservice Microservice name -// * @param {number} interval Interval for continuous data collection -// */ -// postgres.health = async ({ microservice, interval, mode }) => { -// let l = 0; -// const currentMetricNames = {}; - -// l = await postgres.getSavedMetricsLength(mode, currentMetricNames); - -// // Create table for the microservice if it doesn't exist yet -// const createTableQuery = ` -// CREATE TABLE IF NOT EXISTS ${microservice} ( -// _id SERIAL PRIMARY KEY, -// metric VARCHAR(200), -// value FLOAT DEFAULT 0.0, -// category VARCHAR(200) DEFAULT 'event', -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP -// );`; - -// client -// .query(createTableQuery) -// .catch(err => console.log('Error creating health table in PostgreSQL:\n', err)); - -// // Save data point at every interval (ms) -// setInterval(() => { -// collectHealthData() -// .then(async (data) => { -// if (l !== data.length) { -// l = await postgres.addMetrics(data, mode, currentMetricNames); -// } -// const documents = data.filter(el => (el.metric in currentMetricNames)); -// const numRows = documents.length; -// const queryString = createQueryString(numRows, microservice); -// const queryArray = createQueryArray(documents); -// // console.log('POSTGRES QUERY STRING: ', queryString); -// // console.log('POSTGRES QUERY ARRAY', queryArray); -// return client.query(queryString, queryArray); -// }) -// .then(() => console.log('Health data recorded in PostgreSQL')) -// .catch(err => console.log('Error inserting health data into PostgreSQL:\n', err)); -// }, interval); -// }; - -// /** -// * !Runs instead of health for docker -// * If dockerized is true, this function is invoked -// * Collects information on the container -// */ -// postgres.docker = function ({ microservice, interval }) { -// // Create a table if it doesn't already exist. 
-// client.query( -// `CREATE TABLE IF NOT EXISTS containerInfo( -// _id serial PRIMARY KEY, -// microservice varchar(500) NOT NULL, -// containerName varchar(500) NOT NULL, -// containerId varchar(500) NOT NULL, -// containerPlatform varchar(500), -// containerStartTime varchar(500), -// containerMemUsage real DEFAULT 0, -// containerMemLimit real DEFAULT 0, -// containerMemPercent real DEFAULT 0, -// containerCpuPercent real DEFAULT 0, -// networkReceived real DEFAULT 0, -// networkSent real DEFAULT 0, -// containerProcessCount integer DEFAULT 0, -// containerRestartCount integer DEFAULT 0 -// )`, -// function (err, results) { -// if (err) throw err; -// } -// ); - -// dockerHelper.getDockerContainer(microservice) -// .then((containerData) => { -// setInterval(() => { -// dockerHelper.readDockerContainer(containerData) -// .then((data) => { -// let queryString = -// `INSERT INTO containerInfo( -// microservice, -// containerName, -// containerId, -// containerPlatform, -// containerStartTime, -// containerMemUsage, -// containerMemLimit, -// containerMemPercent, -// containerCpuPercent, -// networkReceived, -// networkSent, -// containerProcessCount, -// containerRestartCount) -// VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13 -// )`; - -// let values = [ -// microservice, -// data.containername, -// data.containerid, -// data.platform, -// data.starttime, -// data.memoryusage, -// data.memorylimit, -// data.memorypercent, -// data.cpupercent, -// data.networkreceived, -// data.networksent, -// data.processcount, -// data.restartcount, -// ]; - -// client.query(queryString, values, function (err, results) { -// if (err) throw err; -// console.log(`Docker data recorded in SQL table containerInfo`); -// }); -// }) -// }, interval) -// }) - -// .catch((error) => { -// if (error.constructor.name === 'Error') throw error -// else throw new Error(error); -// }) -// } - - -// postgres.serverQuery = (config) => { -// postgres.saveService(config); -// postgres.setQueryOnInterval(config); -// } - - -// postgres.saveService = (config) => { -// let service; -// if (config.mode === 'kakfa') service = 'kafkametrics'; -// else if (config.mode === 'kubernetes') service = 'kubernetesmetrics'; -// else throw new Error('Unrecognized mode'); - -// postgres.services({ microservice: service, interval: config.interval }); - -// // create kafkametrics table if it does not exist -// const createTableQuery = ` -// CREATE TABLE IF NOT EXISTS ${service} ( -// _id SERIAL PRIMARY KEY, -// metric VARCHAR(200), -// value FLOAT DEFAULT 0.0, -// category VARCHAR(200) DEFAULT 'event', -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP -// );`; - -// client.query(createTableQuery) -// .catch(err => console.log(`Error creating ${service} table in PostgreSQL:\n`, err)); -// } - - -// postgres.setQueryOnInterval = async (config) => { -// let service; -// let metricsQuery; -// let currentMetrics; -// let l = 0; -// const currentMetricNames = {}; - -// if (config.mode === 'kakfa') { -// service = 'kafkametrics' -// metricsQuery = utilities.kafkaMetricsQuery; -// } else if (config.mode === 'kubernetes') { -// service = 'kubernetesmetrics'; -// metricsQuery = utilities.promMetricsQuery; -// } else { -// throw new Error('Unrecognized mode') -// }; - -// currentMetrics = await client.query(`SELECT * FROM metrics WHERE mode='${config.mode}';`); -// currentMetrics = currentMetrics.rows; -// // currentMetrics is -// // [ -// // { _id: 1, metric: 'testmetric', selected: true, mode: 'kubernetes' } -// // ] -// if 
(currentMetrics.length > 0) { -// currentMetrics.forEach(el => { -// const { metric, selected } = el; -// currentMetricNames[metric] = selected; -// l = currentMetrics.length; -// }) -// } - -// setInterval(() => { -// metricsQuery(config) -// .then(async (parsedArray) => { -// if (l !== parsedArray.length) { -// l = await postgres.addMetrics(parsedArray, config.mode, currentMetricNames); -// } -// const documents = []; -// for (const metric of parsedArray) { -// if (currentMetricNames[metric.metric]) documents.push(metric) -// } -// const numDataPoints = documents.length; -// const queryString = createQueryString(numDataPoints, service); -// const queryArray = createQueryArray(documents); -// return client.query(queryString, queryArray); -// }) -// .then(() => console.log(`${config.mode} metrics recorded in PostgreSQL`)) -// .catch(err => console.log(`Error inserting ${config.mode} metrics into PostgreSQL:`, '\n', err)); -// }, config.interval); -// } - -// postgres.getSavedMetricsLength = async (mode, currentMetricNames) => { -// let currentMetrics = await client.query(`SELECT * FROM metrics WHERE mode='${mode}';`); -// if (currentMetrics.rows.length > 0) { -// currentMetrics.rows.forEach(el => { -// const { metric, selected } = el; -// currentMetricNames[metric] = selected; -// }) -// } -// return currentMetrics.rows.length ? currentMetrics.rows.length : 0; -// } - -// postgres.addMetrics = async (arr, mode, currentMetricNames) => { -// let metricsQueryString = 'INSERT INTO metrics (metric, selected, mode) VALUES '; -// arr.forEach(el => { -// if (!(el.metric in currentMetricNames)) { -// currentMetricNames[el.metric] = true; -// metricsQueryString = metricsQueryString.concat(`('${el.metric}', true, '${mode}'), `); -// } -// }) -// metricsQueryString = metricsQueryString.slice(0, metricsQueryString.lastIndexOf(', ')).concat(';'); -// await client.query(metricsQueryString); -// return arr.length; -// } - -// module.exports = postgres; - - -// // NPM package that gathers health information -// const { Client } = require('pg'); -// const alert = require('./alert'); -// const { collectHealthData } = require('./healthHelpers'); -// const dockerHelper = require('./dockerHelper') -// // const utilities = require('./utilities'); -// import utilities from './utilities.js'; - -// let client; - -// const postgres = {}; - -// /** -// * Initializes connection to PostgreSQL database using provided URI -// * @param {Object} database Contains DB type and DB URI -// */ -// postgres.connect = async ({ database }) => { -// try { -// // Connect to user's database -// client = new Client({ connectionString: database.URI }); -// await client.connect(); - -// // Print success message -// console.log('PostgreSQL database connected at ', database.URI.slice(0, 24), '...'); -// } catch ({ message }) { -// // Print error message -// console.log('Error connecting to PostgreSQL DB:', message); -// } -// }; - -// /** -// * Create services table with each entry representing a microservice -// * @param {string} microservice Microservice name -// * @param {number} interval Interval to collect data -// */ -// postgres.services = ({ microservice, interval }) => { -// // Create services table if does not exist -// client.query( -// `CREATE TABLE IF NOT EXISTS services ( -// _id SERIAL PRIMARY KEY NOT NULL, -// microservice VARCHAR(248) NOT NULL UNIQUE, -// interval INTEGER NOT NULL)`, -// (err, results) => { -// if (err) { -// throw err; -// } -// } -// ); - -// client.query( -// `CREATE TABLE IF NOT EXISTS metrics ( -// 
_id SERIAL PRIMARY KEY NOT NULL, -// metric TEXT NOT NULL UNIQUE, -// selected BOOLEAN, -// mode TEXT NOT NULL)`, -// (err, results) => { -// if (err) { -// throw err; -// } -// }); - -// // Insert microservice name and interval into services table -// const queryString = ` -// INSERT INTO services (microservice, interval) -// VALUES ($1, $2) -// ON CONFLICT (microservice) DO NOTHING;`; - -// const values = [microservice, interval]; - -// client.query(queryString, values, (err, result) => { -// if (err) { -// throw err; -// } -// console.log(`Microservice "${microservice}" recorded in services table`); -// }); -// }; - -// /** -// * Creates a communications table if one does not yet exist and -// * traces the request throughout its life cycle. Will send a notification -// * to the user if contact information is provided -// * @param {string} microservice Microservice name -// * @param {Object|undefined} slack Slack settings -// * @param {Object|undefined} email Email settings -// */ -// postgres.communications = ({ microservice, slack, email }) => { -// // Create communications table if one does not exist -// client.query( -// `CREATE TABLE IF NOT EXISTS communications( -// _id serial PRIMARY KEY, -// microservice VARCHAR(248) NOT NULL, -// endpoint varchar(248) NOT NULL, -// request varchar(16) NOT NULL, -// responsestatus INTEGER NOT NULL, -// responsemessage varchar(500) NOT NULL, -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, -// correlatingId varchar(500) -// )`, -// (err, results) => { -// if (err) { -// throw err; -// } -// } -// ); -// return (req, res, next) => { -// // ID persists throughout request lifecycle -// const correlatingId = res.getHeaders()['x-correlation-id']; - -// // Target endpoint -// const endpoint = req.originalUrl; - -// // HTTP Request Method -// const request = req.method; - -// const queryString = ` -// INSERT INTO communications (microservice, endpoint, request, responsestatus, responsemessage, correlatingId) -// VALUES ($1, $2, $3, $4, $5, $6);`; - -// // Waits for response to finish before pushing information into database -// res.on('finish', () => { -// if (res.statusCode >= 400) { -// if (slack) alert.sendSlack(res.statusCode, res.statusMessage, slack); -// if (email) alert.sendEmail(res.statusCode, res.statusMessage, email); -// } -// // Grabs status code from response object -// const responsestatus = res.statusCode; -// // Grabs status message from response object -// const responsemessage = res.statusMessage; -// const values = [ -// microservice, -// endpoint, -// request, -// responsestatus, -// responsemessage, -// correlatingId, -// ]; -// client.query(queryString, values, (err, result) => { -// if (err) { -// throw err; -// } -// console.log('Request cycle saved'); -// }); -// }); -// next(); -// }; -// }; - -// // Constructs a parameterized query string for inserting multiple data points into -// // the kafkametrics db based on the number of data points; -// function createQueryString(numRows, serviceName) { -// let query = ` -// INSERT INTO -// ${serviceName} (metric, value, category, time) -// VALUES -// `; -// for (let i = 0; i < numRows; i++) { -// const newRow = `($${4 * i + 1}, $${4 * i + 2}, $${4 * i + 3}, TO_TIMESTAMP($${4 * i + 4}))`; -// query = query.concat(newRow); -// if (i !== numRows - 1) query = query.concat(','); -// } -// query = query.concat(';'); -// return query; -// } - -// // Places the values being inserted into postgres into an array that will eventually -// // hydrate the parameterized query -// function 
createQueryArray(dataPointsArray, currentMetricNames) { -// const queryArray = []; -// for (const element of dataPointsArray) { -// queryArray.push(element.metric); -// queryArray.push(element.value); -// queryArray.push(element.category); -// queryArray.push(element.time / 1000); -// // Converts milliseconds to seconds to work with postgres -// } -// return queryArray; -// } - -// /** -// * Read and store microservice health information in postgres database at every interval -// * @param {string} microservice Microservice name -// * @param {number} interval Interval for continuous data collection -// */ -// postgres.health = async ({ microservice, interval, mode }) => { -// let l = 0; -// const currentMetricNames = {}; - -// l = await postgres.getSavedMetricsLength(mode, currentMetricNames); - -// // Create table for the microservice if it doesn't exist yet -// const createTableQuery = ` -// CREATE TABLE IF NOT EXISTS ${microservice} ( -// _id SERIAL PRIMARY KEY, -// metric VARCHAR(200), -// value FLOAT DEFAULT 0.0, -// category VARCHAR(200) DEFAULT 'event', -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP -// );`; - -// client -// .query(createTableQuery) -// .catch(err => console.log('Error creating health table in PostgreSQL:\n', err)); - -// // Save data point at every interval (ms) -// setInterval(() => { -// collectHealthData() -// .then(async (data) => { -// if (l !== data.length) { -// l = await postgres.addMetrics(data, mode, currentMetricNames); -// } -// const documents = data.filter(el => (el.metric in currentMetricNames)); -// const numRows = documents.length; -// const queryString = createQueryString(numRows, microservice); -// const queryArray = createQueryArray(documents); -// // console.log('POSTGRES QUERY STRING: ', queryString); -// // console.log('POSTGRES QUERY ARRAY', queryArray); -// return client.query(queryString, queryArray); -// }) -// .then(() => console.log('Health data recorded in PostgreSQL')) -// .catch(err => console.log('Error inserting health data into PostgreSQL:\n', err)); -// }, interval); -// }; - -// /** -// * !Runs instead of health for docker -// * If dockerized is true, this function is invoked -// * Collects information on the container -// */ -// postgres.docker = function ({ microservice, interval }) { -// // Create a table if it doesn't already exist. 
-// client.query( -// `CREATE TABLE IF NOT EXISTS containerInfo( -// _id serial PRIMARY KEY, -// microservice varchar(500) NOT NULL, -// containerName varchar(500) NOT NULL, -// containerId varchar(500) NOT NULL, -// containerPlatform varchar(500), -// containerStartTime varchar(500), -// containerMemUsage real DEFAULT 0, -// containerMemLimit real DEFAULT 0, -// containerMemPercent real DEFAULT 0, -// containerCpuPercent real DEFAULT 0, -// networkReceived real DEFAULT 0, -// networkSent real DEFAULT 0, -// containerProcessCount integer DEFAULT 0, -// containerRestartCount integer DEFAULT 0 -// )`, -// function (err, results) { -// if (err) throw err; -// } -// ); - -// dockerHelper.getDockerContainer(microservice) -// .then((containerData) => { -// setInterval(() => { -// dockerHelper.readDockerContainer(containerData) -// .then((data) => { -// let queryString = -// `INSERT INTO containerInfo( -// microservice, -// containerName, -// containerId, -// containerPlatform, -// containerStartTime, -// containerMemUsage, -// containerMemLimit, -// containerMemPercent, -// containerCpuPercent, -// networkReceived, -// networkSent, -// containerProcessCount, -// containerRestartCount) -// VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13 -// )`; - -// let values = [ -// microservice, -// data.containername, -// data.containerid, -// data.platform, -// data.starttime, -// data.memoryusage, -// data.memorylimit, -// data.memorypercent, -// data.cpupercent, -// data.networkreceived, -// data.networksent, -// data.processcount, -// data.restartcount, -// ]; - -// client.query(queryString, values, function (err, results) { -// if (err) throw err; -// console.log(`Docker data recorded in SQL table containerInfo`); -// }); -// }) -// }, interval) -// }) - -// .catch((error) => { -// if (error.constructor.name === 'Error') throw error -// else throw new Error(error); -// }) -// } - - -// postgres.serverQuery = (config) => { -// postgres.saveService(config); -// postgres.setQueryOnInterval(config); -// } - - -// postgres.saveService = (config) => { -// let service; -// if (config.mode === 'kakfa') service = 'kafkametrics'; -// else if (config.mode === 'kubernetes') service = 'kubernetesmetrics'; -// else throw new Error('Unrecognized mode'); - -// postgres.services({ microservice: service, interval: config.interval }); - -// // create kafkametrics table if it does not exist -// const createTableQuery = ` -// CREATE TABLE IF NOT EXISTS ${service} ( -// _id SERIAL PRIMARY KEY, -// metric VARCHAR(200), -// value FLOAT DEFAULT 0.0, -// category VARCHAR(200) DEFAULT 'event', -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP -// );`; - -// client.query(createTableQuery) -// .catch(err => console.log(`Error creating ${service} table in PostgreSQL:\n`, err)); -// } - - -// postgres.setQueryOnInterval = async (config) => { -// let service; -// let metricsQuery; -// let currentMetrics; -// let l = 0; -// const currentMetricNames = {}; - -// if (config.mode === 'kakfa') { -// service = 'kafkametrics' -// metricsQuery = utilities.kafkaMetricsQuery; -// } else if (config.mode === 'kubernetes') { -// service = 'kubernetesmetrics'; -// metricsQuery = utilities.promMetricsQuery; -// } else { -// throw new Error('Unrecognized mode') -// }; - -// currentMetrics = await client.query(`SELECT * FROM metrics WHERE mode='${config.mode}';`); -// currentMetrics = currentMetrics.rows; -// // currentMetrics is -// // [ -// // { _id: 1, metric: 'testmetric', selected: true, mode: 'kubernetes' } -// // ] -// if 
(currentMetrics.length > 0) { -// currentMetrics.forEach(el => { -// const { metric, selected } = el; -// currentMetricNames[metric] = selected; -// l = currentMetrics.length; -// }) -// } - -// setInterval(() => { -// metricsQuery(config) -// .then(async (parsedArray) => { -// if (l !== parsedArray.length) { -// l = await postgres.addMetrics(parsedArray, config.mode, currentMetricNames); -// } -// const documents = []; -// for (const metric of parsedArray) { -// if (currentMetricNames[metric.metric]) documents.push(metric) -// } -// const numDataPoints = documents.length; -// const queryString = createQueryString(numDataPoints, service); -// const queryArray = createQueryArray(documents); -// return client.query(queryString, queryArray); -// }) -// .then(() => console.log(`${config.mode} metrics recorded in PostgreSQL`)) -// .catch(err => console.log(`Error inserting ${config.mode} metrics into PostgreSQL:`, '\n', err)); -// }, config.interval); -// } - -// postgres.getSavedMetricsLength = async (mode, currentMetricNames) => { -// let currentMetrics = await client.query(`SELECT * FROM metrics WHERE mode='${mode}';`); -// if (currentMetrics.rows.length > 0) { -// currentMetrics.rows.forEach(el => { -// const { metric, selected } = el; -// currentMetricNames[metric] = selected; -// }) -// } -// return currentMetrics.rows.length ? currentMetrics.rows.length : 0; -// } - -// postgres.addMetrics = async (arr, mode, currentMetricNames) => { -// let metricsQueryString = 'INSERT INTO metrics (metric, selected, mode) VALUES '; -// arr.forEach(el => { -// if (!(el.metric in currentMetricNames)) { -// currentMetricNames[el.metric] = true; -// metricsQueryString = metricsQueryString.concat(`('${el.metric}', true, '${mode}'), `); -// } -// }) -// metricsQueryString = metricsQueryString.slice(0, metricsQueryString.lastIndexOf(', ')).concat(';'); -// await client.query(metricsQueryString); -// return arr.length; -// } - -// module.exports = postgres; -// File: Postgres.ts - -// Import the pg package as a default export and destructure Client +// Import the pg package and extract the Client class import pkg from 'pg'; const { Client } = pkg; -// Import local modules using require (if these modules haven't been migrated to ESM) -// If you later migrate these to ESM, you can update these imports accordingly. -// const alertModule = require('./alert.js'); -// const { collectHealthData } = require('./healthHelpers.js'); -// const dockerHelper = require('./dockerHelper.js'); -// const utilities = require('./utilities.js'); - -import alertModule from './alert.js'; -// import { collectHealthData } from './healthHelpers.js'; -import healthHelpers from './healthHelpers.js'; -// then use healthHelpers.collectHealthData() +// Import local modules required for additional functionality +import alertModule from './alert.js'; // Handles notifications (Slack/Email alerts) +import healthHelpers from './healthHelpers.js'; // Collects system health data +import dockerHelper from './dockerHelper.js'; // Collects Docker container metrics +import utilities from './utilities.js'; // Contains helper functions -import dockerHelper from './dockerHelper.js'; -import utilities from './utilities.js'; +let client: any; // Stores the PostgreSQL client connection -let client: any; - -// In this example we type postgres as any. Later you might define a proper interface. 
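// --- Editor's illustrative sketch (not part of the diff) ---
// The removed note above mentions replacing the `any` typing with a proper interface.
// One possible shape, inferred from the method signatures added below; the interface
// and type names here are hypothetical and not part of this changeset.
interface ChronosDatabaseConfig {
  URI: string; // PostgreSQL connection string
}

interface PostgresController {
  connect(config: { database: ChronosDatabaseConfig }): Promise<void>;
  services(config: { microservice: string; interval: number }): void;
  communications(config: { microservice: string; slack?: unknown; email?: unknown }): (req: any, res: any, next: any) => void;
  health(config: { microservice: string; interval: number; mode: string }): Promise<void>;
}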
+// Define a container for all PostgreSQL helper functions
 const postgres: any = {};

 /**
- * Initializes connection to PostgreSQL database using provided URI
- * @param database Contains DB type and DB URI
+ * Connects to PostgreSQL Database
+ * - Uses the provided database URI from config
 */
 postgres.connect = async ({ database }: { database: { URI: string } }): Promise<void> => {
   try {
-    // Connect to user's database
-    client = new Client({ connectionString: database.URI });
-    await client.connect();
-
-    // Print success message
-    console.log('PostgreSQL database connected at ', database.URI.slice(0, 24), '...');
+    client = new Client({ connectionString: database.URI }); // Initialize PostgreSQL client
+    await client.connect(); // Connect to the database
+    console.log('✅ PostgreSQL database connected at', database.URI.slice(0, 24), '...');
   } catch (error: any) {
-    // Print error message
-    console.log('Error connecting to PostgreSQL DB:', error.message);
+    console.log('❌ Error connecting to PostgreSQL DB:', error.message);
   }
 };

 /**
- * Create services table with each entry representing a microservice.
- * @param microservice Microservice name
- * @param interval Interval to collect data
+ * Creates Services Table if it doesn't exist
+ * - Stores microservice names and their data collection intervals
 */
 postgres.services = ({ microservice, interval }: { microservice: string; interval: number }): void => {
-  // Create services table if it does not exist
+  // Create services table if it does not exist
   client.query(
     `CREATE TABLE IF NOT EXISTS services (
       _id SERIAL PRIMARY KEY NOT NULL,
       microservice VARCHAR(248) NOT NULL UNIQUE,
       interval INTEGER NOT NULL)`,
-    (err: any, results: any) => {
+    (err: any) => {
       if (err) throw err;
     }
   );

+  // Create table for metrics if it doesn't exist
   client.query(
     `CREATE TABLE IF NOT EXISTS metrics (
       _id SERIAL PRIMARY KEY NOT NULL,
       metric TEXT NOT NULL UNIQUE,
       selected BOOLEAN,
       mode TEXT NOT NULL)`,
-    (err: any, results: any) => {
+    (err: any) => {
       if (err) throw err;
     }
   );

-  // Insert microservice name and interval into services table
+  // Insert microservice into services table
   const queryString = `
     INSERT INTO services (microservice, interval)
     VALUES ($1, $2)
     ON CONFLICT (microservice) DO NOTHING;`;
-
   const values = [microservice, interval];
-  client.query(queryString, values, (err: any, result: any) => {
+  client.query(queryString, values, (err: any) => {
     if (err) throw err;
-    console.log(`Microservice "${microservice}" recorded in services table`);
+    console.log(`✅ Microservice "${microservice}" recorded in services table`);
   });
 };
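// --- Editor's illustrative sketch (not part of the diff) ---
// A minimal example of how the connect() and services() helpers above might be wired up
// by a consuming service. The import path, config shape, and URI are assumptions made
// purely for illustration; they are not defined by this changeset.
import postgres from './postgres.js'; // hypothetical path to this controller

async function exampleBootstrap(): Promise<void> {
  const exampleConfig = {
    microservice: 'orders-service', // name recorded in the services table
    interval: 60000, // collect a data point every 60 seconds
    database: { URI: 'postgres://user:password@localhost:5432/chronos' }, // placeholder connection string
  };

  // Open the connection first, then register the microservice in the services table
  await postgres.connect({ database: exampleConfig.database });
  postgres.services({ microservice: exampleConfig.microservice, interval: exampleConfig.interval });
}

exampleBootstrap().catch(console.error);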
 /**
- * Creates a communications table if one does not yet exist and
- * traces the request throughout its life cycle. Will send a notification
- * to the user if contact information is provided.
- * @param microservice Microservice name
- * @param slack Slack settings (optional)
- * @param email Email settings (optional)
+ * Creates Communications Table and Tracks Requests
+ * - Logs request/response details for monitoring
+ * - Sends notifications via Slack or Email if the response status is an error
 */
 postgres.communications = ({ microservice, slack, email }: { microservice: string; slack?: any; email?: any }) => {
   // Create communications table if one does not exist
@@ -2162,36 +86,33 @@ postgres.communications = ({ microservice, slack, email }: { microservice: strin
       time TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
       correlatingId varchar(500)
     )`,
-    (err: any, results: any) => {
+    (err: any) => {
       if (err) throw err;
     }
   );
   return (req: any, res: any, next: any) => {
-    // ID persists throughout request lifecycle
     const correlatingId = res.getHeaders()['x-correlation-id'];
-
-    // Target endpoint
    const endpoint = req.originalUrl;
-    // HTTP Request Method
    const request = req.method;

    const queryString = `
      INSERT INTO communications (microservice, endpoint, request, responsestatus, responsemessage, correlatingId)
      VALUES ($1, $2, $3, $4, $5, $6);`;

-    // Wait for the response to finish before inserting the record
    res.on('finish', () => {
      if (res.statusCode >= 400) {
        if (slack) alertModule.sendSlack(res.statusCode, res.statusMessage, slack);
        if (email) alertModule.sendEmail(res.statusCode, res.statusMessage, email);
      }
+
      const responsestatus = res.statusCode;
      const responsemessage = res.statusMessage;
      const values = [microservice, endpoint, request, responsestatus, responsemessage, correlatingId];
-      client.query(queryString, values, (err: any, result: any) => {
+
+      client.query(queryString, values, (err: any) => {
        if (err) throw err;
-        console.log('Request cycle saved');
+        console.log('✅ Request cycle logged in communications table');
      });
    });
    next();
@@ -2206,10 +127,8 @@ postgres.communications = ({ microservice, slack, email }: { microservice: strin
 */
 function createQueryString(numRows: number, serviceName: string): string {
   let query = `
-    INSERT INTO
-    ${serviceName} (metric, value, category, time)
-    VALUES
-    `;
+    INSERT INTO ${serviceName} (metric, value, category, time) VALUES `;
+
   for (let i = 0; i < numRows; i++) {
     const newRow = `($${4 * i + 1}, $${4 * i + 2}, $${4 * i + 3}, TO_TIMESTAMP($${4 * i + 4}))`;
     query = query.concat(newRow);
@@ -2220,53 +139,45 @@ function createQueryString(numRows: number, serviceName: string): string {
 }

 /**
- * Constructs an array of values to be used with the parameterized query.
+ * Constructs an array of values to be used with the parameterized query.
  * @param dataPointsArray Array of data point objects
  * @returns Array of values
  */
 function createQueryArray(dataPointsArray: any[]): (string | number)[] {
-  const queryArray: (string | number)[] = [];
-  for (const element of dataPointsArray) {
-    queryArray.push(element.metric);
-    queryArray.push(element.value);
-    queryArray.push(element.category);
-    queryArray.push(element.time / 1000); // Convert milliseconds to seconds for PostgreSQL
-  }
-  return queryArray;
+  return dataPointsArray.flatMap(el => [
+    el.metric,
+    el.value,
+    el.category,
+    el.time / 1000, // Convert milliseconds to seconds for PostgreSQL
+  ]);
 }
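// --- Editor's illustrative sketch (not part of the diff) ---
// What the two helpers above would produce for two hypothetical data points; the metric
// names, values, and the 'orders' table name below are invented for illustration.
const examplePoints = [
  { metric: 'cpu_load', value: 0.42, category: 'event', time: 1700000000000 },
  { metric: 'memory_rss', value: 123456789, category: 'event', time: 1700000000000 },
];
// createQueryString(2, 'orders') would yield (whitespace aside):
//   INSERT INTO orders (metric, value, category, time) VALUES
//   ($1, $2, $3, TO_TIMESTAMP($4)),($5, $6, $7, TO_TIMESTAMP($8));
// createQueryArray(examplePoints) would yield:
//   ['cpu_load', 0.42, 'event', 1700000000, 'memory_rss', 123456789, 'event', 1700000000]
// i.e. one flat array whose entries line up with the $n placeholders, with times converted to seconds.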
"kafka", "kubernetes") + * Reads and stores microservice health information in PostgreSQL at set intervals. */ -postgres.health = async ({ microservice, interval, mode }: { microservice: string; interval: number; mode: string }): Promise => { - let l = 0; +postgres.health = async ({ microservice, interval, mode }: { microservice: string; interval: number; mode: string }) => { + let length = 0; const currentMetricNames: { [key: string]: boolean } = {}; + length = await postgres.getSavedMetricsLength(mode, currentMetricNames); - l = await postgres.getSavedMetricsLength(mode, currentMetricNames); - - // Create table for the microservice if it doesn't exist yet - const createTableQuery = ` - CREATE TABLE IF NOT EXISTS ${microservice} ( + client.query( + `CREATE TABLE IF NOT EXISTS ${microservice} ( _id SERIAL PRIMARY KEY, metric VARCHAR(200), value FLOAT DEFAULT 0.0, category VARCHAR(200) DEFAULT 'event', time TIMESTAMP DEFAULT CURRENT_TIMESTAMP - );`; - - client.query(createTableQuery).catch((err: any) => - console.log('Error creating health table in PostgreSQL:\n', err) + );`, + (err: any) => { + if (err) console.log('❌ Error creating health table:', err); + } ); - // Save data point at every interval (ms) setInterval(() => { healthHelpers.collectHealthData() .then(async (data: any[]) => { - if (l !== data.length) { - l = await postgres.addMetrics(data, mode, currentMetricNames); + if (length !== data.length) { + length = await postgres.addMetrics(data, mode, currentMetricNames); } const documents = data.filter(el => el.metric in currentMetricNames); const numRows = documents.length; @@ -2274,201 +185,9 @@ postgres.health = async ({ microservice, interval, mode }: { microservice: strin const queryArray = createQueryArray(documents); return client.query(queryString, queryArray); }) - .then(() => console.log('Health data recorded in PostgreSQL')) - .catch((err: any) => console.log('Error inserting health data into PostgreSQL:\n', err)); + .then(() => console.log('βœ… Health data recorded in PostgreSQL')) + .catch((err: any) => console.log('❌ Error inserting health data:', err)); }, interval); }; -/** - * Runs instead of health when dockerized. - * Collects container information. 
- * @param microservice Microservice name - * @param interval Interval (ms) to collect docker data - */ -postgres.docker = function ({ microservice, interval }: { microservice: string; interval: number }): void { - // Create containerInfo table if it does not exist - client.query( - `CREATE TABLE IF NOT EXISTS containerInfo( - _id serial PRIMARY KEY, - microservice varchar(500) NOT NULL, - containerName varchar(500) NOT NULL, - containerId varchar(500) NOT NULL, - containerPlatform varchar(500), - containerStartTime varchar(500), - containerMemUsage real DEFAULT 0, - containerMemLimit real DEFAULT 0, - containerMemPercent real DEFAULT 0, - containerCpuPercent real DEFAULT 0, - networkReceived real DEFAULT 0, - networkSent real DEFAULT 0, - containerProcessCount integer DEFAULT 0, - containerRestartCount integer DEFAULT 0 - )`, - (err: any, results: any) => { - if (err) throw err; - } - ); - - dockerHelper - .getDockerContainer(microservice) - .then((containerData: any) => { - setInterval(() => { - dockerHelper - .readDockerContainer(containerData) - .then((data: any) => { - const queryString = ` - INSERT INTO containerInfo( - microservice, - containerName, - containerId, - containerPlatform, - containerStartTime, - containerMemUsage, - containerMemLimit, - containerMemPercent, - containerCpuPercent, - networkReceived, - networkSent, - containerProcessCount, - containerRestartCount - ) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)`; - const values = [ - microservice, - data.containername, - data.containerid, - data.platform, - data.starttime, - data.memoryusage, - data.memorylimit, - data.memorypercent, - data.cpupercent, - data.networkreceived, - data.networksent, - data.processcount, - data.restartcount, - ]; - - client.query(queryString, values, (err: any, results: any) => { - if (err) throw err; - console.log(`Docker data recorded in SQL table containerInfo`); - }); - }) - .catch((err: any) => console.log('Error reading docker container:', err)); - }, interval); - }) - .catch((error: any) => { - if (error.constructor.name === 'Error') throw error; - else throw new Error(error); - }); -}; - -postgres.serverQuery = (config: any): void => { - postgres.saveService(config); - postgres.setQueryOnInterval(config); -}; - -postgres.saveService = (config: any): void => { - let service: string; - if (config.mode === 'kakfa') service = 'kafkametrics'; - else if (config.mode === 'kubernetes') service = 'kubernetesmetrics'; - else throw new Error('Unrecognized mode'); - - postgres.services({ microservice: service, interval: config.interval }); - - // Create service table if it does not exist - const createTableQuery = ` - CREATE TABLE IF NOT EXISTS ${service} ( - _id SERIAL PRIMARY KEY, - metric VARCHAR(200), - value FLOAT DEFAULT 0.0, - category VARCHAR(200) DEFAULT 'event', - time TIMESTAMP DEFAULT CURRENT_TIMESTAMP - );`; - - client.query(createTableQuery).catch((err: any) => - console.log(`Error creating ${service} table in PostgreSQL:\n`, err) - ); -}; - -postgres.setQueryOnInterval = async (config: any): Promise => { - let service: string; - let metricsQuery: any; - let currentMetrics: any; - let l = 0; - const currentMetricNames: { [key: string]: boolean } = {}; - - if (config.mode === 'kakfa') { - service = 'kafkametrics'; - metricsQuery = utilities.helpers.kafkaMetricsQuery; - } else if (config.mode === 'kubernetes') { - service = 'kubernetesmetrics'; - metricsQuery = utilities.helpers.promMetricsQuery; - } else { - throw new Error('Unrecognized mode'); - } - - 
currentMetrics = await client.query(`SELECT * FROM metrics WHERE mode='${config.mode}';`); - currentMetrics = currentMetrics.rows; - if (currentMetrics.length > 0) { - currentMetrics.forEach((el: any) => { - const { metric, selected } = el; - currentMetricNames[metric] = selected; - l = currentMetrics.length; - }); - } - - setInterval(() => { - metricsQuery(config) - .then(async (parsedArray: any[]) => { - if (l !== parsedArray.length) { - l = await postgres.addMetrics(parsedArray, config.mode, currentMetricNames); - } - const documents: any[] = []; - for (const metric of parsedArray) { - if (currentMetricNames[metric.metric]) documents.push(metric); - } - const numRows = documents.length; - const queryString = createQueryString(numRows, service); - const queryArray = createQueryArray(documents); - return client.query(queryString, queryArray); - }) - .then(() => console.log(`${config.mode} metrics recorded in PostgreSQL`)) - .catch((err: any) => - console.log(`Error inserting ${config.mode} metrics into PostgreSQL:\n`, err) - ); - }, config.interval); -}; - -postgres.getSavedMetricsLength = async ( - mode: string, - currentMetricNames: { [key: string]: boolean } -): Promise => { - let currentMetrics = await client.query(`SELECT * FROM metrics WHERE mode='${mode}';`); - if (currentMetrics.rows.length > 0) { - currentMetrics.rows.forEach((el: any) => { - const { metric, selected } = el; - currentMetricNames[metric] = selected; - }); - } - return currentMetrics.rows.length || 0; -}; - -postgres.addMetrics = async ( - arr: any[], - mode: string, - currentMetricNames: { [key: string]: boolean } -): Promise => { - let metricsQueryString = 'INSERT INTO metrics (metric, selected, mode) VALUES '; - arr.forEach((el: any) => { - if (!(el.metric in currentMetricNames)) { - currentMetricNames[el.metric] = true; - metricsQueryString = metricsQueryString.concat(`('${el.metric}', true, '${mode}'), `); - } - }); - metricsQueryString = metricsQueryString.slice(0, metricsQueryString.lastIndexOf(', ')).concat(';'); - await client.query(metricsQueryString); - return arr.length; -}; - export default postgres; diff --git a/chronos_npm_package/controllers/utilities.ts b/chronos_npm_package/controllers/utilities.ts index bd914e3f7..52f8b7cdd 100644 --- a/chronos_npm_package/controllers/utilities.ts +++ b/chronos_npm_package/controllers/utilities.ts @@ -1,2094 +1,3 @@ -// // const axios = require('axios').default; -// import axios from 'axios'; - -// import { createGrafanaPanelObject, updateGrafanaPanelObject }from './GrafanaPanel'; -// /** -// * User Config object { -// microservice: string - Name of the microservice. Will be used as a table name in the chronos's db -// interval: number - The number of milliseconds between every collection of data -// dockerized: boolean - Should be set to true if the service is running inside of a container -// jmxuri: string - (optional) The address exposed by the JMX Exporter for collecting Kafka metrics -// database: { -// connection: should be a string and only supports 'REST' and 'gRPC' -// type: should be a string and only supports 'MongoDB' and 'PostgreSQL'. 
-// URI: should be a connection string to the database where you intend Chronos to record metrics -// } -// notifications: an array - optional for configuring slack or email notifications -// } -// */ - -// /** -// * Helper function to validate input from user's configuration options -// * Throws an error on input valid data types or on missing fields -// * Sets the default interval to 5 seconds and dockerized to false -// */ -// const helpers = { -// /** Validate all required fields exist and are valid input types */ -// validateInput: config => { -// const out = config; -// const { -// microservice, -// database, -// interval, -// dockerized, -// jmxuri, -// port, -// mode, -// promService, -// promPort, -// } = config; - -// if (!microservice || typeof microservice !== 'string') { -// throw new Error('Invalid input "microservice": Please provide a name for your microservice'); -// } - -// if (!database.type || typeof database.type !== 'string') { -// throw new Error('Invalid input "database type": Chronos supports PostgreSQL and MongoDB'); -// } - -// if (!database.URI || typeof database.URI !== 'string') { -// throw new Error('Invalid input "database URI": Please provide the URI to your database'); -// } -// if (!database.connection || typeof database.connection !== 'string') { -// throw new Error( -// 'Invalid input "database connection type: Please provide the type of connection' -// ); -// } - -// const modeTypes = ['kafka', 'kubernetes', 'microservices', 'docker']; - -// if (!mode || !modeTypes.includes(mode)) { -// throw new Error( -// 'You must input a mode into your chronos.config file. The mode may either be "kubernetes", "kafka", "microservice", or "docker"' -// ); -// } - -// if (mode === 'kafka' && jmxuri && typeof jmxuri !== 'string') { -// throw new Error( -// 'Invalid input for "jmxuri" in chronos-config.js: Please provide the address of the JMX Exporter' -// ); -// } - -// if (mode === 'kubernetes' || mode === 'docker') { -// if ( -// !promService || -// typeof promService !== 'string' || -// !promPort || -// typeof promPort !== 'number' -// ) { -// throw new Error( -// 'Invalid input for promService or promPort. promPort must be number and promService must be a string' -// ); -// } -// } - -// if (database.type !== 'PostgreSQL' && database.type !== 'MongoDB') { -// throw new Error( -// `Invalid input "${database.type}". Chronos only supports PostgreSQL and MongoDB.` -// ); -// } -// if (database.connection !== 'REST' && database.connection !== 'gRPC') { -// throw new Error( -// `Invalid database connection "${database.connection}". Chronos only supports REST and gRPC.` -// ); -// } - -// // Default interval to one minute -// if (!interval || typeof interval !== 'number') config.interval = 60000; - -// // Default dockerized to false -// if (dockerized === undefined || dockerized.constructor.name !== 'Boolean') -// config.dockerized = false; - -// return config; -// }, - -// /** -// * Sets up notifications depending if the user provides the options -// * Method adds properties to the existing userConfig object with the key -// * being the notification type and the value being the notification settings -// */ - -// addNotifications: config => { -// const { notifications } = config; -// //POTENTIAL BUG: notifications defaults to an empty array so it should always be truthy. 
I think code will fire regardless -// if (notifications) { -// // Current notification methods supported -// const features = ['slack', 'email', 'sms']; - -// // Setup notifications for user -// notifications.forEach(obj => { -// const { type } = obj; - -// // Throw errors on unsupported notification methods -// if (!features.includes(type)) { -// throw new Error(`${type} is not a supported notification method for Chronos`); -// } else { -// config[type] = obj.settings; -// } -// }); -// } -// return config; -// }, - -// /** -// * Determines URI if the user is running kafka or kubernetes -// * @param {*} config -// * @returns URI for exposed port -// */ - -// getMetricsURI: config => { -// if (config.mode === 'kafka') { -// return config.jmxuri; -// } else if (config.mode === 'kubernetes' || config.mode === 'docker') { -// return `http://${config.promService}:${config.promPort}/api/v1/query?query=`; -// } else { -// throw new Error('Unrecognized mode'); -// } -// }, - -// /** -// * Confirms URI provided is queryable -// * @param {*} config -// * @returns undefined -// */ -// testMetricsQuery: async config => { -// let URI = helpers.getMetricsURI(config); -// URI += 'up'; -// try { -// const response = await axios.get(URI); -// if (response.status !== 200) console.error('Invalid response from metrics server:', URI, response.status, response.data); -// else console.log('Successful initial response from metrics server:', URI); -// return response; -// } catch (error) { -// console.error(error); -// throw new Error('Unable to query metrics server: ' + URI); -// } -// }, - -// /** -// * Queries the kafka URI and parses response data -// * @param {*} config -// * @returns parsed response data -// */ - -// kafkaMetricsQuery: async config => { -// const URI = helpers.getMetricsURI(config); -// try { -// const response = await axios.get(URI); -// return helpers.extractWord(config.mode, response.data); -// } catch (error) { -// return console.error(config.mode, '|', 'Error fetching from URI:', URI, '\n', error); -// } -// }, - -// /** -// * Confirms that configuration mode is kafka and parses through data in response to an axios.get request -// * @param {*} mode string -// * @param {*} text object -// * @returns object with the gathered metric, value, time gathered, and category of event -// */ - -// extractWord: (mode, text) => { -// const res = []; -// const arr = text.split('\n'); -// const time = Date.now(); -// const category = 'Event'; - -// for (const element of arr) { -// // Handle comments and edge cases -// if (!element || element[0] === '#') continue; -// if ( -// mode === 'kafka' && -// (element.substring(0, 3) === 'jmx' || element.substring(0, 4) === "'jmx") -// ) -// continue; - -// const lastSpace = element.lastIndexOf(' '); -// const metric = element.slice(0, lastSpace); -// const value = Number(element.slice(lastSpace + 1)); -// if (!isNaN(value)) { -// res.push({ metric, value, time, category }); -// } else { -// console.error( -// 'The following metric is invalid and was not saved to the database:\n', -// element -// ); -// } -// } -// // console.log('Parsed Array length is: ', res.length); -// return res; -// }, - -// /** -// * Querys all available prometheus metrics and returns a parsed response -// * @param {*} config -// * @returns -// */ -// promMetricsQuery: async config => { -// const URI = helpers.getMetricsURI(config); -// let query; -// if (config.mode === 'docker') { -// query = URI + encodeURIComponent(`{__name__=~".+",name="${config.containerName}"}`); -// } else { 
-// query = URI + encodeURIComponent('{__name__=~".+",container=""}'); -// } -// try { -// const response = await axios.get(query); -// //console.log('promMetricsQuery line 236:', response.data.data.result); -// return helpers.parseProm(config, response.data.data.result); -// } catch (error) { -// return console.error(config.mode, '|', 'Error fetching from URI:', URI, '\n', error); -// } -// }, - -// /** -// * Parses response from Prometheus request and returns object with -// * @param {*} data -// * @returns bject with the gathered metric, value, time gathered, and category of event -// */ -// parseProm: (config, data) => { -// const res = []; -// const time = Date.now(); -// const category = config.mode === 'docker' ? `${config.containerName}` : 'Event'; - -// /** -// * Opportunity for improvement: Prometheus may query metrics that have the same job + instance + metric -// * which means they end up having the same name (see name variable). -// * When this happens, it means that the parsedArray returned from this function -// * will have a different length than the metricNames length. -// * To avoid this, Chronos currently only saves the first occurence of any particular ${name}. -// * This can be improved in the future by distinguishing between each ${name}, -// * but be aware that if the ${name} is too long, it will be rejected by the database. -// */ - -// const names = new Set(); - -// for (const info of data) { -// let wholeName; -// let name; -// if (config.mode === 'docker') { -// if (!info.metric.name) continue; -// wholeName = info.metric['__name__']; -// name = wholeName.replace(/.*\/.*\//g, ''); -// } else { -// if (!info.metric.job) continue; -// // Set the base name using the job, IP, and metric __name__ -// wholeName = info.metric.job + '/' + info.metric.instance + '/' + info.metric['__name__']; -// name = wholeName.replace(/.*\/.*\//g, ''); -// } -// if (names.has(name)) continue; -// else { -// names.add(name); -// // Tack on the remaining key's values from the remaining metric descriptors -// // This might result in an overly-long metric name though, so commented for now -// // for (let field in info.metric) { -// // if ((field in usedCategories)) continue -// // name += '/' + info.metric[field]; -// // } - -// let value = info.value; -// if (value.constructor.name === 'Array') value = info.value[1]; -// if (isNaN(value) || value === 'NaN') continue; - -// res.push({ -// metric: wholeName, -// value: value, -// time: time, -// category: category, -// }) -// } -// } -// console.log('is size equal?', res.length === new Set(res).size); -// //console.log("!res is: ", res); -// return res; -// }, - -// createGrafanaDashboard: async ( -// metric, -// datasource, -// graphType, -// token -// ) => { -// let uid = metric.metric.replace(/.*\/.*\//g, '') -// if (metric.metric.replace(/.*\/.*\//g, '').length >= 40) { -// uid = metric.metric.slice(metric.metric.length - 39); -// } -// //console.log("uid is: ", uid) -// //console.log("metric is: ", metric) -// // create dashboard object boilerplate -// const dashboard = { -// "dashboard": { -// "id": null, -// "uid": uid, -// "title": metric.metric.replace(/.*\/.*\//g, ''), -// "tags": ["templated"], -// "timezone": "browser", -// "schemaVersion": 16, -// "version": 0, -// "refresh": "10s", -// panels: [], -// }, -// folderId: 0, -// overwrite: true, -// }; - -// // push panel into dashboard object with a line for each metric in promQLQueries object -// dashboard.dashboard.panels.push(createGrafanaPanelObject(metric, datasource, 
graphType)); -// try { -// // POST request to Grafana Dashboard API to create a dashboard -// const dashboardResponse = await axios.post( -// 'http://grafana:3000/api/dashboards/db', -// JSON.stringify(dashboard), -// { -// headers: { -// 'Content-Type': 'application/json', -// 'Authorization': token -// }, -// } -// ); - -// // Descriptive error log for developers -// if (dashboardResponse.status >= 400) { -// console.log( -// 'Error with POST request to Grafana Dashboards API. In createGrafanaDashboard.' -// ); -// } else { -// // A simple console log to show when graphs are done being posted to Grafana. -// console.log(`πŸ“Š Grafana graphs for the ${metric.metric.replace(/.*\/.*\//g, '')} metric are ready πŸ“Š `); -// } -// } catch (err) { -// console.log(err); -// } -// }, - -// getGrafanaDatasource: async (token) => { -// // Fetch datasource information from grafana API. -// // This datasource is PRECONFIGURED on launch using grafana config. -// console.log('In utilities.getGrafanaDatasource!!!'); -// const datasourceResponse = await axios.get('http://grafana:3000/api/datasources', { -// headers: { -// 'Content-Type': 'application/json', -// 'Authorization': token -// }, -// }); -// // console.log('utilities.getGrafanaDatasource line 379:', datasourceResponse); -// console.log("Successfully fetched datasource from Grafana API") -// // Create a datasource object to be used within panels. -// const datasource = { -// type: datasourceResponse.data[0].type, -// uid: datasourceResponse.data[0].uid, -// }; -// // console.log('datasource is', datasource) - -// return datasource; -// }, - -// updateGrafanaDatasource: async (token) => { -// // Fetch datasource information from grafana API. -// // This datasource is PRECONFIGURED on launch using grafana config. -// const datasourceResponse = await axios.get('http://localhost:32000/api/datasources', { -// headers: { -// 'Content-Type': 'application/json', -// 'Authorization': token -// }, -// }); -// console.log("Successfully fetched datasource from Grafana API") -// // Create a datasource object to be used within panels. 
-// const datasource = { -// type: datasourceResponse.data[0].type, -// uid: datasourceResponse.data[0].uid, -// }; -// console.log('datasource is', datasource) - -// return datasource; -// }, - -// updateGrafanaDashboard: async (graphType, token, metric, datasource) => { -// let uid = metric.replace(/.*\/.*\//g, '') -// if (metric.replace(/.*\/.*\//g, '').length >= 40) { -// uid = metric.slice(metric.length - 39); -// } -// //console.log("uid is: ", uid) -// //console.log("metric is: ", metric) -// // create dashboard object boilerplate -// const dashboard = { -// "dashboard": { -// "id": null, -// "uid": uid, -// "title": metric.replace(/.*\/.*\//g, ''), -// "tags": ["templated"], -// "timezone": "browser", -// "schemaVersion": 16, -// "version": 0, -// "refresh": "10s", -// panels: [], -// }, -// folderId: 0, -// overwrite: true, -// }; - -// // push panel into dashboard object with a line for each metric in promQLQueries object -// dashboard.dashboard.panels.push(updateGrafanaPanelObject(metric, datasource, graphType)); - -// try { -// // POST request to Grafana Dashboard API to create a dashboard -// const dashboardResponse = await axios.post( -// 'http://localhost:32000/api/dashboards/db', -// JSON.stringify(dashboard), -// { -// headers: { -// 'Content-Type': 'application/json', -// 'Authorization': token -// }, -// } -// ); -// //console.log("dashboardResponse is: ", dashboardResponse) - -// // Descriptive error log for developers -// if (dashboardResponse.status >= 400) { -// console.log( -// 'Error with POST request to Grafana Dashboards API. In updateGrafanaDashboard.' -// ); -// } else { -// // A simple console log to show when graphs are done being posted to Grafana. -// console.log(`πŸ“Š Grafana graphs πŸ“Š for the ${metric.replace(/.*\/.*\//g, '')} has been updated!!!`); -// } -// } catch (err) { -// console.log(err); -// } - -// } -// }; - -// // module.exports = helpers; - -// export default {helpers}; - -// import axios from 'axios'; - -// // Use a default import from GrafanaPanel and destructure the needed functions. -// import GrafanaPanel from './GrafanaPanel'; -// const { createGrafanaPanelObject, updateGrafanaPanelObject } = GrafanaPanel; - -// // Define an interface for metric data to help TypeScript infer the type. 
-// interface MetricData { -// metric: string; -// value: number; -// time: number; -// category: string; -// } - -// const helpers = { -// /** Validate all required fields exist and are valid input types */ -// validateInput: config => { -// const out = config; -// const { -// microservice, -// database, -// interval, -// dockerized, -// jmxuri, -// port, -// mode, -// promService, -// promPort, -// } = config; - -// if (!microservice || typeof microservice !== 'string') { -// throw new Error( -// 'Invalid input "microservice": Please provide a name for your microservice' -// ); -// } - -// if (!database.type || typeof database.type !== 'string') { -// throw new Error('Invalid input "database type": Chronos supports PostgreSQL and MongoDB'); -// } - -// if (!database.URI || typeof database.URI !== 'string') { -// throw new Error( -// 'Invalid input "database URI": Please provide the URI to your database' -// ); -// } -// if (!database.connection || typeof database.connection !== 'string') { -// throw new Error( -// 'Invalid input "database connection type": Please provide the type of connection' -// ); -// } - -// const modeTypes = ['kafka', 'kubernetes', 'microservices', 'docker']; - -// if (!mode || !modeTypes.includes(mode)) { -// throw new Error( -// 'You must input a mode into your chronos.config file. The mode may either be "kubernetes", "kafka", "microservice", or "docker"' -// ); -// } - -// if (mode === 'kafka' && jmxuri && typeof jmxuri !== 'string') { -// throw new Error( -// 'Invalid input for "jmxuri" in chronos-config.js: Please provide the address of the JMX Exporter' -// ); -// } - -// if (mode === 'kubernetes' || mode === 'docker') { -// if ( -// !promService || -// typeof promService !== 'string' || -// !promPort || -// typeof promPort !== 'number' -// ) { -// throw new Error( -// 'Invalid input for promService or promPort. promPort must be number and promService must be a string' -// ); -// } -// } - -// if (database.type !== 'PostgreSQL' && database.type !== 'MongoDB') { -// throw new Error( -// `Invalid input "${database.type}". Chronos only supports PostgreSQL and MongoDB.` -// ); -// } -// if (database.connection !== 'REST' && database.connection !== 'gRPC') { -// throw new Error( -// `Invalid database connection "${database.connection}". Chronos only supports REST and gRPC.` -// ); -// } - -// // Default interval to one minute -// if (!interval || typeof interval !== 'number') config.interval = 60000; - -// // Default dockerized to false -// if (dockerized === undefined || dockerized.constructor.name !== 'Boolean') -// config.dockerized = false; - -// return config; -// }, - -// /** -// * Sets up notifications depending if the user provides the options. -// * Adds properties to the userConfig object with the key being the notification type and the value being its settings. 
-// */ -// addNotifications: config => { -// const { notifications } = config; -// if (notifications) { -// const features = ['slack', 'email', 'sms']; - -// notifications.forEach(obj => { -// const { type } = obj; -// if (!features.includes(type)) { -// throw new Error(`${type} is not a supported notification method for Chronos`); -// } else { -// config[type] = obj.settings; -// } -// }); -// } -// return config; -// }, - -// /** -// * Determines the URI based on the mode (kafka or kubernetes/docker) -// */ -// getMetricsURI: config => { -// if (config.mode === 'kafka') { -// return config.jmxuri; -// } else if (config.mode === 'kubernetes' || config.mode === 'docker') { -// return `http://${config.promService}:${config.promPort}/api/v1/query?query=`; -// } else { -// throw new Error('Unrecognized mode'); -// } -// }, - -// /** -// * Tests that the metrics URI is queryable. -// */ -// testMetricsQuery: async config => { -// let URI = helpers.getMetricsURI(config); -// URI += 'up'; -// try { -// const response = await axios.get(URI); -// if (response.status !== 200) -// console.error( -// 'Invalid response from metrics server:', -// URI, -// response.status, -// response.data -// ); -// else console.log('Successful initial response from metrics server:', URI); -// return response; -// } catch (error) { -// console.error(error); -// throw new Error('Unable to query metrics server: ' + URI); -// } -// }, - -// /** -// * Queries the Kafka URI and parses response data. -// */ -// kafkaMetricsQuery: async config => { -// const URI = helpers.getMetricsURI(config); -// try { -// const response = await axios.get(URI); -// return helpers.extractWord(config.mode, response.data); -// } catch (error) { -// console.error(config.mode, '|', 'Error fetching from URI:', URI, '\n', error); -// } -// }, - -// /** -// * Parses response text from a Kafka metrics query. -// */ -// extractWord: (mode, text) => { -// const res: MetricData[] = []; // Explicitly typed array -// const arr = text.split('\n'); -// const time = Date.now(); -// const category = 'Event'; - -// for (const element of arr) { -// if (!element || element[0] === '#') continue; -// if ( -// mode === 'kafka' && -// (element.substring(0, 3) === 'jmx' || element.substring(0, 4) === "'jmx") -// ) -// continue; - -// const lastSpace = element.lastIndexOf(' '); -// const metric = element.slice(0, lastSpace); -// const value = Number(element.slice(lastSpace + 1)); -// if (!isNaN(value)) { -// res.push({ metric, value, time, category }); -// } else { -// console.error( -// 'The following metric is invalid and was not saved to the database:\n', -// element -// ); -// } -// } -// return res; -// }, - -// /** -// * Queries all available Prometheus metrics and returns parsed response data. -// */ -// promMetricsQuery: async config => { -// const URI = helpers.getMetricsURI(config); -// let query; -// if (config.mode === 'docker') { -// query = URI + encodeURIComponent(`{__name__=~".+",name="${config.containerName}"}`); -// } else { -// query = URI + encodeURIComponent('{__name__=~".+",container=""}'); -// } -// try { -// const response = await axios.get(query); -// return helpers.parseProm(config, response.data.data.result); -// } catch (error) { -// console.error(config.mode, '|', 'Error fetching from URI:', URI, '\n', error); -// } -// }, - -// /** -// * Parses Prometheus query responses. 
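// Illustrative sketch of the URLs these helpers end up requesting, assuming a
// hypothetical config with promService 'prometheus', promPort 9090, and
// containerName 'orders' (all placeholder values, not package defaults):
const promBase = 'http://prometheus:9090/api/v1/query?query=';                        // kubernetes/docker modes
const liveness = promBase + 'up';                                                     // what testMetricsQuery requests
const dockerQuery = promBase + encodeURIComponent('{__name__=~".+",name="orders"}');  // docker mode selector
const clusterQuery = promBase + encodeURIComponent('{__name__=~".+",container=""}');  // kubernetes mode selector
console.log(liveness, dockerQuery, clusterQuery);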
-// */ -// parseProm: (config, data) => { -// const res: MetricData[] = []; // Explicitly typed array -// const time = Date.now(); -// const category = config.mode === 'docker' ? `${config.containerName}` : 'Event'; - -// const names = new Set(); - -// for (const info of data) { -// let wholeName; -// let name; -// if (config.mode === 'docker') { -// if (!info.metric.name) continue; -// wholeName = info.metric['__name__']; -// name = wholeName.replace(/.*\/.*\//g, ''); -// } else { -// if (!info.metric.job) continue; -// wholeName = -// info.metric.job + -// '/' + -// info.metric.instance + -// '/' + -// info.metric['__name__']; -// name = wholeName.replace(/.*\/.*\//g, ''); -// } -// if (names.has(name)) continue; -// names.add(name); - -// let value = info.value; -// if (Array.isArray(value)) value = info.value[1]; -// if (isNaN(value) || value === 'NaN') continue; - -// res.push({ -// metric: wholeName, -// value: value, -// time: time, -// category: category, -// }); -// } -// console.log('is size equal?', res.length === new Set(res).size); -// return res; -// }, - -// /** -// * Creates a Grafana dashboard with a panel based on the provided metric. -// */ -// createGrafanaDashboard: async (metric, datasource, graphType, token) => { -// let uid = metric.metric.replace(/.*\/.*\//g, ''); -// if (metric.metric.replace(/.*\/.*\//g, '').length >= 40) { -// uid = metric.metric.slice(metric.metric.length - 39); -// } -// const dashboard = { -// dashboard: { -// id: null, -// uid: uid, -// title: metric.metric.replace(/.*\/.*\//g, ''), -// tags: ['templated'], -// timezone: 'browser', -// schemaVersion: 16, -// version: 0, -// refresh: '10s', -// panels: [] as any[], // Explicitly typed panels array -// }, -// folderId: 0, -// overwrite: true, -// }; - -// dashboard.dashboard.panels.push( -// createGrafanaPanelObject(metric, datasource, graphType) -// ); -// try { -// const dashboardResponse = await axios.post( -// 'http://grafana:3000/api/dashboards/db', -// JSON.stringify(dashboard), -// { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, -// }, -// } -// ); - -// if (dashboardResponse.status >= 400) { -// console.log( -// 'Error with POST request to Grafana Dashboards API. In createGrafanaDashboard.' -// ); -// } else { -// console.log( -// `πŸ“Š Grafana graphs for the ${metric.metric.replace(/.*\/.*\//g, '')} metric are ready πŸ“Š` -// ); -// } -// } catch (err) { -// console.log(err); -// } -// }, - -// /** -// * Fetches Grafana datasource information. -// */ -// getGrafanaDatasource: async token => { -// console.log('In utilities.getGrafanaDatasource!!!'); -// const datasourceResponse = await axios.get('http://grafana:3000/api/datasources', { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, -// }, -// }); -// console.log('Successfully fetched datasource from Grafana API'); -// const datasource = { -// type: datasourceResponse.data[0].type, -// uid: datasourceResponse.data[0].uid, -// }; -// return datasource; -// }, - -// /** -// * Fetches Grafana datasource information from a different endpoint. 
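// Illustrative shape of the Prometheus /api/v1/query `result` entries that
// parseProm consumes; the label values below are made up for the example.
const sampleResult = [
  {
    metric: { __name__: 'process_cpu_seconds_total', job: 'node', instance: 'localhost:9100' },
    value: [1700000000.123, '42.5'],   // instant vector: [unix timestamp, value string]
  },
];
// In a non-docker mode this entry is folded into roughly
//   { metric: 'node/localhost:9100/process_cpu_seconds_total', value: '42.5', time: Date.now(), category: 'Event' }
// and later entries that collapse to the same job/instance/__name__ name are skipped via the `names` Set.
console.log(sampleResult.length);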
-// */ -// updateGrafanaDatasource: async token => { -// const datasourceResponse = await axios.get('http://localhost:32000/api/datasources', { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, -// }, -// }); -// console.log('Successfully fetched datasource from Grafana API'); -// const datasource = { -// type: datasourceResponse.data[0].type, -// uid: datasourceResponse.data[0].uid, -// }; -// console.log('datasource is', datasource); -// return datasource; -// }, - -// /** -// * Updates a Grafana dashboard by pushing an updated panel. -// */ -// updateGrafanaDashboard: async (graphType, token, metric, datasource) => { -// let uid = metric.replace(/.*\/.*\//g, ''); -// if (metric.replace(/.*\/.*\//g, '').length >= 40) { -// uid = metric.slice(metric.length - 39); -// } -// const dashboard = { -// dashboard: { -// id: null, -// uid: uid, -// title: metric.replace(/.*\/.*\//g, ''), -// tags: ['templated'], -// timezone: 'browser', -// schemaVersion: 16, -// version: 0, -// refresh: '10s', -// panels: [] as any[], // Explicitly typed panels array -// }, -// folderId: 0, -// overwrite: true, -// }; - -// dashboard.dashboard.panels.push( -// updateGrafanaPanelObject(metric, datasource, graphType) -// ); - -// try { -// const dashboardResponse = await axios.post( -// 'http://localhost:32000/api/dashboards/db', -// JSON.stringify(dashboard), -// { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, -// }, -// } -// ); -// if (dashboardResponse.status >= 400) { -// console.log( -// 'Error with POST request to Grafana Dashboards API. In updateGrafanaDashboard.' -// ); -// } else { -// console.log( -// `πŸ“Š Grafana graphs πŸ“Š for the ${metric.replace(/.*\/.*\//g, '')} has been updated!!!` -// ); -// } -// } catch (err) { -// console.log(err); -// } -// }, -// }; - -// export default { helpers }; - -// import axios from 'axios'; - -// // Use a default import from GrafanaPanel and destructure the needed functions. -// import GrafanaPanel from './GrafanaPanel'; -// const { createGrafanaPanelObject, updateGrafanaPanelObject } = GrafanaPanel; - -// // Define an interface for metric data to help TypeScript infer the type. -// interface MetricData { -// metric: string; -// value: number; -// time: number; -// category: string; -// } - -// const helpers = { -// /** -// * Queries all available Prometheus metrics and returns parsed response data. -// */ -// promMetricsQuery: async (config) => { -// const URI = helpers.getMetricsURI(config); -// let query; -// if (config.mode === 'docker') { -// query = URI + encodeURIComponent(`{__name__=~".+",name="${config.containerName}"}`); -// } else { -// query = URI + encodeURIComponent('{__name__=~".+",container=""}'); -// } - -// try { -// const response = await axios.get(query); -// return helpers.parseProm(config, response.data?.data?.result ?? []); // βœ… No more TS18046 error -// } catch (error) { -// console.error(config.mode, '|', 'Error fetching from URI:', URI, '\n', error); -// return []; -// } -// }, - -// /** -// * Queries the Kafka URI and parses response data. -// */ -// kafkaMetricsQuery: async (config) => { -// const URI = helpers.getMetricsURI(config); -// try { -// const response = await axios.get(URI); -// return helpers.extractWord(config.mode, response.data ?? 
""); // βœ… Safe fallback for missing response data -// } catch (error) { -// console.error(config.mode, '|', 'Error fetching from URI:', URI, '\n', error); -// return []; -// } -// }, - -// /** -// * Fetches Grafana datasource information. -// */ -// getGrafanaDatasource: async (token) => { -// console.log('In utilities.getGrafanaDatasource!!!'); -// try { -// const datasourceResponse = await axios.get('http://grafana:3000/api/datasources', { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, -// }, -// }); - -// console.log('Successfully fetched datasource from Grafana API'); -// return { -// type: datasourceResponse.data?.[0]?.type ?? "unknown", // βœ… Prevents undefined errors -// uid: datasourceResponse.data?.[0]?.uid ?? "unknown", -// }; -// } catch (error) { -// console.error("Error fetching Grafana datasource:", error); -// return { type: "unknown", uid: "unknown" }; -// } -// }, - -// /** -// * Fetches Grafana datasource information from a different endpoint. -// */ -// updateGrafanaDatasource: async (token) => { -// try { -// const datasourceResponse = await axios.get('http://localhost:32000/api/datasources', { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, -// }, -// }); - -// console.log('Successfully fetched datasource from Grafana API'); -// return { -// type: datasourceResponse.data?.[0]?.type ?? "unknown", // βœ… Prevents undefined errors -// uid: datasourceResponse.data?.[0]?.uid ?? "unknown", -// }; -// } catch (error) { -// console.error("Error updating Grafana datasource:", error); -// return { type: "unknown", uid: "unknown" }; -// } -// }, - -// /** -// * Parses Prometheus query responses. -// */ -// parseProm: (config, data) => { -// const res: MetricData[] = []; -// const time = Date.now(); -// const category = config.mode === 'docker' ? `${config.containerName}` : 'Event'; - -// const names = new Set(); - -// for (const info of data ?? []) { // βœ… Prevents iteration on undefined -// let wholeName; -// let name; -// if (config.mode === 'docker') { -// if (!info.metric?.name) continue; -// wholeName = info.metric?.['__name__'] ?? "unknown"; -// name = wholeName.replace(/.*\/.*\//g, ''); -// } else { -// if (!info.metric?.job) continue; -// wholeName = -// info.metric?.job + -// '/' + -// info.metric?.instance + -// '/' + -// info.metric?.['__name__']; -// name = wholeName.replace(/.*\/.*\//g, ''); -// } -// if (names.has(name)) continue; -// names.add(name); - -// let value = info.value; -// if (Array.isArray(value)) value = info.value[1] ?? 0; -// if (isNaN(value) || value === 'NaN') continue; - -// res.push({ -// metric: wholeName, -// value: value, -// time: time, -// category: category, -// }); -// } -// console.log('is size equal?', res.length === new Set(res).size); -// return res; -// } -// }; - -// export default { helpers }; -// import axios from 'axios'; -// import { createGrafanaPanelObject, updateGrafanaPanelObject } from './GrafanaPanel'; - -// // Define an interface for metric data to help TypeScript infer the type. -// interface MetricData { -// metric: string; -// value: number; -// time: number; -// category: string; -// } - -// const helpers = { -// /** -// * Queries all available Prometheus metrics and returns parsed response data. 
-// */ -// promMetricsQuery: async (config) => { -// const URI = helpers.getMetricsURI(config); -// let query; -// if (config.mode === 'docker') { -// query = URI + encodeURIComponent(`{__name__=~".+",name="${config.containerName}"}`); -// } else { -// query = URI + encodeURIComponent('{__name__=~".+",container=""}'); -// } - -// try { -// const response = await axios.get(query); -// return helpers.parseProm(config, response.data?.data?.result ?? []); // βœ… Prevents TS18046 error -// } catch (error) { -// console.error(config.mode, '|', 'Error fetching from URI:', URI, '\n', error); -// return []; -// } -// }, - -// /** -// * Queries the Kafka URI and parses response data. -// */ -// kafkaMetricsQuery: async (config) => { -// const URI = helpers.getMetricsURI(config); -// try { -// const response = await axios.get(URI); -// return helpers.extractWord(config.mode, response.data ?? ""); // βœ… Safe fallback for missing response data -// } catch (error) { -// console.error(config.mode, '|', 'Error fetching from URI:', URI, '\n', error); -// return []; -// } -// }, - -// /** -// * Fetches Grafana datasource information. -// */ -// getGrafanaDatasource: async (token) => { -// console.log('In utilities.getGrafanaDatasource...'); -// try { -// const datasourceResponse = await axios.get('http://grafana:3000/api/datasources', { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, -// }, -// }); - -// console.log('Successfully fetched datasource from Grafana API'); -// return { -// type: datasourceResponse.data?.[0]?.type ?? "unknown", // βœ… Prevents undefined errors -// uid: datasourceResponse.data?.[0]?.uid ?? "unknown", -// }; -// } catch (error) { -// console.error("Error fetching Grafana datasource:", error); -// return { type: "unknown", uid: "unknown" }; -// } -// }, - -// /** -// * Fetches Grafana datasource information from a different endpoint. -// */ -// updateGrafanaDatasource: async (token) => { -// try { -// const datasourceResponse = await axios.get('http://localhost:32000/api/datasources', { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, -// }, -// }); - -// console.log('Successfully fetched datasource from Grafana API'); -// return { -// type: datasourceResponse.data?.[0]?.type ?? "unknown", // βœ… Prevents undefined errors -// uid: datasourceResponse.data?.[0]?.uid ?? "unknown", -// }; -// } catch (error) { -// console.error("Error updating Grafana datasource:", error); -// return { type: "unknown", uid: "unknown" }; -// } -// }, - -// /** -// * Parses Prometheus query responses. -// */ -// parseProm: (config, data) => { -// const res: MetricData[] = []; -// const time = Date.now(); -// const category = config.mode === 'docker' ? `${config.containerName}` : 'Event'; - -// const names = new Set(); - -// for (const info of data ?? []) { // βœ… Prevents iteration on undefined -// let wholeName; -// let name; -// if (config.mode === 'docker') { -// if (!info.metric?.name) continue; -// wholeName = info.metric?.['__name__'] ?? "unknown"; -// name = wholeName.replace(/.*\/.*\//g, ''); -// } else { -// if (!info.metric?.job) continue; -// wholeName = -// info.metric?.job + -// '/' + -// info.metric?.instance + -// '/' + -// info.metric?.['__name__']; -// name = wholeName.replace(/.*\/.*\//g, ''); -// } -// if (names.has(name)) continue; -// names.add(name); - -// let value = info.value; -// if (Array.isArray(value)) value = info.value[1] ?? 
0; -// if (isNaN(value) || value === 'NaN') continue; - -// res.push({ -// metric: wholeName, -// value: value, -// time: time, -// category: category, -// }); -// } -// console.log('is size equal?', res.length === new Set(res).size); -// return res; -// } -// }; - -// export default { helpers }; -// import axios from 'axios'; -// import { createGrafanaPanelObject, updateGrafanaPanelObject } from './GrafanaPanel.ts'; - -// interface DatabaseConfig { -// type: 'MongoDB' | 'PostgreSQL'; -// URI: string; -// connection?: 'REST' | 'gRPC'; -// } - -// interface ChronosConfig { -// microservice: string; -// interval: number; -// mode: 'kafka' | 'kubernetes' | 'microservices' | 'docker'; -// dockerized?: boolean; -// database: DatabaseConfig; -// notifications?: any[]; -// } - -// interface MetricData { -// metric: string; -// value: number; -// time: number; -// category: string; -// } - -// export const helpers = { -// /** Validate all required fields exist and are valid input types */ -// validateInput: (config: ChronosConfig): ChronosConfig => { -// if (!config) throw new Error("Chronos config is undefined"); - -// const { microservice, database, interval, dockerized, mode, jmxuri, promService, promPort } = config; -// if (!microservice || typeof microservice !== "string") throw new Error("Invalid input: microservice must be a string."); -// if (!database?.type || !['MongoDB', 'PostgreSQL'].includes(database.type)) throw new Error("Invalid database type."); - -// if (mode === "kafka" && jmxuri && typeof jmxuri !== "string") throw new Error("Invalid jmxuri, must be a string."); -// if ((mode === "kubernetes" || mode === "docker") && (!promService || typeof promService !== "string" || !promPort || typeof promPort !== "number")) { -// throw new Error("Invalid Prometheus service or port."); -// } - -// config.interval = interval ?? 60000; -// config.dockerized = dockerized ?? false; -// return config; -// }, - -// addNotifications: (config: ChronosConfig): ChronosConfig => { -// const { notifications } = config; -// if (notifications) { -// const features = ["slack", "email", "sms"]; -// notifications.forEach(obj => { -// if (!features.includes(obj.type)) { -// throw new Error(`${obj.type} is not a supported notification method.`); -// } -// config[obj.type] = obj.settings; -// }); -// } -// return config; -// }, - -// getMetricsURI: (config: ChronosConfig): string => { -// if (config.mode === "kafka") return config.jmxuri ?? ""; -// if (config.mode === "kubernetes" || config.mode === "docker") return `http://${config.promService}:${config.promPort}/api/v1/query?query=`; -// throw new Error("Unrecognized mode"); -// }, - -// testMetricsQuery: async (config: ChronosConfig) => { -// const URI = helpers.getMetricsURI(config) + "up"; -// try { -// const response = await axios.get(URI); -// if (response.status !== 200) console.error("Invalid response:", URI, response.status); -// return response; -// } catch (error) { -// console.error(error); -// throw new Error(`Unable to query metrics server: ${URI}`); -// } -// }, - -// kafkaMetricsQuery: async (config: ChronosConfig): Promise => { -// const URI = helpers.getMetricsURI(config); -// try { -// const response = await axios.get(URI); -// return helpers.extractWord(config.mode, response.data ?? 
""); -// } catch (error) { -// console.error(config.mode, "Error fetching from URI:", URI, error); -// return []; -// } -// }, - -// extractWord: (mode: string, text: string): MetricData[] => { -// const res: MetricData[] = []; -// const arr = text.split("\n"); -// const time = Date.now(); -// const category = "Event"; - -// for (const element of arr) { -// if (!element || element[0] === "#") continue; -// if (mode === "kafka" && (element.startsWith("jmx") || element.startsWith("'jmx"))) continue; - -// const lastSpace = element.lastIndexOf(" "); -// const metric = element.slice(0, lastSpace); -// const value = Number(element.slice(lastSpace + 1)); -// if (!isNaN(value)) { -// res.push({ metric, value, time, category }); -// } -// } -// return res; -// }, - -// promMetricsQuery: async (config: ChronosConfig): Promise => { -// const URI = helpers.getMetricsURI(config); -// const query = config.mode === "docker" ? URI + encodeURIComponent(`{__name__=~".+",name="${config.database.type}"}`) : URI + encodeURIComponent('{__name__=~".+",container=""}'); - -// try { -// const response = await axios.get(query); -// return helpers.parseProm(config, response.data?.data?.result ?? []); -// } catch (error) { -// console.error(config.mode, "Error fetching from URI:", URI, error); -// return []; -// } -// }, - -// parseProm: (config: ChronosConfig, data: any[]): MetricData[] => { -// const res: MetricData[] = []; -// const time = Date.now(); -// const category = config.mode === "docker" ? `${config.database.type}` : "Event"; -// const names = new Set(); - -// for (const info of data) { -// let wholeName = info?.metric?.["__name__"] ?? ""; -// let name = wholeName.replace(/.*\/.*\//g, ""); - -// if (names.has(name)) continue; -// names.add(name); - -// let value = Array.isArray(info.value) ? info.value[1] : info.value; -// if (!isNaN(value)) { -// res.push({ metric: wholeName, value, time, category }); -// } -// } -// return res; -// } -// }; - -// export default { helpers }; -// import axios from 'axios'; -// import { createGrafanaPanelObject, updateGrafanaPanelObject } from '../controllers/GrafanaPanel'; - -// /** -// * Chronos config interface (basic example) -// */ -// interface DatabaseConfig { -// type: 'MongoDB' | 'PostgreSQL'; -// URI: string; -// connection?: 'REST' | 'gRPC'; -// } - -// interface ChronosConfig { -// microservice: string; -// interval: number; -// dockerized?: boolean; -// jmxuri?: string; -// port?: number; -// mode: 'kafka' | 'kubernetes' | 'microservices' | 'docker'; -// promService?: string; -// promPort?: number; -// database: DatabaseConfig; -// notifications?: Array<{ -// type: 'slack' | 'email' | 'sms'; -// settings: any; -// }>; -// containerName?: string; -// grafanaAPIKey?: string; // in case you're storing the API key -// } - -// /** -// * We nest all Chronos utility functions inside a `helpers` object. -// * That means calls will be `utilities.helpers.`. 
-// */ -// const helpers = { -// /** Validate config fields and set defaults */ -// validateInput: (config: Partial): ChronosConfig => { -// const { -// microservice, -// database, -// interval, -// dockerized, -// jmxuri, -// mode, -// promService, -// promPort, -// } = config as ChronosConfig; - -// if (!microservice || typeof microservice !== 'string') { -// throw new Error('Invalid input "microservice": Please provide a name for your microservice'); -// } - -// if (!database?.type || typeof database.type !== 'string') { -// throw new Error('Invalid input "database type": Chronos supports PostgreSQL and MongoDB'); -// } - -// if (!database.URI || typeof database.URI !== 'string') { -// throw new Error('Invalid input "database URI": Please provide the URI to your database'); -// } - -// if (!database.connection || typeof database.connection !== 'string') { -// throw new Error('Invalid "database connection" type: Please provide "REST" or "gRPC"'); -// } - -// // Allowed modes for Chronos -// const modeTypes = ['kafka', 'kubernetes', 'microservices', 'docker']; -// if (!mode || !modeTypes.includes(mode)) { -// throw new Error( -// 'You must input a mode. Options: "kubernetes", "kafka", "microservices", or "docker"' -// ); -// } - -// if (mode === 'kafka' && jmxuri && typeof jmxuri !== 'string') { -// throw new Error('Invalid input for "jmxuri": Must be a string'); -// } - -// if ((mode === 'kubernetes' || mode === 'docker') && (!promService || !promPort)) { -// throw new Error('Missing "promService" or "promPort" for mode "kubernetes"/"docker"'); -// } - -// if (database.type !== 'PostgreSQL' && database.type !== 'MongoDB') { -// throw new Error(`Invalid database type "${database.type}". Only PostgreSQL and MongoDB.`); -// } -// if (database.connection !== 'REST' && database.connection !== 'gRPC') { -// throw new Error(`Invalid db connection "${database.connection}". 
Only REST/gRPC supported.`); -// } - -// // Default interval to one minute -// if (!interval || typeof interval !== 'number') config.interval = 60000; - -// // Default dockerized to false -// if (dockerized === undefined) config.dockerized = false; - -// return config as ChronosConfig; -// }, - -// /** Configure notifications if user provides them */ -// addNotifications: (config: ChronosConfig): ChronosConfig => { -// const { notifications } = config; -// if (notifications) { -// const validFeatures = ['slack', 'email', 'sms']; -// notifications.forEach(obj => { -// if (!validFeatures.includes(obj.type)) { -// throw new Error(`${obj.type} is not a supported notification method for Chronos`); -// } else { -// (config as any)[obj.type] = obj.settings; // Insert the notification settings into config -// } -// }); -// } -// return config; -// }, - -// /** Return the metrics URI depending on "mode" */ -// getMetricsURI: (config: ChronosConfig): string => { -// if (config.mode === 'kafka') { -// return config.jmxuri || ''; -// } else if (config.mode === 'kubernetes' || config.mode === 'docker') { -// return `http://${config.promService}:${config.promPort}/api/v1/query?query=`; -// } -// throw new Error('Unrecognized mode'); -// }, - -// /** Test if the metrics endpoint is reachable */ -// testMetricsQuery: async (config: ChronosConfig) => { -// let URI = helpers.getMetricsURI(config) + 'up'; -// try { -// const response = await axios.get(URI); -// if (response.status !== 200) { -// console.error('Invalid response from metrics server:', URI, response.status, response.data); -// } else { -// console.log('Successful initial response from metrics server:', URI); -// } -// return response; -// } catch (error) { -// console.error(error); -// throw new Error('Unable to query metrics server: ' + URI); -// } -// }, - -// /** Fetch Kafka metrics, parse them */ -// kafkaMetricsQuery: async (config: ChronosConfig) => { -// const URI = helpers.getMetricsURI(config); -// try { -// const response = await axios.get(URI); -// return helpers.extractWord(config.mode, response.data); -// } catch (error) { -// console.error(config.mode, '|', 'Error fetching from URI:', URI, '\n', error); -// return []; -// } -// }, - -// /** Parse plaintext Kafka metrics (line-based) */ -// extractWord: (mode: string, text: string) => { -// const res: Array<{ -// metric: string; -// value: number; -// time: number; -// category: string; -// }> = []; - -// const arr = text.split('\n'); -// const time = Date.now(); -// const category = 'Event'; - -// for (const element of arr) { -// if (!element || element.startsWith('#')) continue; -// if (mode === 'kafka' && (element.startsWith('jmx') || element.startsWith("'jmx"))) continue; - -// const lastSpace = element.lastIndexOf(' '); -// const metric = element.slice(0, lastSpace); -// const val = Number(element.slice(lastSpace + 1)); -// if (!isNaN(val)) { -// res.push({ metric, value: val, time, category }); -// } else { -// console.error('Invalid metric (skipped):\n', element); -// } -// } -// return res; -// }, - -// /** Fetch Prometheus metrics, parse them */ -// promMetricsQuery: async (config: ChronosConfig) => { -// const baseURI = helpers.getMetricsURI(config); -// const query = -// config.mode === 'docker' -// ? 
baseURI + encodeURIComponent(`{__name__=~".+",name="${config.containerName}"}`) -// : baseURI + encodeURIComponent('{__name__=~".+",container=""}'); - -// try { -// const response = await axios.get(query); -// const result = response.data?.data?.result || []; -// return helpers.parseProm(config, result); -// } catch (error) { -// console.error(config.mode, '|', 'Error fetching from URI:', query, '\n', error); -// return []; -// } -// }, - -// /** Parse the Prometheus "result" array into standardized metrics objects */ -// parseProm: ( -// config: ChronosConfig, -// data: any[] -// ): Array<{ metric: string; value: number; time: number; category: string }> => { -// const res = []; -// const time = Date.now(); -// const category = config.mode === 'docker' ? `${config.containerName}` : 'Event'; - -// const names = new Set(); - -// for (const info of data) { -// let wholeName: string; -// let name: string; - -// if (config.mode === 'docker') { -// if (!info.metric.name) continue; -// wholeName = info.metric['__name__']; -// name = wholeName.replace(/.*\/.*\//g, ''); -// } else { -// if (!info.metric.job) continue; -// wholeName = info.metric.job + '/' + info.metric.instance + '/' + info.metric['__name__']; -// name = wholeName.replace(/.*\/.*\//g, ''); -// } - -// if (names.has(name)) continue; -// names.add(name); - -// let value = info.value; -// if (Array.isArray(value)) value = value[1]; // [timestamp, value] pattern - -// if (isNaN(value) || value === 'NaN') continue; - -// res.push({ -// metric: wholeName, -// value: Number(value), -// time, -// category, -// }); -// } -// console.log('is size equal?', res.length === new Set(res).size); -// return res; -// }, - -// /** Creates a Grafana dashboard via API */ -// createGrafanaDashboard: async ( -// metric: { metric: string }, -// datasource: { type: string; uid: string }, -// graphType: string, -// token: string -// ) => { -// let uid = metric.metric.replace(/.*\/.*\//g, ''); -// if (uid.length >= 40) { -// uid = uid.slice(uid.length - 39); -// } - -// const dashboard = { -// dashboard: { -// id: null, -// uid, -// title: uid, -// tags: ['templated'], -// timezone: 'browser', -// schemaVersion: 16, -// version: 0, -// refresh: '10s', -// panels: [], -// }, -// folderId: 0, -// overwrite: true, -// }; - -// dashboard.dashboard.panels.push( -// createGrafanaPanelObject(metric, datasource, graphType) -// ); - -// try { -// const dashboardResponse = await axios.post( -// 'http://grafana:3000/api/dashboards/db', -// JSON.stringify(dashboard), -// { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, // e.g. 'Bearer ' -// }, -// }); - -// if (dashboardResponse.status >= 400) { -// console.log( -// 'Error with POST request to Grafana Dashboards API. In createGrafanaDashboard.' 
-// ); -// } else { -// console.log(`πŸ“Š Grafana graphs for "${metric.metric}" are ready πŸ“Š `); -// } -// } catch (err) { -// console.log('Error creating Grafana dashboard:', err); -// } -// }, - -// /** Fetches an existing datasource from Grafana */ -// getGrafanaDatasource: async (token: string) => { -// console.log('In utilities.getGrafanaDatasource!!!'); -// const datasourceResponse = await axios.get('http://grafana:3000/api/datasources', { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, -// }, -// }); -// console.log('Successfully fetched datasource from Grafana API'); - -// const datasource = { -// type: datasourceResponse.data[0].type, -// uid: datasourceResponse.data[0].uid, -// }; -// return datasource; -// }, - -// /** Fetches datasource from a different endpoint, presumably a custom plugin */ -// updateGrafanaDatasource: async (token: string) => { -// const datasourceResponse = await axios.get('http://localhost:32000/api/datasources', { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, -// }, -// }); -// console.log('Successfully fetched datasource from Grafana API'); -// const datasource = { -// type: datasourceResponse.data[0].type, -// uid: datasourceResponse.data[0].uid, -// }; -// console.log('datasource is', datasource); -// return datasource; -// }, - -// /** Updates an existing dashboard in Grafana */ -// updateGrafanaDashboard: async ( -// graphType: string, -// token: string, -// metric: string, -// datasource: any -// ) => { -// let uid = metric.replace(/.*\/.*\//g, ''); -// if (uid.length >= 40) { -// uid = uid.slice(uid.length - 39); -// } - -// const dashboard = { -// dashboard: { -// id: null, -// uid, -// title: uid, -// tags: ['templated'], -// timezone: 'browser', -// schemaVersion: 16, -// version: 0, -// refresh: '10s', -// panels: [], -// }, -// folderId: 0, -// overwrite: true, -// }; - -// dashboard.dashboard.panels.push( -// updateGrafanaPanelObject(metric, datasource, graphType) -// ); - -// try { -// const dashboardResponse = await axios.post( -// 'http://localhost:32000/api/dashboards/db', -// JSON.stringify(dashboard), -// { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, -// }, -// }); - -// if (dashboardResponse.status >= 400) { -// console.log( -// 'Error with POST request to Grafana Dashboards API. In updateGrafanaDashboard.' 
-// ); -// } else { -// console.log( -// `πŸ“Š Grafana graphs for "${metric.replace(/.*\/.*\//g, '')}" have been updated!!!` -// ); -// } -// } catch (err) { -// console.log('Error updating Grafana dashboard:', err); -// } -// }, -// }; - -// /** -// * Export an object containing `helpers` -// * -// * In `mongo.ts`, you'll do: -// * import utilities from './utilities'; -// * metricsQuery = utilities.helpers.kafkaMetricsQuery; -// * await utilities.helpers.getGrafanaDatasource(...); -// */ -// export default { -// helpers -// }; -// import axios from 'axios'; -// import { createGrafanaPanelObject, updateGrafanaPanelObject } from './GrafanaPanel'; - -// /** -// * Interfaces for TypeScript -// */ -// interface DatabaseConfig { -// type: 'MongoDB' | 'PostgreSQL'; -// URI: string; -// connection?: 'REST' | 'gRPC'; -// } - -// interface ChronosConfig { -// microservice: string; -// interval: number; -// dockerized?: boolean; -// jmxuri?: string; -// port?: number; -// mode: 'kafka' | 'kubernetes' | 'microservices' | 'docker'; -// promService?: string; -// promPort?: number; -// database: DatabaseConfig; -// notifications?: Array<{ -// type: 'slack' | 'email' | 'sms'; -// settings: any; -// }>; -// containerName?: string; -// grafanaAPIKey?: string; -// } - -// /** A sample interface for metric data */ -// interface MetricData { -// metric: string; -// value: number; -// time: number; -// category: string; -// } - -// /** -// * All utility functions are nested inside 'helpers'. -// * That means in other files: `utilities.helpers.` -// */ -// const helpers = { -// /** Validate config fields and set defaults */ -// validateInput: (config: Partial): ChronosConfig => { -// const { microservice, database, interval, dockerized, jmxuri, mode, promService, promPort } = -// config as ChronosConfig; - -// if (!microservice || typeof microservice !== 'string') { -// throw new Error('Invalid input "microservice": Please provide a name for your microservice'); -// } - -// if (!database?.type || typeof database.type !== 'string') { -// throw new Error('Invalid input "database type": Chronos supports PostgreSQL and MongoDB'); -// } - -// if (!database.URI || typeof database.URI !== 'string') { -// throw new Error('Invalid input "database URI": Please provide the URI to your database'); -// } - -// if (!database.connection || typeof database.connection !== 'string') { -// throw new Error('Invalid "database connection" type: Please provide "REST" or "gRPC"'); -// } - -// const modeTypes = ['kafka', 'kubernetes', 'microservices', 'docker']; -// if (!mode || !modeTypes.includes(mode)) { -// throw new Error( -// 'You must input a mode. Options: "kubernetes", "kafka", "microservices", or "docker"' -// ); -// } - -// if (mode === 'kafka' && jmxuri && typeof jmxuri !== 'string') { -// throw new Error('Invalid input for "jmxuri": Must be a string'); -// } - -// if ((mode === 'kubernetes' || mode === 'docker') && (!promService || !promPort)) { -// throw new Error('Missing "promService" or "promPort" for mode "kubernetes"/"docker"'); -// } - -// if (database.type !== 'PostgreSQL' && database.type !== 'MongoDB') { -// throw new Error(`Invalid database type "${database.type}". Only PostgreSQL and MongoDB.`); -// } -// if (database.connection !== 'REST' && database.connection !== 'gRPC') { -// throw new Error(`Invalid db connection "${database.connection}". 
Only REST/gRPC supported.`); -// } - -// // Default interval to one minute -// if (!interval || typeof interval !== 'number') config.interval = 60000; -// // Default dockerized to false -// if (dockerized === undefined) config.dockerized = false; - -// return config as ChronosConfig; -// }, - -// /** Configure notifications if user provides them */ -// addNotifications: (config: ChronosConfig): ChronosConfig => { -// const { notifications } = config; -// if (notifications) { -// const validFeatures = ['slack', 'email', 'sms']; -// notifications.forEach(obj => { -// if (!validFeatures.includes(obj.type)) { -// throw new Error(`${obj.type} is not a supported notification method for Chronos`); -// } else { -// // Insert the notification settings onto config -// (config as any)[obj.type] = obj.settings; -// } -// }); -// } -// return config; -// }, - -// /** Return the metrics URI depending on "mode" */ -// getMetricsURI: (config: ChronosConfig): string => { -// if (config.mode === 'kafka') { -// return config.jmxuri || ''; -// } else if (config.mode === 'kubernetes' || config.mode === 'docker') { -// return `http://${config.promService}:${config.promPort}/api/v1/query?query=`; -// } -// throw new Error('Unrecognized mode'); -// }, - -// /** Test if the metrics endpoint is reachable */ -// testMetricsQuery: async (config: ChronosConfig) => { -// const URI = helpers.getMetricsURI(config) + 'up'; -// try { -// // Cast response as any to avoid 'unknown' -// const response = await axios.get(URI); -// if (response.status !== 200) { -// console.error('Invalid response from metrics server:', URI, response.status, response.data); -// } else { -// console.log('Successful initial response from metrics server:', URI); -// } -// return response; -// } catch (error) { -// console.error(error); -// throw new Error('Unable to query metrics server: ' + URI); -// } -// }, - -// /** Fetch Kafka metrics, parse them */ -// kafkaMetricsQuery: async (config: ChronosConfig): Promise => { -// const URI = helpers.getMetricsURI(config); -// try { -// const response = await axios.get(URI); -// return helpers.extractWord(config.mode, response.data ?? ''); -// } catch (error) { -// console.error(config.mode, '|', 'Error fetching from URI:', URI, '\n', error); -// return []; -// } -// }, - -// /** Parse plaintext Kafka metrics (line-based) */ -// extractWord: (mode: string, text: string): MetricData[] => { -// const res: MetricData[] = []; -// const arr = text.split('\n'); -// const time = Date.now(); -// const category = 'Event'; - -// for (const element of arr) { -// if (!element || element.startsWith('#')) continue; -// if (mode === 'kafka' && (element.startsWith('jmx') || element.startsWith("'jmx"))) continue; - -// const lastSpace = element.lastIndexOf(' '); -// const metric = element.slice(0, lastSpace); -// const val = Number(element.slice(lastSpace + 1)); -// if (!isNaN(val)) { -// res.push({ metric, value: val, time, category }); -// } else { -// console.error('Invalid metric (skipped):\n', element); -// } -// } -// return res; -// }, - -// /** Fetch Prometheus metrics, parse them */ -// promMetricsQuery: async (config: ChronosConfig): Promise => { -// const baseURI = helpers.getMetricsURI(config); -// const query = -// config.mode === 'docker' -// ? 
baseURI + encodeURIComponent(`{__name__=~".+",name="${config.containerName}"}`) -// : baseURI + encodeURIComponent('{__name__=~".+",container=""}'); - -// try { -// // cast as any to avoid TS18046 -// const response = await axios.get(query); -// const result = response.data?.data?.result || []; -// return helpers.parseProm(config, result); -// } catch (error) { -// console.error(config.mode, '|', 'Error fetching from URI:', query, '\n', error); -// return []; -// } -// }, - -// /** Parse the Prometheus "result" array into standardized metrics objects */ -// parseProm: (config: ChronosConfig, data: any[]): MetricData[] => { -// const res: MetricData[] = []; -// const time = Date.now(); -// const category = config.mode === 'docker' ? `${config.containerName}` : 'Event'; - -// const names = new Set(); - -// for (const info of data) { -// let wholeName: string; -// let name: string; - -// if (config.mode === 'docker') { -// if (!info.metric?.name) continue; -// wholeName = info.metric['__name__']; -// name = wholeName.replace(/.*\/.*\//g, ''); -// } else { -// if (!info.metric?.job) continue; -// wholeName = info.metric.job + '/' + info.metric.instance + '/' + info.metric['__name__']; -// name = wholeName.replace(/.*\/.*\//g, ''); -// } - -// if (names.has(name)) continue; -// names.add(name); - -// let value = info.value; -// if (Array.isArray(value)) value = value[1]; // e.g. [timestamp, val] -// if (isNaN(value)) continue; - -// res.push({ -// metric: wholeName, -// value: Number(value), -// time, -// category, -// }); -// } -// console.log('is size equal?', res.length === new Set(res).size); -// return res; -// }, - -// /** Creates a Grafana dashboard via API */ -// createGrafanaDashboard: async ( -// metric: { metric: string }, -// datasource: { type: string; uid: string }, -// graphType: string, -// token: string -// ) => { -// let uid = metric.metric.replace(/.*\/.*\//g, ''); -// if (uid.length >= 40) { -// uid = uid.slice(uid.length - 39); -// } - -// const dashboard = { -// dashboard: { -// id: null, -// uid, -// title: uid, -// tags: ['templated'], -// timezone: 'browser', -// schemaVersion: 16, -// version: 0, -// refresh: '10s', -// panels: [] as any[], -// }, -// folderId: 0, -// overwrite: true, -// }; - -// dashboard.dashboard.panels.push(createGrafanaPanelObject(metric, datasource, graphType)); - -// try { -// const dashboardResponse = await axios.post( -// 'http://grafana:3000/api/dashboards/db', -// JSON.stringify(dashboard), -// { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, -// }, -// } -// ); -// if (dashboardResponse.status >= 400) { -// console.log( -// 'Error with POST request to Grafana Dashboards API. In createGrafanaDashboard.' -// ); -// } else { -// console.log(`πŸ“Š Grafana graphs for "${metric.metric}" are ready πŸ“Š `); -// } -// } catch (err) { -// console.log('Error creating Grafana dashboard:', err); -// } -// }, - -// /** Fetches an existing datasource from Grafana */ -// getGrafanaDatasource: async (token: string) => { -// console.log('In utilities.getGrafanaDatasource!!!'); -// const datasourceResponse = await axios.get('http://grafana:3000/api/datasources', { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, -// }, -// }); -// console.log('Successfully fetched datasource from Grafana API'); - -// // Safe fallback -// const datasource = { -// type: datasourceResponse.data?.[0]?.type ?? 'unknown', -// uid: datasourceResponse.data?.[0]?.uid ?? 
'unknown', -// }; -// return datasource; -// }, - -// /** Fetches datasource from a different endpoint, presumably a custom plugin */ -// updateGrafanaDatasource: async (token: string) => { -// const datasourceResponse = await axios.get('http://localhost:32000/api/datasources', { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, -// }, -// }); -// console.log('Successfully fetched datasource from Grafana API'); -// const datasource = { -// type: datasourceResponse.data?.[0]?.type ?? 'unknown', -// uid: datasourceResponse.data?.[0]?.uid ?? 'unknown', -// }; -// console.log('datasource is', datasource); -// return datasource; -// }, - -// /** Updates an existing dashboard in Grafana */ -// updateGrafanaDashboard: async ( -// graphType: string, -// token: string, -// metric: string, -// datasource: any -// ) => { -// let uid = metric.replace(/.*\/.*\//g, ''); -// if (uid.length >= 40) { -// uid = uid.slice(uid.length - 39); -// } - -// const dashboard = { -// dashboard: { -// id: null, -// uid, -// title: uid, -// tags: ['templated'], -// timezone: 'browser', -// schemaVersion: 16, -// version: 0, -// refresh: '10s', -// panels: [] as any[], -// }, -// folderId: 0, -// overwrite: true, -// }; - -// dashboard.dashboard.panels.push(updateGrafanaPanelObject(metric, datasource, graphType)); - -// try { -// const dashboardResponse = await axios.post( -// 'http://localhost:32000/api/dashboards/db', -// JSON.stringify(dashboard), -// { -// headers: { -// 'Content-Type': 'application/json', -// Authorization: token, -// }, -// } -// ); - -// if (dashboardResponse.status >= 400) { -// console.log( -// 'Error with POST request to Grafana Dashboards API. In updateGrafanaDashboard.' -// ); -// } else { -// console.log( -// `πŸ“Š Grafana graphs for "${metric.replace(/.*\/.*\//g, '')}" have been updated!!!` -// ); -// } -// } catch (err) { -// console.log('Error updating Grafana dashboard:', err); -// } -// }, -// }; - -// /** Export an object containing `helpers` */ -// export default { -// helpers, -// }; import axios from 'axios'; import { createGrafanaPanelObject, updateGrafanaPanelObject } from './GrafanaPanel.js'; @@ -2484,4 +393,4 @@ const helpers: Helpers = { /** Export an object containing `helpers` with an explicit type */ const utilities: Utilities = { helpers }; -export default utilities; +export default utilities; \ No newline at end of file diff --git a/chronos_npm_package/dist/chronos.js b/chronos_npm_package/dist/chronos.js index 3b8ce6413..0807367ea 100644 --- a/chronos_npm_package/dist/chronos.js +++ b/chronos_npm_package/dist/chronos.js @@ -336,7 +336,7 @@ import MongoServerWrapper from './wrappers/MongoServerWrapper.js'; import PostgresClientWrapper from './wrappers/PostgresClientWrapper.js'; import PostgresServerWrapper from './wrappers/PostgresServerWrapper.js'; import utilities from './controllers/utilities.js'; -class Chronos { +export class Chronos { constructor(config) { if (!config) { throw new Error('Chronos config is undefined'); @@ -464,5 +464,5 @@ class Chronos { client.metadata = server.metadataHolder; } } -export default Chronos; +// export default Chronos; //# sourceMappingURL=chronos.js.map \ No newline at end of file diff --git a/chronos_npm_package/models/ContainerInfo.ts b/chronos_npm_package/models/ContainerInfo.ts index 631309706..97c0c44f7 100644 --- a/chronos_npm_package/models/ContainerInfo.ts +++ b/chronos_npm_package/models/ContainerInfo.ts @@ -19,10 +19,6 @@ const DockerSchema = new Schema({ }, }); -// module.exports = 
ContainerName => { -// console.log('Inside Docker Schema ContainerInfo.js LN52', ContainerName) -// return mongoose.model(ContainerName, DockerSchema); -// }; // invalid format for TS files const ContainerName = mongoose.model('ContainerName', DockerSchema); diff --git a/chronos_npm_package/models/HealthModel.ts b/chronos_npm_package/models/HealthModel.ts index c87bf08b7..7d9f27211 100644 --- a/chronos_npm_package/models/HealthModel.ts +++ b/chronos_npm_package/models/HealthModel.ts @@ -18,8 +18,8 @@ const HealthSchema = new Schema({ default: '', }, }); -// module.exports = MicroSrvName => mongoose.model(MicroSrvName, HealthSchema); // invalid format of export for TS files + const MicroSrvName = mongoose.model('MicroSrvName', HealthSchema); -// Export the model + export default MicroSrvName; \ No newline at end of file diff --git a/chronos_npm_package/models/KafkaModel.ts b/chronos_npm_package/models/KafkaModel.ts index 5165b358f..764154728 100644 --- a/chronos_npm_package/models/KafkaModel.ts +++ b/chronos_npm_package/models/KafkaModel.ts @@ -19,7 +19,6 @@ const KafkaSchema = new Schema({ }, }); -// module.exports = mongoose.model('kafkametrics', KafkaSchema); // invalid format for TS files const KafkaModel = mongoose.model('KafkaModel', KafkaSchema); diff --git a/chronos_npm_package/models/KubernetesModel.ts b/chronos_npm_package/models/KubernetesModel.ts index b20d4dbc2..d019aaf03 100644 --- a/chronos_npm_package/models/KubernetesModel.ts +++ b/chronos_npm_package/models/KubernetesModel.ts @@ -19,9 +19,6 @@ const KubernetesSchema = new Schema({ }, }); -// module.exports = mongoose.model('kubernetesmetrics', KubernetesSchema); // invalid format for TS files - - const KubernetesModel = mongoose.model('KubernetesModel', KubernetesSchema); diff --git a/chronos_npm_package/models/MetricsModel.ts b/chronos_npm_package/models/MetricsModel.ts index 1ebf5901c..a617f77d4 100644 --- a/chronos_npm_package/models/MetricsModel.ts +++ b/chronos_npm_package/models/MetricsModel.ts @@ -12,17 +12,14 @@ const MetricsSchema = new Schema({ default: true, }, mode: { - type: String + type: String, }, category: { - type:String - } + type: String, + }, }); -// module.exports = mongoose.model('metrics', MetricsSchema); - - const MetricsModel = mongoose.model('MetricsModel', MetricsSchema); // Export the model -export default MetricsModel; \ No newline at end of file +export default MetricsModel; diff --git a/chronos_npm_package/models/ServicesModel.ts b/chronos_npm_package/models/ServicesModel.ts index 871604372..044ad570d 100644 --- a/chronos_npm_package/models/ServicesModel.ts +++ b/chronos_npm_package/models/ServicesModel.ts @@ -1,4 +1,4 @@ -import mongoose from'mongoose'; +import mongoose from 'mongoose'; const { Schema } = mongoose; @@ -13,8 +13,6 @@ const ServicesSchema = new Schema({ }, }); -// module.exports = mongoose.model('services', ServicesSchema); // invalid format when converted to TS - // Define the model const ServicesModel = mongoose.model('services', ServicesSchema); diff --git a/chronos_npm_package/tsconfig.json b/chronos_npm_package/tsconfig.json index 53dc7ad59..a5b2b5e0b 100644 --- a/chronos_npm_package/tsconfig.json +++ b/chronos_npm_package/tsconfig.json @@ -1,80 +1,26 @@ -// { -// "compilerOptions": { -// "target": "ES2020", -// "noImplicitAny": false, -// "noImplicitThis": true, -// "strictNullChecks": true, -// "strict": false, -// "pretty": true, -// "outDir": "./build", -// "sourceMap": true, -// "module": "NodeNext", //updating module from CommonJS. 
Olivia suggested at a previous time to use "NodeNext" as CommonJS is outdated -// "allowJs": true, -// "checkJs": false, -// "jsx": "react-jsx", -// "esModuleInterop": true, -// "moduleResolution": "nodenext", //@see https://stackoverflow.com/questions/72638285/cannot-find-module-mongoose-with-typescript -// "resolveJsonModule": true, //! Error: Option '--resolveJsonModule' cannot be specified when 'moduleResolution' is set to 'classic'.ts -// "skipLibCheck": true, -// "noEmitOnError": false, -// "allowSyntheticDefaultImports": true, -// // "types": ["jest", "node", "@testing-library/jest-dom"], -// "types": ["jest", "node"] -// }, -// "exclude": [ -// "node_modules", -// "dist" -// ], -// "include": ["./**/*.ts", "./models/*.ts","src/types/hpropagate.d.ts"] //updated the include to have any .ts extensions in this folder -// } { "compilerOptions": { /* Language / Emit Targets */ "target": "ES2020", - + /* Strictness / Type Checking */ "noImplicitAny": false, "noImplicitThis": true, - "strictNullChecks": true, - "strict": false, // You have this disabled, you can tighten it later if you want + "strict": false, "skipLibCheck": true, - - /* Paths & Outputs */ - /* "outDir": "./build",*/ "sourceMap": true, - "noEmitOnError": false, - /* Module System Setup */ "module": "NodeNext", "moduleResolution": "nodenext", - "rootDir": ".", -"outDir": "dist", - "allowSyntheticDefaultImports": true, + "outDir": "dist", "esModuleInterop": true, - "resolveJsonModule": true, - /* Other Options */ - "jsx": "react-jsx", + "jsx": "react", "allowJs": true, - "checkJs": false, - - /* Type Declarations to Include */ - "types": ["jest", "node"] + "checkJs": false }, - "include": [ - - // "controllers/**/*.ts", - - // "server/**/*.ts", - - // "models/**/*.ts", - - "**/*.ts" - ], + "include": ["**/*.ts"], - "exclude": [ - "node_modules" - /* "dist","build"*/ - ] + "exclude": ["node_modules"] } diff --git a/chronos_npm_package/wrappers/MongoClientWrapper.js b/chronos_npm_package/wrappers/MongoClientWrapper.js new file mode 100644 index 000000000..8431e6892 --- /dev/null +++ b/chronos_npm_package/wrappers/MongoClientWrapper.js @@ -0,0 +1,139 @@ +import mongoose from 'mongoose'; +import grpc from '@grpc/grpc-js'; +import ComModel from '../models/CommunicationModel'; + +async function connect(URI) { + try { + await mongoose.connect(`${URI}`); + // Print success message + console.log(`Chronos MongoDB is connected at ${URI.slice(0, 20)}...`); + } catch ({ message }) { + // Print error message + console.log('Error connecting to MongoDB:', message); + } +} + +function makeMethods(clientWrapper, client, metadata, names) { + connect(clientWrapper.URI); + for (let i = 0; i < names.length; i++) { + const name = names[i]; + clientWrapper[name] = (message, callback, meta = null) => { + let currentMetadata; + if (meta) { + currentMetadata = meta; + } else { + currentMetadata = clientWrapper.metadata.metadata; + } + const id = currentMetadata.get('id')[0]; + const newComm = { + microservice: clientWrapper.config.microservice, + endpoint: ' ', + request: name, + responsestatus: 0, + responsemessage: ' ', + correlatingid: id, + }; + client[name](message, currentMetadata, (error, response) => { + if (error) { + newComm.responsestatus = error.code; + } + const responseCom = new ComModel(newComm); + responseCom + .save() + .then(() => { + console.log('Request cycle saved'); + }) + .catch(err => console.log(`Error saving communications: `, err.message)); + callback(error, response); + }); + }; + } +} + +class ClientWrapper { + 
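+  // `client` is expected to be a gRPC stub built from a loaded package definition, `service` the
+  // matching service definition (Object.keys(service.service) supplies the RPC method names to
+  // wrap), and `userConfig` the same config object given to Chronos (database.URI and
+  // microservice are read from it). A rough usage sketch -- the HeroService proto and method
+  // names below are illustrative only, not part of this package:
+  //
+  //   const stub = new heroPackage.HeroService('localhost:50051', grpc.credentials.createInsecure());
+  //   const wrapped = new ClientWrapper(stub, heroPackage.HeroService, config);
+  //   wrapped.getHero({ id: 1 }, (err, res) => console.log(err || res), incomingCall.metadata);
+  //
+  // The optional third argument is gRPC metadata carrying the correlating `id`; when omitted, the
+  // wrapper falls back to whatever metadata holder has been linked onto `wrapped.metadata`.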
constructor(client, service, userConfig) { + this.URI = userConfig.database.URI; + this.config = userConfig; + this.metadata = {}; + const names = Object.keys(service.service); + makeMethods(this, client, this.metadata, names); + } +} + +export default ClientWrapper; +// import mongoose from 'mongoose'; +// import grpc from '@grpc/grpc-js'; +// import CommunicationModel from '../models/CommunicationModel.js'; + +// async function connect(URI: string) { +// try { +// await mongoose.connect(`${URI}`); +// // Print success message +// console.log(`Chronos MongoDB is connected at ${URI.slice(0, 20)}...`); +// } catch (error: any) { +// // Print error message +// console.log('Error connecting to MongoDB:', error.message); +// } +// } + +// function makeMethods(clientWrapper: ClientWrapper, client: any, metadata: any, names: string[]) { +// connect(clientWrapper.URI); +// for (let i = 0; i < names.length; i++) { +// const name = names[i]; +// clientWrapper[name] = (message: any, callback: Function, meta: any = null) => { +// let currentMetadata; +// if (meta) { +// currentMetadata = meta; +// } else { +// currentMetadata = clientWrapper.metadata.metadata; +// } +// const id = currentMetadata.get('id')[0]; +// const newComm = { +// microservice: clientWrapper.config.microservice, +// endpoint: ' ', +// request: name, +// responsestatus: 0, +// responsemessage: ' ', +// correlatingid: id, +// }; +// client[name](message, currentMetadata, (error: any, response: any) => { +// if (error) { +// newComm.responsestatus = error.code; +// } +// const responseCom = new CommunicationModel(newComm); +// responseCom +// .save() +// .then(() => { +// console.log('Request cycle saved'); +// }) +// .catch((err: any) => console.log(`Error saving communications: `, err.message)); +// callback(error, response); +// }); +// }; +// } +// } + +// interface DatabaseConfig { +// URI: string; +// } + +// interface UserConfig { +// database: DatabaseConfig; +// microservice: string; +// } + +// class ClientWrapper { +// URI: string; +// config: UserConfig; +// metadata: Record; + +// constructor(client: any, service: any, userConfig: UserConfig) { +// this.URI = userConfig.database.URI; +// this.config = userConfig; +// this.metadata = {}; +// const names = Object.keys(service.service); +// makeMethods(this, client, this.metadata, names); +// } +// } + +// export default ClientWrapper; diff --git a/chronos_npm_package/wrappers/MongoClientWrapper.ts b/chronos_npm_package/wrappers/MongoClientWrapper.ts index 58cbf15ba..80a63b05e 100644 --- a/chronos_npm_package/wrappers/MongoClientWrapper.ts +++ b/chronos_npm_package/wrappers/MongoClientWrapper.ts @@ -1,66 +1,4 @@ -// import mongoose from 'mongoose'; -// import grpc from '@grpc/grpc-js'; -// import ComModel from '../models/CommunicationModel'; -// async function connect(URI) { -// try { -// await mongoose.connect(`${URI}`); -// // Print success message -// console.log(`Chronos MongoDB is connected at ${URI.slice(0, 20)}...`); -// } catch ({ message }) { -// // Print error message -// console.log('Error connecting to MongoDB:', message); -// } -// } - -// function makeMethods(clientWrapper, client, metadata, names) { -// connect(clientWrapper.URI); -// for (let i = 0; i < names.length; i++) { -// const name = names[i]; -// clientWrapper[name] = (message, callback, meta = null) => { -// let currentMetadata; -// if (meta) { -// currentMetadata = meta; -// } else { -// currentMetadata = clientWrapper.metadata.metadata; -// } -// const id = currentMetadata.get('id')[0]; -// 
const newComm = { -// microservice: clientWrapper.config.microservice, -// endpoint: ' ', -// request: name, -// responsestatus: 0, -// responsemessage: ' ', -// correlatingid: id, -// }; -// client[name](message, currentMetadata, (error, response) => { -// if (error) { -// newComm.responsestatus = error.code; -// } -// const responseCom = new ComModel(newComm); -// responseCom -// .save() -// .then(() => { -// console.log('Request cycle saved'); -// }) -// .catch(err => console.log(`Error saving communications: `, err.message)); -// callback(error, response); -// }); -// }; -// } -// } - -// class ClientWrapper { -// constructor(client, service, userConfig) { -// this.URI = userConfig.database.URI; -// this.config = userConfig; -// this.metadata = {}; -// const names = Object.keys(service.service); -// makeMethods(this, client, this.metadata, names); -// } -// } - -// export default ClientWrapper; import mongoose from 'mongoose'; import grpc from '@grpc/grpc-js'; import CommunicationModel from '../models/CommunicationModel.js'; diff --git a/chronos_npm_package/wrappers/MongoServerWrapper.js b/chronos_npm_package/wrappers/MongoServerWrapper.js new file mode 100644 index 000000000..f120cb105 --- /dev/null +++ b/chronos_npm_package/wrappers/MongoServerWrapper.js @@ -0,0 +1,121 @@ +import mongoose from 'mongoose'; +import grpc from '@grpc/grpc-js'; +import ComModel from '../models/CommunicationModel'; + +async function connect(URI) { + try { + await mongoose.connect(`${URI}`); + // Print success message + console.log(`Chronos MongoDB is connected at ${URI.slice(0, 20)}...`); + } catch ({ message }) { + // Print error message + console.log('Error connecting to MongoDB:', message); + } +} + +function wrapMethods(server, metadataHolder, methods, userConfig) { + connect(userConfig.database.URI); + const keys = Object.keys(methods); + const wrappedMethods = {}; + for (let i = 0; i < keys.length; i++) { + const name = keys[i]; + wrappedMethods[name] = function (call, callback) { + metadataHolder.metadata = call.metadata; + const id = metadataHolder.metadata.get('id')[0]; + methods[name](call, (error, response) => { + // after server's response has been sent + const newComms = { + microservice: userConfig.microservice, + endpoint: ' ', + request: name, + responsestatus: 0, + responsemessage: ' ', + correlatingid: id, + }; + const communication = new ComModel(newComms); + communication + .save() + .then(() => { + console.log('Request cycle saved'); + }) + .catch(err => console.log(`Error saving communications: `, err.message)); + callback(error, response); + }); + }; + } + return wrappedMethods; +} + +class ServerWrapper { + constructor(server, proto, methods, userConfig) { + this.metadataHolder = {}; + const wrappedMethods = wrapMethods(server, this.metadataHolder, methods, userConfig); + server.addService(proto, wrappedMethods); + } +} + +export default ServerWrapper; +// import mongoose from 'mongoose'; +// import grpc from '@grpc/grpc-js'; +// import ComModel from '../models/CommunicationModel.js'; + +// async function connect(URI: string) { +// try { +// await mongoose.connect(`${URI}`); +// // Print success message +// console.log(`Chronos MongoDB is connected at ${URI.slice(0, 20)}...`); +// } catch (error: any) { +// // Print error message +// console.log('Error connecting to MongoDB:', error.message); +// } +// } + +// function wrapMethods( +// server: any, +// metadataHolder: Record, +// methods: Record, +// userConfig: any +// ) { +// connect(userConfig.database.URI); +// const keys = 
Object.keys(methods); +// const wrappedMethods: Record = {}; + +// for (const name of keys) { +// wrappedMethods[name] = function (call: any, callback: Function) { +// metadataHolder.metadata = call.metadata; +// const id = metadataHolder.metadata.get('id')[0]; +// methods[name](call, (error: any, response: any) => { +// // After server's response has been sent +// const newComms = { +// microservice: userConfig.microservice, +// endpoint: ' ', +// request: name, +// responsestatus: 0, +// responsemessage: ' ', +// correlatingid: id, +// }; +// const communication = new ComModel(newComms); +// communication +// .save() +// .then(() => { +// console.log('Request cycle saved'); +// }) +// .catch((err: any) => console.log(`Error saving communications: `, err.message)); +// callback(error, response); +// }); +// }; +// } +// return wrappedMethods; +// } + +// class ServerWrapper { +// metadataHolder: Record; + +// constructor(server: any, proto: any, methods: Record, userConfig: any) { +// this.metadataHolder = {}; // βœ… Now explicitly defined and recognized +// const wrappedMethods = wrapMethods(server, this.metadataHolder, methods, userConfig); +// server.addService(proto, wrappedMethods); +// } +// } + +// export default ServerWrapper; diff --git a/chronos_npm_package/wrappers/MongoServerWrapper.ts b/chronos_npm_package/wrappers/MongoServerWrapper.ts index e1e000d62..946b2dade 100644 --- a/chronos_npm_package/wrappers/MongoServerWrapper.ts +++ b/chronos_npm_package/wrappers/MongoServerWrapper.ts @@ -1,60 +1,3 @@ -// import mongoose from 'mongoose'; -// import grpc from '@grpc/grpc-js'; -// import ComModel from '../models/CommunicationModel'; - -// async function connect(URI) { -// try { -// await mongoose.connect(`${URI}`); -// // Print success message -// console.log(`Chronos MongoDB is connected at ${URI.slice(0, 20)}...`); -// } catch ({ message }) { -// // Print error message -// console.log('Error connecting to MongoDB:', message); -// } -// } - -// function wrapMethods(server, metadataHolder, methods, userConfig) { -// connect(userConfig.database.URI); -// const keys = Object.keys(methods); -// const wrappedMethods = {}; -// for (let i = 0; i < keys.length; i++) { -// const name = keys[i]; -// wrappedMethods[name] = function (call, callback) { -// metadataHolder.metadata = call.metadata; -// const id = metadataHolder.metadata.get('id')[0]; -// methods[name](call, (error, response) => { -// // after server's response has been sent -// const newComms = { -// microservice: userConfig.microservice, -// endpoint: ' ', -// request: name, -// responsestatus: 0, -// responsemessage: ' ', -// correlatingid: id, -// }; -// const communication = new ComModel(newComms); -// communication -// .save() -// .then(() => { -// console.log('Request cycle saved'); -// }) -// .catch(err => console.log(`Error saving communications: `, err.message)); -// callback(error, response); -// }); -// }; -// } -// return wrappedMethods; -// } - -// class ServerWrapper { -// constructor(server, proto, methods, userConfig) { -// this.metadataHolder = {}; -// const wrappedMethods = wrapMethods(server, this.metadataHolder, methods, userConfig); -// server.addService(proto, wrappedMethods); -// } -// } - -// export default ServerWrapper; import mongoose from 'mongoose'; import grpc from '@grpc/grpc-js'; import ComModel from '../models/CommunicationModel.js'; diff --git a/chronos_npm_package/wrappers/PostgresClientWrapper.js b/chronos_npm_package/wrappers/PostgresClientWrapper.js new file mode 100644 index 
000000000..e22c4c617
--- /dev/null
+++ b/chronos_npm_package/wrappers/PostgresClientWrapper.js
@@ -0,0 +1,278 @@
+/* eslint-disable no-loop-func */
+const { Client } = require('pg');
+const grpc = require('@grpc/grpc-js');
+
+
+async function connect(URI, client) {
+  try {
+    await client.connect();
+    // Print success message
+    console.log(`Connected to database at ${URI.slice(0, 24)}...`);
+    client.query(
+      `CREATE TABLE IF NOT EXISTS grpc_communications(
+        _id serial PRIMARY KEY,
+        microservice VARCHAR(248) NOT NULL,
+        request varchar(32) NOT NULL,
+        responsestatus INTEGER,
+        time TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+        correlatingId varchar(500)
+      )`,
+      (err, results) => {
+        if (err) {
+          throw err;
+        }
+      }
+    );
+  } catch ({ message }) {
+    // Print error message
+    console.log('Error connecting to PostgreSQL DB:', message);
+  }
+}
+
+function makeMethods(clientWrapper, client, metadata, names) {
+  connect(clientWrapper.URI, clientWrapper.SQLclient);
+  for (let i = 0; i < names.length; i++) {
+    const name = names[i];
+    clientWrapper[name] = (message, callback, meta = null) => {
+      let currentMetadata;
+      if (meta) {
+        currentMetadata = meta;
+      } else {
+        // get metadata from link
+        currentMetadata = clientWrapper.metadata.metadata;
+      }
+      client[name](message, currentMetadata, (error, response) => {
+        // add status codes here
+        const queryString = `
+          INSERT INTO grpc_communications (microservice, request, responsestatus, correlatingId)
+          VALUES ($1, $2, $3, $4);`;
+        const microservice = clientWrapper.config.microservice;
+        const request = name;
+        let responsestatus = 0;
+        const correlatingId = currentMetadata.get('id')[0];
+        if (error) {
+          responsestatus = error.code;
+        }
+        const values = [microservice, request, responsestatus, correlatingId];
+        clientWrapper.SQLclient.query(queryString, values, (err, result) => {
+          if (err) {
+            throw err;
+          }
+          console.log('Request cycle saved');
+        });
+        callback(error, response);
+      });
+    };
+  }
+}
+
+class ClientWrapper {
+  constructor(client, service, userConfig) {
+    this.URI = userConfig.database.URI;
+    this.config = userConfig;
+    this.metadata = {};
+    this.SQLclient = new Client(this.URI);
+    const names = Object.keys(service.service);
+    makeMethods(this, client, this.metadata, names);
+  }
+}
+
+module.exports = ClientWrapper;
+
+/* eslint-disable no-loop-func */
+// import { Client } from 'pg';
+
+// import pkg from 'pg';
+// const { Client } = pkg;
+// import grpc from '@grpc/grpc-js';
+
+// async function connect(URI: string, client: Client) {
+//   try {
+//     await client.connect();
+//     // Print success message
+//     console.log(`Connected to database at ${URI.slice(0, 24)}...`);
+
+//     await client.query(`
+//       CREATE TABLE IF NOT EXISTS grpc_communications(
+//         _id serial PRIMARY KEY,
+//         microservice VARCHAR(248) NOT NULL,
+//         request VARCHAR(32) NOT NULL,
+//         responsestatus INTEGER,
+//         time TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+//         correlatingId VARCHAR(500)
+//       )
+//     `);
+//   } catch (error: any) {
+//     console.log('Error connecting to PostgreSQL DB:', error.message);
+//   }
+// }
+
+// function makeMethods(
+//   clientWrapper: ClientWrapper,
+//   client: any,
+//   metadata: any,
+//   names: string[]
+// ) {
+//   connect(clientWrapper.URI, clientWrapper.SQLclient);
+
+//   for (const name of names) {
+//     clientWrapper[name] = (message: any, callback: Function, meta: any = null) => {
+//       let currentMetadata = meta || clientWrapper.metadata.metadata;
+
+//       client[name](message, currentMetadata, (error: any, response: any) => {
+//         // Define query
+//
const queryString = ` +// INSERT INTO grpc_communications (microservice, request, responsestatus, correlatingId) +// VALUES ($1, $2, $3, $4) +// `; + +// // Fix: Correctly extract microservice name +// const microservice = clientWrapper.config.microservice; +// const request = name; +// let responsestatus = error ? error.code : 0; +// const correlatingId = currentMetadata.get('id')[0]; + +// const values = [microservice, request, responsestatus, correlatingId]; + +// // Fix: Use correct SQL client reference +// clientWrapper.SQLclient.query(queryString, values, (err: any) => { +// if (err) { +// console.error('Error saving request cycle:', err.message); +// } else { +// console.log('Request cycle saved'); +// } +// }); + +// callback(error, response); +// }); +// }; +// } +// } + +// interface DatabaseConfig { +// URI: string; +// } + +// interface UserConfig { +// database: DatabaseConfig; +// microservice: string; +// } + +// class ClientWrapper { +// URI: string; +// config: UserConfig; +// metadata: Record; +// SQLclient: Client; + +// constructor(client: any, service: any, userConfig: UserConfig) { +// this.URI = userConfig.database.URI; +// this.config = userConfig; +// this.metadata = {}; +// this.SQLclient = new Client({ connectionString: this.URI }); // Fixed Client Initialization + +// const names = Object.keys(service.service); +// makeMethods(this, client, this.metadata, names); +// } +// } + +// export default ClientWrapper; + +// // wrappers/PostgresClientWrapper.ts + +// import pkg from 'pg'; +// import type { Client as PgClient } from 'pg'; +// const { Client } = pkg; + +// import grpc from '@grpc/grpc-js'; + +// async function connect(URI: string, client: PgClient) { +// try { +// await client.connect(); +// // Print success message +// console.log(`Connected to database at ${URI.slice(0, 24)}...`); + +// await client.query(` +// CREATE TABLE IF NOT EXISTS grpc_communications( +// _id serial PRIMARY KEY, +// microservice VARCHAR(248) NOT NULL, +// request VARCHAR(32) NOT NULL, +// responsestatus INTEGER, +// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, +// correlatingId VARCHAR(500) +// ) +// `); +// } catch (error: any) { +// console.log('Error connecting to PostgreSQL DB:', error.message); +// } +// } + +// function makeMethods( +// clientWrapper: ClientWrapper, +// client: any, +// metadata: any, +// names: string[] +// ) { +// connect(clientWrapper.URI, clientWrapper.SQLclient); + +// for (const name of names) { +// clientWrapper[name] = (message: any, callback: Function, meta: any = null) => { +// let currentMetadata = meta || clientWrapper.metadata.metadata; + +// client[name](message, currentMetadata, (error: any, response: any) => { +// // Define query +// const queryString = ` +// INSERT INTO grpc_communications (microservice, request, responsestatus, correlatingId) +// VALUES ($1, $2, $3, $4) +// `; + +// // Correctly extract microservice name +// const microservice = clientWrapper.config.microservice; +// const request = name; +// let responsestatus = error ? 
error.code : 0; +// const correlatingId = currentMetadata.get('id')[0]; + +// const values = [microservice, request, responsestatus, correlatingId]; + +// // Use correct SQL client reference +// clientWrapper.SQLclient.query(queryString, values, (err: any) => { +// if (err) { +// console.error('Error saving request cycle:', err.message); +// } else { +// console.log('Request cycle saved'); +// } +// }); + +// callback(error, response); +// }); +// }; +// } +// } + +// interface DatabaseConfig { +// URI: string; +// } + +// interface UserConfig { +// database: DatabaseConfig; +// microservice: string; +// } + +// class ClientWrapper { +// URI: string; +// config: UserConfig; +// metadata: Record; +// SQLclient: PgClient; + +// constructor(client: any, service: any, userConfig: UserConfig) { +// this.URI = userConfig.database.URI; +// this.config = userConfig; +// this.metadata = {}; +// // Initialize the SQL client using the Client constructor from pg +// this.SQLclient = new Client({ connectionString: this.URI }); + +// const names = Object.keys(service.service); +// makeMethods(this, client, this.metadata, names); +// } +// } + +// export default ClientWrapper; diff --git a/chronos_npm_package/wrappers/PostgresClientWrapper.ts b/chronos_npm_package/wrappers/PostgresClientWrapper.ts index 690c25710..04c4f6f50 100644 --- a/chronos_npm_package/wrappers/PostgresClientWrapper.ts +++ b/chronos_npm_package/wrappers/PostgresClientWrapper.ts @@ -1,184 +1,3 @@ -// /* eslint-disable no-loop-func */ -// import { Client } from 'pg'; -// import grpc from '@grpc/grpc-js'; - - -// async function connect(URI, client) { -// try { -// await client.connect(); -// // Print success message -// console.log(`Connected to database at ${URI.slice(0, 24)}...`); -// client.query( -// `CREATE TABLE IF NOT EXISTS grpc_communications( -// _id serial PRIMARY KEY, -// microservice VARCHAR(248) NOT NULL, -// request varchar(32) NOT NULL, -// responsestatus INTEGER, -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, -// correlatingId varchar(500) -// )`, -// (err, results) => { -// if (err) { -// throw err; -// } -// } -// ); -// } catch ({ message }) { -// // Print error message -// console.log('Error connecting to PostgreSQL DB:', message); -// } -// } - -// function makeMethods(clientWrapper, client, metadata, names) { -// connect(clientWrapper.URI, clientWrapper.SQLclient); -// for (let i = 0; i < names.length; i++) { -// const name = names[i]; -// clientWrapper[name] = (message, callback, meta = null) => { -// let currentMetadata; -// if (meta) { -// currentMetadata = meta; -// } else { -// // get metadata from link -// currentMetadata = clientWrapper.metadata.metadata; -// } -// client[name](message, currentMetadata, (error, response) => { -// // add status codes here -// const queryString = ` -// INSERT INTO grpc_communications (microservice, request, responsestatus, correlatingId) -// VALUES ($1, $2, $3, $4);`; -// const { microservice } = clientWrapper.config.microservice; -// const request = name; -// let responsestatus = 0; -// const correlatingId = currentMetadata.get('id')[0]; -// if (error) { -// responsestatus = error.code; -// } -// const values = [microservice, request, responsestatus, correlatingId]; -// clientWrapper.SQLclientSQLclient.query(queryString, values, (err, result) => { -// if (err) { -// throw err; -// } -// console.log('Request cycle saved'); -// }); -// callback(error, response); -// }); -// }; -// } -// } - -// class ClientWrapper { -// constructor(client, service, userConfig) { -// this.URI = 
userConfig.database.URI; -// this.config = userConfig; -// this.metadata = {}; -// this.SQLclient = new Client(this.URI); -// const names = Object.keys(service.service); -// makeMethods(this, client, this.metadata, names); -// } -// } - -// export default ClientWrapper; - -/* eslint-disable no-loop-func */ -// import { Client } from 'pg'; - -// import pkg from 'pg'; -// const { Client } = pkg; -// import grpc from '@grpc/grpc-js'; - -// async function connect(URI: string, client: Client) { -// try { -// await client.connect(); -// // Print success message -// console.log(`Connected to database at ${URI.slice(0, 24)}...`); - -// await client.query(` -// CREATE TABLE IF NOT EXISTS grpc_communications( -// _id serial PRIMARY KEY, -// microservice VARCHAR(248) NOT NULL, -// request VARCHAR(32) NOT NULL, -// responsestatus INTEGER, -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, -// correlatingId VARCHAR(500) -// ) -// `); -// } catch (error: any) { -// console.log('Error connecting to PostgreSQL DB:', error.message); -// } -// } - -// function makeMethods( -// clientWrapper: ClientWrapper, -// client: any, -// metadata: any, -// names: string[] -// ) { -// connect(clientWrapper.URI, clientWrapper.SQLclient); - -// for (const name of names) { -// clientWrapper[name] = (message: any, callback: Function, meta: any = null) => { -// let currentMetadata = meta || clientWrapper.metadata.metadata; - -// client[name](message, currentMetadata, (error: any, response: any) => { -// // Define query -// const queryString = ` -// INSERT INTO grpc_communications (microservice, request, responsestatus, correlatingId) -// VALUES ($1, $2, $3, $4) -// `; - -// // Fix: Correctly extract microservice name -// const microservice = clientWrapper.config.microservice; -// const request = name; -// let responsestatus = error ? 
error.code : 0; -// const correlatingId = currentMetadata.get('id')[0]; - -// const values = [microservice, request, responsestatus, correlatingId]; - -// // Fix: Use correct SQL client reference -// clientWrapper.SQLclient.query(queryString, values, (err: any) => { -// if (err) { -// console.error('Error saving request cycle:', err.message); -// } else { -// console.log('Request cycle saved'); -// } -// }); - -// callback(error, response); -// }); -// }; -// } -// } - -// interface DatabaseConfig { -// URI: string; -// } - -// interface UserConfig { -// database: DatabaseConfig; -// microservice: string; -// } - -// class ClientWrapper { -// URI: string; -// config: UserConfig; -// metadata: Record; -// SQLclient: Client; - -// constructor(client: any, service: any, userConfig: UserConfig) { -// this.URI = userConfig.database.URI; -// this.config = userConfig; -// this.metadata = {}; -// this.SQLclient = new Client({ connectionString: this.URI }); // Fixed Client Initialization - -// const names = Object.keys(service.service); -// makeMethods(this, client, this.metadata, names); -// } -// } - -// export default ClientWrapper; - -// wrappers/PostgresClientWrapper.ts - import pkg from 'pg'; import type { Client as PgClient } from 'pg'; const { Client } = pkg; diff --git a/chronos_npm_package/wrappers/PostgresServerWrapper.js b/chronos_npm_package/wrappers/PostgresServerWrapper.js new file mode 100644 index 000000000..b2b706485 --- /dev/null +++ b/chronos_npm_package/wrappers/PostgresServerWrapper.js @@ -0,0 +1,326 @@ +const { Client } = require('pg'); +const grpc = require('@grpc/grpc-js'); + +let SQLclient; + +async function connect(URI) { + try { + SQLclient = new Client(URI); + await SQLclient.connect(); + // Print success message + console.log(`Connected to database at ${URI.slice(0, 24)}...`); + SQLclient.query( + `CREATE TABLE IF NOT EXISTS communications( + _id serial PRIMARY KEY, + microservice VARCHAR(248) NOT NULL, + request varchar(32) NOT NULL, + responsestatus INTEGER, + time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + correlatingId varchar(500) + )`, + (err, results) => { + if (err) { + throw err; + } + } + ); + } catch ({ message }) { + // Print error message + console.log('Error connecting to PostgreSQL DB:', message); + } +} + +function wrapMethods(server, metadataHolder, methods, userConfig, SQL) { + connect(userConfig.database.URI); + const keys = Object.keys(methods); + const wrappedMethods = {}; + for (let i = 0; i < keys.length; i++) { + const name = keys[i]; + wrappedMethods[name] = (call, callback) => { + metadataHolder.metadata = call.metadata; + methods[name](call, (error, response) => { + const queryString = ` + INSERT INTO grpc_communications (microservice, request, correlatingId) + VALUES ($1, $2, $3);`; + const microservice = userConfig.microservice; + const request = name; + const correlatingId = metadataHolder.metadata.get('id')[0]; + const values = [microservice, request, correlatingId]; + SQL.query(queryString, values, (err, result) => { + if (err) { + throw err; + } + console.log('Request cycle saved'); + }); + // after server's response has been sent + callback(error, response); + }); + }; + } + return wrappedMethods; +} + +class ServerWrapper { + constructor(server, proto, methods, userConfig) { + this.metadataHolder = {}; + const wrappedMethods = wrapMethods(server, this.metadataHolder, methods, userConfig, SQLclient); + server.addService(proto, wrappedMethods); + } +} + +module.exports = ServerWrapper; + + + + +// import { Client } from 'pg'; +// import 
grpc from '@grpc/grpc-js'; + +// let SQLclient; + +// async function connect(URI) { +// try { +// SQLclient = new Client(URI); +// await SQLclient.connect(); +// // Print success message +// console.log(`Connected to database at ${URI.slice(0, 24)}...`); +// SQLclient.query( +// `CREATE TABLE IF NOT EXISTS communications( +// _id serial PRIMARY KEY, +// microservice VARCHAR(248) NOT NULL, +// request varchar(32) NOT NULL, +// responsestatus INTEGER, +// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, +// correlatingId varchar(500) +// )`, +// (err, results) => { +// if (err) { +// throw err; +// } +// } +// ); +// } catch ({ message }) { +// // Print error message +// console.log('Error connecting to PostgreSQL DB:', message); +// } +// } + +// function wrapMethods(server, metadataHolder, methods, userConfig, SQL) { +// connect(userConfig.database.URI); +// const keys = Object.keys(methods); +// const wrappedMethods = {}; +// for (let i = 0; i < keys.length; i++) { +// const name = keys[i]; +// wrappedMethods[name] = (call, callback) => { +// metadataHolder.metadata = call.metadata; +// methods[name](call, (error, response) => { +// const queryString = ` +// INSERT INTO grpc_communications (microservice, request, correlatingId) +// VALUES ($1, $2, $3);`; +// const microservice = userConfig.microservice; +// const request = name; +// const correlatingId = metadataHolder.metadata.get('id')[0]; +// const values = [microservice, request, correlatingId]; +// SQL.query(queryString, values, (err, result) => { +// if (err) { +// throw err; +// } +// console.log('Request cycle saved'); +// }); +// // after server's response has been sent +// callback(error, response); +// }); +// }; +// } +// return wrappedMethods; +// } + +// class ServerWrapper { +// metadataHolder:Record; +// constructor(server:any, proto, methods, userConfig) { +// this.metadataHolder = {}; +// const wrappedMethods = wrapMethods(server, this.metadataHolder, methods, userConfig, SQLclient); +// server.addService(proto, wrappedMethods); +// } +// } + +// export default ServerWrapper; +// import { Client } from 'pg'; +// import grpc from '@grpc/grpc-js'; +// import pkg from 'pg'; +// const { Client } = pkg; +// async function connect(URI: string): Promise { +// try { +// const SQLclient = new Client({ connectionString: URI }); +// await SQLclient.connect(); + +// console.log(`Connected to database at ${URI.slice(0, 24)}...`); + +// await SQLclient.query(` +// CREATE TABLE IF NOT EXISTS communications( +// _id serial PRIMARY KEY, +// microservice VARCHAR(248) NOT NULL, +// request VARCHAR(32) NOT NULL, +// responsestatus INTEGER, +// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, +// correlatingId VARCHAR(500) +// ) +// `); + +// return SQLclient; +// } catch (error: any) { +// console.error('Error connecting to PostgreSQL DB:', error.message); +// throw error; +// } +// } + +// function wrapMethods( +// server: any, +// metadataHolder: Record, +// methods: Record, +// userConfig: any, +// SQLclient: Client +// ) { +// const keys = Object.keys(methods); +// const wrappedMethods: Record = {}; + +// for (const name of keys) { +// wrappedMethods[name] = (call: any, callback: Function) => { +// metadataHolder.metadata = call.metadata; + +// methods[name](call, async (error: any, response: any) => { +// try { +// const queryString = ` +// INSERT INTO grpc_communications (microservice, request, correlatingId) +// VALUES ($1, $2, $3)`; + +// const microservice = userConfig.microservice; +// const request = name; +// const correlatingId = 
metadataHolder.metadata.get('id')?.[0] || 'unknown_id'; + +// const values = [microservice, request, correlatingId]; + +// await SQLclient.query(queryString, values); +// console.log('Request cycle saved'); +// } catch (err: any) { +// console.error('Error saving request cycle:', err.message); +// } + +// callback(error, response); +// }); +// }; +// } +// return wrappedMethods; +// } + +// class ServerWrapper { +// metadataHolder: Record; +// SQLclient: Client; + +// constructor(server: any, proto: any, methods: Record, userConfig: any) { +// this.metadataHolder = {}; + +// connect(userConfig.database.URI) +// .then((SQLclient) => { +// this.SQLclient = SQLclient; +// const wrappedMethods = wrapMethods(server, this.metadataHolder, methods, userConfig, this.SQLclient); +// server.addService(proto, wrappedMethods); +// }) +// .catch((error) => { +// console.error('Failed to initialize SQL client:', error.message); +// }); +// } +// } + +// export default ServerWrapper; +// wrappers/PostgresServerWrapper.ts + +// import grpc from '@grpc/grpc-js'; +// import pkg from 'pg'; +// const { Client } = pkg; +// import type { Client as PgClient } from 'pg'; + +// async function connect(URI: string): Promise { +// try { +// const SQLclient = new Client({ connectionString: URI }); +// await SQLclient.connect(); + +// console.log(`Connected to database at ${URI.slice(0, 24)}...`); + +// await SQLclient.query(` +// CREATE TABLE IF NOT EXISTS communications( +// _id serial PRIMARY KEY, +// microservice VARCHAR(248) NOT NULL, +// request VARCHAR(32) NOT NULL, +// responsestatus INTEGER, +// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, +// correlatingId VARCHAR(500) +// ) +// `); + +// return SQLclient; +// } catch (error: any) { +// console.error('Error connecting to PostgreSQL DB:', error.message); +// throw error; +// } +// } + +// function wrapMethods( +// server: any, +// metadataHolder: Record, +// methods: Record, +// userConfig: any, +// SQLclient: PgClient +// ) { +// const keys = Object.keys(methods); +// const wrappedMethods: Record = {}; + +// for (const name of keys) { +// wrappedMethods[name] = (call: any, callback: Function) => { +// metadataHolder.metadata = call.metadata; + +// methods[name](call, async (error: any, response: any) => { +// try { +// const queryString = ` +// INSERT INTO grpc_communications (microservice, request, correlatingId) +// VALUES ($1, $2, $3)`; + +// const microservice = userConfig.microservice; +// const request = name; +// const correlatingId = metadataHolder.metadata.get('id')?.[0] || 'unknown_id'; + +// const values = [microservice, request, correlatingId]; + +// await SQLclient.query(queryString, values); +// console.log('Request cycle saved'); +// } catch (err: any) { +// console.error('Error saving request cycle:', err.message); +// } + +// callback(error, response); +// }); +// }; +// } +// return wrappedMethods; +// } + +// class ServerWrapper { +// metadataHolder: Record; +// SQLclient: PgClient; + +// constructor(server: any, proto: any, methods: Record, userConfig: any) { +// this.metadataHolder = {}; + +// connect(userConfig.database.URI) +// .then((SQLclient) => { +// this.SQLclient = SQLclient; +// const wrappedMethods = wrapMethods(server, this.metadataHolder, methods, userConfig, this.SQLclient); +// server.addService(proto, wrappedMethods); +// }) +// .catch((error) => { +// console.error('Failed to initialize SQL client:', error.message); +// }); +// } +// } + +// export default ServerWrapper; diff --git 
a/chronos_npm_package/wrappers/PostgresServerWrapper.ts b/chronos_npm_package/wrappers/PostgresServerWrapper.ts index a28b90b32..4ddc3f054 100644 --- a/chronos_npm_package/wrappers/PostgresServerWrapper.ts +++ b/chronos_npm_package/wrappers/PostgresServerWrapper.ts @@ -1,165 +1,3 @@ -// import { Client } from 'pg'; -// import grpc from '@grpc/grpc-js'; - -// let SQLclient; - -// async function connect(URI) { -// try { -// SQLclient = new Client(URI); -// await SQLclient.connect(); -// // Print success message -// console.log(`Connected to database at ${URI.slice(0, 24)}...`); -// SQLclient.query( -// `CREATE TABLE IF NOT EXISTS communications( -// _id serial PRIMARY KEY, -// microservice VARCHAR(248) NOT NULL, -// request varchar(32) NOT NULL, -// responsestatus INTEGER, -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, -// correlatingId varchar(500) -// )`, -// (err, results) => { -// if (err) { -// throw err; -// } -// } -// ); -// } catch ({ message }) { -// // Print error message -// console.log('Error connecting to PostgreSQL DB:', message); -// } -// } - -// function wrapMethods(server, metadataHolder, methods, userConfig, SQL) { -// connect(userConfig.database.URI); -// const keys = Object.keys(methods); -// const wrappedMethods = {}; -// for (let i = 0; i < keys.length; i++) { -// const name = keys[i]; -// wrappedMethods[name] = (call, callback) => { -// metadataHolder.metadata = call.metadata; -// methods[name](call, (error, response) => { -// const queryString = ` -// INSERT INTO grpc_communications (microservice, request, correlatingId) -// VALUES ($1, $2, $3);`; -// const microservice = userConfig.microservice; -// const request = name; -// const correlatingId = metadataHolder.metadata.get('id')[0]; -// const values = [microservice, request, correlatingId]; -// SQL.query(queryString, values, (err, result) => { -// if (err) { -// throw err; -// } -// console.log('Request cycle saved'); -// }); -// // after server's response has been sent -// callback(error, response); -// }); -// }; -// } -// return wrappedMethods; -// } - -// class ServerWrapper { -// metadataHolder:Record; -// constructor(server:any, proto, methods, userConfig) { -// this.metadataHolder = {}; -// const wrappedMethods = wrapMethods(server, this.metadataHolder, methods, userConfig, SQLclient); -// server.addService(proto, wrappedMethods); -// } -// } - -// export default ServerWrapper; -// import { Client } from 'pg'; -// import grpc from '@grpc/grpc-js'; -// import pkg from 'pg'; -// const { Client } = pkg; -// async function connect(URI: string): Promise { -// try { -// const SQLclient = new Client({ connectionString: URI }); -// await SQLclient.connect(); - -// console.log(`Connected to database at ${URI.slice(0, 24)}...`); - -// await SQLclient.query(` -// CREATE TABLE IF NOT EXISTS communications( -// _id serial PRIMARY KEY, -// microservice VARCHAR(248) NOT NULL, -// request VARCHAR(32) NOT NULL, -// responsestatus INTEGER, -// time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, -// correlatingId VARCHAR(500) -// ) -// `); - -// return SQLclient; -// } catch (error: any) { -// console.error('Error connecting to PostgreSQL DB:', error.message); -// throw error; -// } -// } - -// function wrapMethods( -// server: any, -// metadataHolder: Record, -// methods: Record, -// userConfig: any, -// SQLclient: Client -// ) { -// const keys = Object.keys(methods); -// const wrappedMethods: Record = {}; - -// for (const name of keys) { -// wrappedMethods[name] = (call: any, callback: Function) => { -// metadataHolder.metadata = 
call.metadata; - -// methods[name](call, async (error: any, response: any) => { -// try { -// const queryString = ` -// INSERT INTO grpc_communications (microservice, request, correlatingId) -// VALUES ($1, $2, $3)`; - -// const microservice = userConfig.microservice; -// const request = name; -// const correlatingId = metadataHolder.metadata.get('id')?.[0] || 'unknown_id'; - -// const values = [microservice, request, correlatingId]; - -// await SQLclient.query(queryString, values); -// console.log('Request cycle saved'); -// } catch (err: any) { -// console.error('Error saving request cycle:', err.message); -// } - -// callback(error, response); -// }); -// }; -// } -// return wrappedMethods; -// } - -// class ServerWrapper { -// metadataHolder: Record; -// SQLclient: Client; - -// constructor(server: any, proto: any, methods: Record, userConfig: any) { -// this.metadataHolder = {}; - -// connect(userConfig.database.URI) -// .then((SQLclient) => { -// this.SQLclient = SQLclient; -// const wrappedMethods = wrapMethods(server, this.metadataHolder, methods, userConfig, this.SQLclient); -// server.addService(proto, wrappedMethods); -// }) -// .catch((error) => { -// console.error('Failed to initialize SQL client:', error.message); -// }); -// } -// } - -// export default ServerWrapper; -// wrappers/PostgresServerWrapper.ts - import grpc from '@grpc/grpc-js'; import pkg from 'pg'; const { Client } = pkg; @@ -169,9 +7,9 @@ async function connect(URI: string): Promise { try { const SQLclient = new Client({ connectionString: URI }); await SQLclient.connect(); - + console.log(`Connected to database at ${URI.slice(0, 24)}...`); - + await SQLclient.query(` CREATE TABLE IF NOT EXISTS communications( _id serial PRIMARY KEY, @@ -203,7 +41,7 @@ function wrapMethods( for (const name of keys) { wrappedMethods[name] = (call: any, callback: Function) => { metadataHolder.metadata = call.metadata; - + methods[name](call, async (error: any, response: any) => { try { const queryString = ` @@ -213,7 +51,7 @@ function wrapMethods( const microservice = userConfig.microservice; const request = name; const correlatingId = metadataHolder.metadata.get('id')?.[0] || 'unknown_id'; - + const values = [microservice, request, correlatingId]; await SQLclient.query(queryString, values); @@ -231,18 +69,24 @@ function wrapMethods( class ServerWrapper { metadataHolder: Record; - SQLclient: PgClient; + SQLclient!: PgClient; // Definite assignment assertion constructor(server: any, proto: any, methods: Record, userConfig: any) { this.metadataHolder = {}; - + connect(userConfig.database.URI) - .then((SQLclient) => { + .then(SQLclient => { this.SQLclient = SQLclient; - const wrappedMethods = wrapMethods(server, this.metadataHolder, methods, userConfig, this.SQLclient); + const wrappedMethods = wrapMethods( + server, + this.metadataHolder, + methods, + userConfig, + this.SQLclient + ); server.addService(proto, wrappedMethods); }) - .catch((error) => { + .catch(error => { console.error('Failed to initialize SQL client:', error.message); }); } diff --git a/electron/models/UserModel.ts b/electron/models/UserModel.ts index 283e6516b..13fd2c623 100644 --- a/electron/models/UserModel.ts +++ b/electron/models/UserModel.ts @@ -1,18 +1,21 @@ -import 'dotenv/config';// Imported dotenv to process env files to protect our URI -// Insert the MongoDB URI for your private User database in place of the example URI provided below. 
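+// With the named export introduced below, callers switch from `require('../models/UserModel')`
+// to an ES import, and MONGO_URI must now be present in the electron folder's .env file or the
+// module throws at load time. A minimal sketch (the connection string and username here are
+// placeholders only):
+//
+//   // electron/.env
+//   // MONGO_URI=mongodb+srv://<user>:<password>@<cluster>/<database>
+//
+//   import { UserModel } from '../models/UserModel.js';
+//   const existing = await UserModel.findOne({ username: 'demo' });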
-const MONGO_URI = process.env.MONGO_URI;//URI from .env file in electron folder root directory +import mongoose from 'mongoose'; +import 'dotenv/config'; + + +const MONGO_URI = process.env.MONGO_URI; +if (!MONGO_URI) throw new Error("❌ Missing MONGO_URI in environment variables!"); -const mongoose = require('mongoose'); const userDB = mongoose.createConnection(MONGO_URI); + const userSchema = new mongoose.Schema({ username: { type: String, required: true, unique: true }, password: { type: String, required: true }, - email: String, - services: [], + email: { type: String, required: false }, + services: { type: Array, default: [] }, mode: { type: String, default: 'light' }, }); const UserModel = userDB.model('users', userSchema); -module.exports = UserModel; +export { UserModel }; diff --git a/electron/routes/dashboard.ts b/electron/routes/dashboard.ts index 1b5b428d1..9d16c0077 100644 --- a/electron/routes/dashboard.ts +++ b/electron/routes/dashboard.ts @@ -1,4 +1,4 @@ -// import { BrowserWindow, ipcMain, IpcMainEvent } from 'electron'; +// import { BrowserWindow, ipcMain, IpcMainEvent, IpcMainInvokeEvent } from 'electron'; // import moment from 'moment'; // import path from 'path'; // import fs from 'fs'; @@ -63,7 +63,7 @@ // const createdOn = moment().format('lll'); // newApp.push(createdOn); -// //If currentUser is guest, add services to local instance (settings.json) +// // If currentUser is guest, add services to local instance (settings.json) // if (currentUser === 'guest') { // // Retrieves file contents from settings.json // const settings = JSON.parse(fs.readFileSync(settingsLocation).toString('utf8')); @@ -77,23 +77,17 @@ // // Sync event - return new applications list // message.returnValue = services.map((arr: string[]) => [...arr]); - -// // Else user is logged in, find user information in DB and add newApp to list of applications // } else { -// //Updating DB by pushing newApp into services array +// // Updating DB by pushing newApp into services array // return User.findOneAndUpdate( // { username: currentUser }, -// { -// $push: { services: newApp }, -// }, +// { $push: { services: newApp } }, // { new: true } // ) - // .then(data => { // // console.log('User updated', data); // message.returnValue = data.services.map(arr => [...arr]); // }) - // .catch(error => { // console.log(`addApp failed : ${error}`); // }); @@ -102,8 +96,7 @@ // /** // * @event addAwsApp -// * @param name, 'AWS', region, description, typeOfService, instanceID, accessKey, secretAccessKey, awsURL -// * @desc If guest user, adds an AWS application to the user's list in the settings.json with the provided fields +// * @desc If guest user, adds an AWS application to the user's list in the settings.json with the provided fields. // * If user is logged in, makes an update query request to MongoDB to add an AWS application to the services array under corresponding user document. 
// * @return New list of applications // */ @@ -114,13 +107,11 @@ // const createdOn = moment().format('lll'); // newAwsApp.push(createdOn); -// //If user is logged in, find user information in DB and add newAwsApp to list of applications +// // If user is logged in, find user information in DB and add newAwsApp to list of applications // if (currentUser !== 'guest') { // return User.findOneAndUpdate( // { username: currentUser }, -// { -// $push: { services: newAwsApp }, -// }, +// { $push: { services: newAwsApp } }, // { new: true } // ) // .then(data => { @@ -138,7 +129,7 @@ // console.log(`addAWSApp failed : ${error}`); // }); // } else { -// // if user is not logged in, should not have to pull info from settings.json file +// // If user is not logged in, should not have to pull info from settings.json file // // console.log('current user is a guest, data will be saved locally...'); // // Retrieves file contents from settings.json // const settings = JSON.parse(fs.readFileSync(settingsLocation).toString('utf8')); @@ -162,30 +153,24 @@ // * @desc Retrieves the existing list of applications belonging to the user and current user setting for mode of preference // * @return Returns the list of applications // */ -// // Returns updated state back to the render process on ipc 'dashboard' request // ipcMain.on('getApps', (message: IpcMainEvent) => { // // Retrieves file contents from settings.json for current Apps // const settings = JSON.parse(fs.readFileSync(settingsLocation).toString('utf8')); -// // const services: string[][] = settings[currentUser].services; // let services: string[][] = settings['guest'].services; // temporarily set to guests at every login attempt -// //If user is guest +// // If user is guest // if (currentUser === 'guest') { // services = settings['guest'].services; // const dashboardList: string[][] = services.map((arr: string[]) => [...arr]); // message.returnValue = dashboardList; - -// //If user is not logged in // } else { -// //Find and return services listed under logged in user +// // Find and return services listed under logged in user // return User.findOne({ username: currentUser }) // .then(data => { -// // console.log('User found', data); // services = data.services; // const dashboardList: string[][] = services.map((arr: string[]) => [...arr]); // message.returnValue = dashboardList; // }) - // .catch(error => { // // console.log(`checkUser failed : ${error}`); // }); @@ -194,19 +179,18 @@ // /** // * @event deleteApp -// * @desc If guest user, deletes the desired application from settings.json which is located with the provided index -// * If user is logged in, makes an update query request to mongoDB to delete the desired application in services array -// * @return Returns the new list of applications +// * @desc If guest user, deletes the desired application from settings.json which is located with the provided index. +// * If user is logged in, makes an update query request to mongoDB to delete the desired application in services array. +// * @return Returns the new list of applications. 
// */ -// ipcMain.on('deleteApp', (message: IpcMainEvent, index:number, action:string) => { -// //If user is not logged in +// ipcMain.on('deleteApp', (message: IpcMainEvent, index: number, action: string) => { +// // If user is not logged in // if (currentUser === 'guest') { -// // Retrives file contents from settings.json // const settings = JSON.parse(fs.readFileSync(settingsLocation).toString('utf8')); // let guestServices = settings[currentUser].services; // // Remove application from settings.json -// if(action === 'all') guestServices.splice(0); +// if (action === 'all') guestServices.splice(0); // else guestServices.splice(index, 1); // // Update settings.json with new list @@ -216,37 +200,26 @@ // // Sync event - return new applications list // message.returnValue = guestServices.map((arr: string[]) => [...arr]); -// } - -// //If user is logged in -// else { +// } else { // return User.findOne({ username: currentUser }) - // .then(data => { -// // console.log('User found', data); // const service = data.services[index]; -// // Delete service from services array in corresponding user's document in mongoDB +// // Delete service from services array in corresponding user's document in MongoDB // return User.findOneAndUpdate( // { username: currentUser }, -// { -// $pull: { services: service }, -// }, +// { $pull: { services: service } }, // { new: true } // ) - // .then(data => { -// // console.log('Service deleted', data); // message.returnValue = data.services.map(arr => [...arr]); // }) - // .catch(error => { // console.log(`addApp failed : ${error}`); // }); // }) // .catch(error => { // console.log(`checkUser failed : ${error}`); -// // return false; // }); // } // }); @@ -254,88 +227,61 @@ // // v10 note: have not yet been updated in DB // /** // * @event changeMode -// * @desc Changes user's mode/theme preference fron settings.json -// * @return Returns the newly update setting preference of the app to the renderer end +// * @desc Changes user's mode/theme preference from settings.json. +// * @return Returns the newly updated setting preference of the app to the renderer end. // */ -// // Loads existing setting JSON and update settings to include updated mode version // ipcMain.on('changeMode', async (message: IpcMainEvent, currMode: string) => { -// // Retrieves file contents from settings.json // if (currentUser === 'guest') { // const settings = JSON.parse(fs.readFileSync(settingsLocation).toString('utf8')); // const userSettings = settings[currentUser]; // userSettings.mode = currMode; - -// // Update settings.json with new mode // fs.writeFileSync(settingsLocation, JSON.stringify(settings, null, '\t')); // } else { // try { -// // console.log('Should be in here when logged in'); // await User.findOneAndUpdate({ userName: currentUser }, { $set: { mode: currMode } }); // } catch (err) { // console.log('Error in changeMode ', err); // } // } - -// // Sync event - return new mode // message.returnValue = currMode; // }); // /** // * @event addUser -// * @desc Checks if username already exists. If not, invokes addUser() to create new User document in mongoDB -// * @return Returns a boolean to the renderer end to signify if addUser() was invoked based on whether username already exists in DB +// * @desc Checks if username already exists. If not, invokes addUser() to create new User document in MongoDB. +// * @return Returns a boolean to the renderer to signify if addUser() was invoked. 
// */ - // ipcMain.handle( // 'addUser', -// (message: IpcMainEvent, user: { username: string; password: string; email: string }) => { +// async (event: IpcMainInvokeEvent, user: { username: string; password: string; email: string }) => { // const { username, password, email } = user; -// // console.log('in ipcMainhandle', user); - -// // checks if username exist in DB, if not, addUser is invoked -// return User.findOne({ username: username }) - -// .then(data => { -// // console.log('User found', data); - -// if (data) { -// message.returnValue = false; -// return message.returnValue; -// } else { -// addUser(username, password, email); -// message.returnValue = true; -// return message.returnValue; -// } -// }) - -// .catch(error => { -// console.log(`checkUser failed : ${error}`); -// }); +// try { +// const data = await User.findOne({ username: username }); +// if (data) { +// return false; +// } else { +// await addUser(username, password, email); +// return true; +// } +// } catch (error) { +// console.log(`checkUser failed : ${error}`); +// throw error; +// } // } // ); // /** // * @event login -// * @desc Checks if username and password matches what's in DB. If yes, reassign currentUsername and sends mode to renderer end. If not, sends boolean 'false' to renderer end to signify credentials not found or does not match. -// * @return Returns the mode string, representing user's mode OR boolean 'false', representing credentials not found or does not match. +// * @desc Checks if username and password match what's in DB. +// * If yes, reassigns currentUser and returns user's mode. +// * Otherwise returns false. // */ - // ipcMain.on('login', (message: IpcMainEvent, user: { username: string; password: string }) => { // const { username, password } = user; - -// //Checks if user exists in DB // return User.findOne({ username: username }) // .then(data => { -// // console.log(data.username, ' is being logged in...'); - -// //Checks if user is found and password matches // if (data !== null && bcrypt.compareSync(password, data.password)) { -// // console.log('Login was successful.'); -// // console.log('returned data: ', data); -// // console.log('found data', data.mode); // currentUser = username; - -// // returnValue being set to mode, returned as string. // message.returnValue = data.mode; // return message.returnValue; // } else { @@ -345,16 +291,14 @@ // }) // .catch(error => { // console.log(`checkUser failed : ${error}`); -// // return false; // }); // }); // /** // * @event signOut -// * @desc Logs out user and reassigns currentUser to 'guest' -// * @return Returns boolean true +// * @desc Logs out user and reassigns currentUser to 'guest'. +// * @return Returns boolean true. 
// */ - // ipcMain.on('signOut', (message: IpcMainEvent) => { // currentUser = 'guest'; // message.returnValue = true; @@ -366,307 +310,225 @@ import { BrowserWindow, ipcMain, IpcMainEvent, IpcMainInvokeEvent } from 'electr import moment from 'moment'; import path from 'path'; import fs from 'fs'; -const bcrypt = require('bcrypt'); +import bcrypt from 'bcrypt'; // βœ… Fix bcrypt import for ES Modules +import mongoose from 'mongoose'; +import { UserModel } from '../models/UserModel.js'; // βœ… Named import for ES modules + + +// πŸ”Ή Constants const saltRounds = 12; -const User = require('../models/UserModel'); -const mongoose = require('mongoose'); +let currentUser: string = 'guest'; +const settingsLocation: string = path.resolve(__dirname, '../../settings.json'); + +// βœ… Interfaces for TypeScript +interface UserCredentials { + username: string; + password: string; + email?: string; +} -// GLOBAL VARIABLES -// currentUser is defaulted to 'guest' -// When user logs in or signs up with valid credentials, currentUser will be reassigned. -let currentUser = 'guest'; -const settingsLocation = path.resolve(__dirname, '../../settings.json'); +interface AwsApp { + name: string; + region: string; + instance: string; +} -/** - * @event hashPassword - * @desc hashes password provided when user signs up. - * @return {string} bcrypt hashed password - */ -function hashPassword(password: string) { +// πŸ”Ή **Hash User Password** +function hashPassword(password: string): string { const salt = bcrypt.genSaltSync(saltRounds); return bcrypt.hashSync(password, salt); } -// Function to create new User with client's inputted data and saving into DB -/** - * @event addUser - * @desc adds a new user to the user database - */ -function addUser(username, password, email) { - // console.log('Creating new User', username); - const newUser = new User({ username: username, password: hashPassword(password), email: email }); - - // Saving new User into DB - newUser.save().then(data => { - // console.log('data saved', data); - }); -} +// πŸ”Ή **Create a New User** +async function addUser(username: string, password: string, email: string): Promise { + const newUser = new UserModel({ username, password: hashPassword(password), email }); -/** - * @event clearGuestSettings - * @desc - */ -function clearGuestSettings() { - const settings = JSON.parse(fs.readFileSync(settingsLocation).toString('utf8')); - // Guest Settings will be an array of length 1 with one object inside - settings.guest.services = []; - settings.guest.mode = 'light'; - fs.writeFileSync(settingsLocation, JSON.stringify(settings, null, '\t')); + try { + await newUser.save(); + console.log(`βœ… User '${username}' successfully added.`); + } catch (error) { + console.error(`❌ Error creating user:`, error); + } } -/** - * @event addApp - * @desc If guest user, adds an application to the user's list in the settings.json with the provided fields. - * If user is logged in, makes an update query request to MongoDB to add an application to the services array under corresponding user document. 
- * @return New array of applications - */ -ipcMain.on('addApp', (message: IpcMainEvent, application: any) => { - const newApp = JSON.parse(application); - // console.log('parsed newApp: ', newApp); - // console.log('currentUser', currentUser); - const createdOn = moment().format('lll'); - newApp.push(createdOn); - - // If currentUser is guest, add services to local instance (settings.json) - if (currentUser === 'guest') { - // Retrieves file contents from settings.json - const settings = JSON.parse(fs.readFileSync(settingsLocation).toString('utf8')); - const services = settings[currentUser].services; - - // Add app to list of applications - services.push(newApp); - - // Update settings.json with new list +// πŸ”Ή **Clear Guest Settings** +function clearGuestSettings(): void { + try { + const settingsData = fs.readFileSync(settingsLocation, 'utf8'); + if (!settingsData) { + console.warn("⚠️ Settings file is empty!"); + return; + } + const settings = JSON.parse(settingsData); + + settings.guest.services = []; + settings.guest.mode = 'light'; + fs.writeFileSync(settingsLocation, JSON.stringify(settings, null, '\t')); + } catch (error) { + console.error(`❌ Error clearing guest settings:`, error); + } +} - // Sync event - return new applications list - message.returnValue = services.map((arr: string[]) => [...arr]); - } else { - // Updating DB by pushing newApp into services array - return User.findOneAndUpdate( - { username: currentUser }, - { $push: { services: newApp } }, - { new: true } - ) - .then(data => { - // console.log('User updated', data); - message.returnValue = data.services.map(arr => [...arr]); - }) - .catch(error => { - console.log(`addApp failed : ${error}`); - }); +// πŸ”Ή **Helper: Read JSON Safely** +function safeReadJSON(filePath: string): any { + try { + const rawData = fs.readFileSync(filePath, 'utf8'); + return rawData ? JSON.parse(rawData) : {}; + } catch (error) { + console.error(`❌ Failed to read JSON file (${filePath}):`, error); + return {}; } -}); +} /** - * @event addAwsApp - * @desc If guest user, adds an AWS application to the user's list in the settings.json with the provided fields. - * If user is logged in, makes an update query request to MongoDB to add an AWS application to the services array under corresponding user document. 
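The guest branches of these handlers all read and rewrite the same settings file through `safeReadJSON` and `fs.writeFileSync`. A typed sketch of the shape they assume, inferred only from the fields the diff actually touches (`guest.services` and `guest.mode`); the file path is a placeholder:

```ts
import fs from 'fs';

// Inferred shape of settings.json; any other fields the real file holds are not shown here.
interface SettingsFile {
  guest: {
    services: unknown[]; // legacy entries are string arrays; the new addAwsApp pushes objects
    mode: string;        // 'light' or 'dark'
  };
}

// Typed read-modify-write, mirroring what the guest branches do.
const settings = JSON.parse(fs.readFileSync('settings.json', 'utf8')) as SettingsFile;
settings.guest.mode = 'dark';
fs.writeFileSync('settings.json', JSON.stringify(settings, null, '\t'));
```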
- * @return New list of applications + * πŸ“Œ **Add an Application (Electron IPC Event)** */ -ipcMain.on('addAwsApp', (message: IpcMainEvent, application: any) => { - const newAwsApp = JSON.parse(application); - // console.log('parsed newApp: ', newAwsApp); - // console.log('currentUser', currentUser); - const createdOn = moment().format('lll'); - newAwsApp.push(createdOn); - - // If user is logged in, find user information in DB and add newAwsApp to list of applications - if (currentUser !== 'guest') { - return User.findOneAndUpdate( - { username: currentUser }, - { $push: { services: newAwsApp } }, - { new: true } - ) - .then(data => { - // console.log('User updated', data); - // returning each array element name, 'AWS', region, 'AWS/(instance)', Date - message.returnValue = data.services.map((arr: string[]) => [ - arr[0], - arr[1], - arr[2], - arr[4], - arr[5], - ]); - }) - .catch(error => { - console.log(`addAWSApp failed : ${error}`); - }); - } else { - // If user is not logged in, should not have to pull info from settings.json file - // console.log('current user is a guest, data will be saved locally...'); - // Retrieves file contents from settings.json - const settings = JSON.parse(fs.readFileSync(settingsLocation).toString('utf8')); - const services = settings[currentUser].services; - - newAwsApp.splice(5, 0, createdOn); - - // Add app to list of applications - services.push(newAwsApp); - - // Update settings.json with new list - fs.writeFileSync(settingsLocation, JSON.stringify(settings, null, '\t')); - - // Sync event - return new applications list - message.returnValue = services.map((arr: string[]) => [arr[0], arr[1], arr[2], arr[4], arr[5]]); +ipcMain.on('addApp', async (event: IpcMainEvent, application: string) => { + try { + const newApp = JSON.parse(application); + newApp.push(moment().format('lll')); // Append createdOn timestamp + + if (currentUser === 'guest') { + // Guest User: Store in Local Settings + const settings = safeReadJSON(settingsLocation); + settings.guest.services.push(newApp); + fs.writeFileSync(settingsLocation, JSON.stringify(settings, null, '\t')); + + event.returnValue = settings.guest.services; + } else { + // Logged-in User: Store in MongoDB + const updatedUser = await UserModel.findOneAndUpdate( + { username: currentUser }, + { $push: { services: newApp } }, + { new: true } + ); + + event.returnValue = updatedUser?.services || []; + } + } catch (error) { + console.error(`❌ Error in addApp:`, error); + event.returnValue = []; } }); /** - * @event getApps - * @desc Retrieves the existing list of applications belonging to the user and current user setting for mode of preference - * @return Returns the list of applications + * πŸ“Œ **Add AWS Application (Electron IPC Event)** */ -ipcMain.on('getApps', (message: IpcMainEvent) => { - // Retrieves file contents from settings.json for current Apps - const settings = JSON.parse(fs.readFileSync(settingsLocation).toString('utf8')); - let services: string[][] = settings['guest'].services; // temporarily set to guests at every login attempt - - // If user is guest - if (currentUser === 'guest') { - services = settings['guest'].services; - const dashboardList: string[][] = services.map((arr: string[]) => [...arr]); - message.returnValue = dashboardList; - } else { - // Find and return services listed under logged in user - return User.findOne({ username: currentUser }) - .then(data => { - services = data.services; - const dashboardList: string[][] = services.map((arr: string[]) => [...arr]); - 
message.returnValue = dashboardList; - }) - .catch(error => { - // console.log(`checkUser failed : ${error}`); - }); +ipcMain.on('addAwsApp', async (event: IpcMainEvent, application: string) => { + try { + const newAwsApp: AwsApp = JSON.parse(application); + newAwsApp.instance += ` - Created on: ${moment().format('lll')}`; + + if (currentUser === 'guest') { + const settings = safeReadJSON(settingsLocation); + settings.guest.services.push(newAwsApp); + fs.writeFileSync(settingsLocation, JSON.stringify(settings, null, '\t')); + + event.returnValue = settings.guest.services; + } else { + const updatedUser = await UserModel.findOneAndUpdate( + { username: currentUser }, + { $push: { services: newAwsApp } }, + { new: true } + ); + + event.returnValue = updatedUser?.services || []; + } + } catch (error) { + console.error(`❌ Error in addAwsApp:`, error); + event.returnValue = []; } }); /** - * @event deleteApp - * @desc If guest user, deletes the desired application from settings.json which is located with the provided index. - * If user is logged in, makes an update query request to mongoDB to delete the desired application in services array. - * @return Returns the new list of applications. + * πŸ“Œ **Retrieve User Applications** */ -ipcMain.on('deleteApp', (message: IpcMainEvent, index: number, action: string) => { - // If user is not logged in - if (currentUser === 'guest') { - const settings = JSON.parse(fs.readFileSync(settingsLocation).toString('utf8')); - let guestServices = settings[currentUser].services; - - // Remove application from settings.json - if (action === 'all') guestServices.splice(0); - else guestServices.splice(index, 1); - - // Update settings.json with new list - fs.writeFileSync(settingsLocation, JSON.stringify(settings, null, '\t'), { - encoding: 'utf8', - }); - - // Sync event - return new applications list - message.returnValue = guestServices.map((arr: string[]) => [...arr]); - } else { - return User.findOne({ username: currentUser }) - .then(data => { - const service = data.services[index]; - - // Delete service from services array in corresponding user's document in MongoDB - return User.findOneAndUpdate( - { username: currentUser }, - { $pull: { services: service } }, - { new: true } - ) - .then(data => { - message.returnValue = data.services.map(arr => [...arr]); - }) - .catch(error => { - console.log(`addApp failed : ${error}`); - }); - }) - .catch(error => { - console.log(`checkUser failed : ${error}`); - }); +ipcMain.on('getApps', async (event: IpcMainEvent) => { + try { + if (currentUser === 'guest') { + const settings = safeReadJSON(settingsLocation); + event.returnValue = settings.guest.services; + } else { + const user = await UserModel.findOne({ username: currentUser }); + event.returnValue = user?.services || []; + } + } catch (error) { + console.error(`❌ Error in getApps:`, error); + event.returnValue = []; } }); -// v10 note: have not yet been updated in DB /** - * @event changeMode - * @desc Changes user's mode/theme preference from settings.json. - * @return Returns the newly updated setting preference of the app to the renderer end. 
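The rewritten `addAwsApp` expects a JSON string matching the `AwsApp` interface rather than the old positional array, and it still replies through `event.returnValue`, which means the renderer has to use the synchronous `sendSync` channel rather than `invoke`. A hedged sketch of the sending side (channel and field names from the diff; the direct `ipcRenderer` access is an assumption):

```ts
import { ipcRenderer } from 'electron';

const awsApp = {
  name: 'my-service',
  region: 'us-east-1',
  instance: 'AWS/EC2', // the handler appends a "Created on: <timestamp>" suffix to this field
};

// ipcMain.on + event.returnValue pairs with ipcRenderer.sendSync
const services = ipcRenderer.sendSync('addAwsApp', JSON.stringify(awsApp));
console.log('updated services list:', services);
```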
+ * πŸ“Œ **Delete Application** */ -ipcMain.on('changeMode', async (message: IpcMainEvent, currMode: string) => { - if (currentUser === 'guest') { - const settings = JSON.parse(fs.readFileSync(settingsLocation).toString('utf8')); - const userSettings = settings[currentUser]; - userSettings.mode = currMode; - fs.writeFileSync(settingsLocation, JSON.stringify(settings, null, '\t')); - } else { - try { - await User.findOneAndUpdate({ userName: currentUser }, { $set: { mode: currMode } }); - } catch (err) { - console.log('Error in changeMode ', err); +ipcMain.on('deleteApp', async (event: IpcMainEvent, index: number, action: string) => { + try { + if (currentUser === 'guest') { + const settings = safeReadJSON(settingsLocation); + if (action === 'all') settings.guest.services = []; + else settings.guest.services.splice(index, 1); + fs.writeFileSync(settingsLocation, JSON.stringify(settings, null, '\t')); + event.returnValue = settings.guest.services; + } else { + const user = await UserModel.findOne({ username: currentUser }); + if (user) { + user.services.splice(index, 1); + await user.save(); + event.returnValue = user.services; + } else { + event.returnValue = []; + } } + } catch (error) { + console.error(`❌ Error in deleteApp:`, error); + event.returnValue = []; } - message.returnValue = currMode; }); /** - * @event addUser - * @desc Checks if username already exists. If not, invokes addUser() to create new User document in MongoDB. - * @return Returns a boolean to the renderer to signify if addUser() was invoked. + * πŸ“Œ **Change Mode (Dark/Light)** */ -ipcMain.handle( - 'addUser', - async (event: IpcMainInvokeEvent, user: { username: string; password: string; email: string }) => { - const { username, password, email } = user; - try { - const data = await User.findOne({ username: username }); - if (data) { - return false; - } else { - await addUser(username, password, email); - return true; - } - } catch (error) { - console.log(`checkUser failed : ${error}`); - throw error; +ipcMain.on('changeMode', async (event: IpcMainEvent, currMode: string) => { + try { + if (currentUser === 'guest') { + const settings = safeReadJSON(settingsLocation); + settings.guest.mode = currMode; + fs.writeFileSync(settingsLocation, JSON.stringify(settings, null, '\t')); + } else { + await UserModel.findOneAndUpdate({ username: currentUser }, { $set: { mode: currMode } }); } + event.returnValue = currMode; + } catch (error) { + console.error(`❌ Error in changeMode:`, error); + event.returnValue = 'light'; } -); +}); /** - * @event login - * @desc Checks if username and password match what's in DB. - * If yes, reassigns currentUser and returns user's mode. - * Otherwise returns false. 
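Both the signup path (`hashPassword`) and the login handler below lean on the same bcrypt contract: hash once with a generated salt, then verify with `compareSync`. A minimal round-trip sketch using the salt-rounds value from this file:

```ts
import bcrypt from 'bcrypt';

const saltRounds = 12;

// what hashPassword does at signup
const hash = bcrypt.hashSync('hunter2', bcrypt.genSaltSync(saltRounds));

// what the login handler does on every attempt
console.log(bcrypt.compareSync('hunter2', hash)); // true
console.log(bcrypt.compareSync('wrongpw', hash)); // false
```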
+ * πŸ“Œ **User Login** */ -ipcMain.on('login', (message: IpcMainEvent, user: { username: string; password: string }) => { - const { username, password } = user; - return User.findOne({ username: username }) - .then(data => { - if (data !== null && bcrypt.compareSync(password, data.password)) { - currentUser = username; - message.returnValue = data.mode; - return message.returnValue; - } else { - message.returnValue = false; - return message.returnValue; - } - }) - .catch(error => { - console.log(`checkUser failed : ${error}`); - }); +ipcMain.on('login', async (event: IpcMainEvent, user: UserCredentials) => { + try { + const foundUser = await UserModel.findOne({ username: user.username }); + if (foundUser && bcrypt.compareSync(user.password, foundUser.password)) { + currentUser = user.username; + event.returnValue = foundUser.mode; + } else { + event.returnValue = false; + } + } catch (error) { + console.error(`❌ Error in login:`, error); + event.returnValue = false; + } }); /** - * @event signOut - * @desc Logs out user and reassigns currentUser to 'guest'. - * @return Returns boolean true. + * πŸ“Œ **User Sign-Out** */ -ipcMain.on('signOut', (message: IpcMainEvent) => { +ipcMain.on('signOut', (event: IpcMainEvent) => { currentUser = 'guest'; - message.returnValue = true; - return; + event.returnValue = true; }); export { clearGuestSettings }; diff --git a/examples/microservices/auth/package.json b/examples/microservices/auth/package.json index 968f6bd66..4b39d90ee 100644 --- a/examples/microservices/auth/package.json +++ b/examples/microservices/auth/package.json @@ -5,14 +5,7 @@ "main": "index.ts", "type": "module", "scripts": { - -"start": "node --import 'data:text/javascript,import { register } from \"node:module\"; import { pathToFileURL } from \"node:url\"; register(\"ts-node/esm\", pathToFileURL(\"./\"));' ./src/index.ts", - - - - - - + "start": "node --import 'data:text/javascript,import { register } from \"node:module\"; import { pathToFileURL } from \"node:url\"; register(\"ts-node/esm\", pathToFileURL(\"./\"));' ./src/index.ts", "dev": "ts-node-dev ./src/index.ts", "test": "jest --watchAll --no-cache" }, diff --git a/examples/microservices/auth/src/__test__/login.test.ts b/examples/microservices/auth/src/__test__/login.test.ts index 104c6f1ba..a28cc6599 100644 --- a/examples/microservices/auth/src/__test__/login.test.ts +++ b/examples/microservices/auth/src/__test__/login.test.ts @@ -1,76 +1,3 @@ -// import request from 'supertest'; -// import { app } from '../app.js'; - -// // Mongo Memory Server - Users collection always starts out empty** - -// // 1) Fails with bad request error either username or password are not provided -// it('fails if either username or password are not provided', async () => { -// await request(app) -// .post('/api/auth/login') -// .send({ -// username: 'AYYYYY', -// }) -// .expect(400); - -// await request(app) -// .post('/api/auth/login') -// .send({ -// password: 'CUH', -// }) -// .expect(400); -// }); - -// // 2) Fails with bad request error if user does not exist in the database -// it('user does not exist', async () => { -// await request(app) -// .post('/api/auth/login') -// .send({ -// username: 'nonexistentuser', -// password: 'test', -// }) -// .expect(400); -// }); - -// // 3) Fails with BadRequest Error if passwords do not match -// it('fails if passwords do not match', async () => { -// await request(app) -// .post('/api/auth/signup') -// .send({ -// username: 'test', -// password: 'rightTest', -// }) -// .expect(201); - -// await 
request(app) -// .post('/api/auth/login') -// .send({ -// username: 'test', -// password: 'wrongTest', -// }) -// .expect(400); -// }); - -// // 4) Succeeds if username and password match a user in the database -// it('Users has correct password', async () => { -// await request(app) -// .post('/api/auth/signup') -// .send({ -// username: 'test', -// password: 'test123', -// }) -// .expect(201); - -// const response = await request(app) -// .post('/api/auth/login') -// .send({ -// username: 'test', -// password: 'test123', -// }) -// .expect(200); - -// expect(response.get('Set-Cookie')).toBeDefined(); -// expect(response.get('Set-Cookie')[0].split('=')[0]).toEqual('token'); -// }); import request from 'supertest'; import { app } from '../app.js'; diff --git a/examples/microservices/auth/src/__test__/signup.test.ts b/examples/microservices/auth/src/__test__/signup.test.ts index 7126f01f9..e73c7bcfa 100644 --- a/examples/microservices/auth/src/__test__/signup.test.ts +++ b/examples/microservices/auth/src/__test__/signup.test.ts @@ -1,185 +1,3 @@ -// // // import request from 'supertest'; -// // // import { app } from '../app'; -// // // import { User } from '../models/user'; - -// // // it('fails with 400 if no username/password provided', async () => { -// // // await request(app) -// // // .post('/api/auth/signup') -// // // .send({ -// // // password: 'wheremyname', -// // // }) -// // // .expect(400); - -// // // await request(app) -// // // .post('/api/auth/signup') -// // // .send({ -// // // username: 'wheremypassword', -// // // }) -// // // .expect(400); -// // // }); - -// // // it('fails with 400 with invalid password', async () => { -// // // await request(app) -// // // .post('/api/auth/signup') -// // // .send({ -// // // username: 'validuser', -// // // password: 'b', -// // // }) -// // // .expect(400); -// // // }); - -// // // it('Does not allow duplicate username signup ', async () => { -// // // await request(app) -// // // .post('/api/auth/signup') -// // // .send({ -// // // username: 'test', -// // // password: 'test', -// // // }) -// // // .expect(201); - -// // // await request(app) -// // // .post('/api/auth/signup') -// // // .send({ -// // // username: 'test', -// // // password: 'test', -// // // }) -// // // .expect(400); -// // // }); - -// // // it('creates a user with valid inputs', async () => { -// // // await request(app) -// // // .post('/api/auth/signup') -// // // .send({ -// // // username: 'test', -// // // password: 'test', -// // // }) -// // // .expect(201); - -// // // const users = await User.find({}); -// // // expect(users[0].username).toEqual('test'); -// // // }); - -// // // it('Sets a cookie on successful signup', async () => { -// // // const response = await request(app) -// // // .post('/api/auth/signup') -// // // .send({ -// // // username: 'test', -// // // password: 'test', -// // // }) -// // // .expect(201); - -// // // expect(response.get('Set-Cookie')).toBeDefined(); -// // // expect(response.get('Set-Cookie')[0].split('=')[0]).toEqual('token'); -// // // }); -// // import request from 'supertest'; -// // import { app } from '../app'; -// // import { User } from '../models/user'; - -// // it('fails with 400 if no username/password provided', async () => { -// // await (request(app) -// // .post('/api/auth/signup') -// // .send({ password: 'wheremyname' }) as any) -// // .expect(400); - -// // await (request(app) -// // .post('/api/auth/signup') -// // .send({ username: 'wheremypassword' }) as any) -// // .expect(400); -// // 
}); - -// // it('fails with 400 with invalid password', async () => { -// // await (request(app) -// // .post('/api/auth/signup') -// // .send({ username: 'validuser', password: 'b' }) as any) -// // .expect(400); -// // }); - -// // it('Does not allow duplicate username signup ', async () => { -// // await (request(app) -// // .post('/api/auth/signup') -// // .send({ username: 'test', password: 'test' }) as any) -// // .expect(201); - -// // await (request(app) -// // .post('/api/auth/signup') -// // .send({ username: 'test', password: 'test' }) as any) -// // .expect(400); -// // }); - -// // it('creates a user with valid inputs', async () => { -// // await (request(app) -// // .post('/api/auth/signup') -// // .send({ username: 'test', password: 'test' }) as any) -// // .expect(201); - -// // const users = await User.find({}); -// // expect(users[0].username).toEqual('test'); -// // }); - -// // it('Sets a cookie on successful signup', async () => { -// // const response = await (request(app) -// // .post('/api/auth/signup') -// // .send({ username: 'test', password: 'test' }) as any) -// // .expect(201); - -// // expect(response.get('Set-Cookie')).toBeDefined(); -// // expect(response.get('Set-Cookie')[0].split('=')[0]).toEqual('token'); -// // }); - -// import request from 'supertest'; -// import { app } from '../app'; -// import { User } from '../models/user'; - -// it('fails with 400 if no username/password provided', async () => { -// await (request(app) -// .post('/api/auth/signup') -// .send({ password: 'wheremyname' }) as any) -// .expect(400); - -// await (request(app) -// .post('/api/auth/signup') -// .send({ username: 'wheremypassword' }) as any) -// .expect(400); -// }); - -// it('fails with 400 with invalid password', async () => { -// await (request(app) -// .post('/api/auth/signup') -// .send({ username: 'validuser', password: 'b' }) as any) -// .expect(400); -// }); - -// it('Does not allow duplicate username signup', async () => { -// await (request(app) -// .post('/api/auth/signup') -// .send({ username: 'test', password: 'test' }) as any) -// .expect(201); - -// await (request(app) -// .post('/api/auth/signup') -// .send({ username: 'test', password: 'test' }) as any) -// .expect(400); -// }); - -// it('creates a user with valid inputs', async () => { -// await (request(app) -// .post('/api/auth/signup') -// .send({ username: 'test', password: 'test' }) as any) -// .expect(201); - -// const users = await User.find({}); -// expect(users[0].username).toEqual('test'); -// }); - -// it('Sets a cookie on successful signup', async () => { -// const response = await (request(app) -// .post('/api/auth/signup') -// .send({ username: 'test', password: 'test' }) as any) -// .expect(201); - -// expect(response.get('Set-Cookie')).toBeDefined(); -// expect(response.get('Set-Cookie')[0].split('=')[0]).toEqual('token'); -// }); import request from 'supertest'; import { app } from '../app.js'; import { User } from '../models/user.js'; diff --git a/examples/microservices/auth/src/app.ts b/examples/microservices/auth/src/app.ts index a37b14061..df124ce31 100644 --- a/examples/microservices/auth/src/app.ts +++ b/examples/microservices/auth/src/app.ts @@ -7,20 +7,15 @@ import eventRouter from './routes/event-router.js'; import { NotFoundError, errorHandler } from '@chronosrx/common'; import chronosConfig from './chronos-config.js'; -// const Chronos = require('../../../../chronos_npm_package/chronos'); + import Chronos from '../../../../chronos_npm_package/chronos.js'; -// const chronos = 
new Chronos(chronosConfig); -// const chronos = new Chronos({ -// ...chronosConfig, -// mode: chronosConfig.mode as 'kafka' | 'kubernetes' | 'microservices' | 'docker', // βœ… Type assertion -// }); const chronos = new Chronos({ ...chronosConfig, mode: chronosConfig.mode as 'kafka' | 'kubernetes' | 'microservices' | 'docker', database: { ...chronosConfig.database, - URI: chronosConfig.database.URI || '', // βœ… Ensure `URI` is always a string + URI: chronosConfig.database.URI || '', // Ensure `URI` is always a string }, }); diff --git a/examples/microservices/auth/src/chronos-config.ts b/examples/microservices/auth/src/chronos-config.ts index 39a065ef8..668cf6581 100644 --- a/examples/microservices/auth/src/chronos-config.ts +++ b/examples/microservices/auth/src/chronos-config.ts @@ -1,59 +1,3 @@ -// const chronosConfig = { -// // General configuration -// microservice: 'auth', -// interval: 5000, - -// // Mode Specific -// mode: 'microservices', -// dockerized: false, - -// database: { -// connection: 'REST', -// type: process.env.CHRONOS_DB, -// URI: process.env.CHRONOS_URI, -// }, - -// notifications: [], -// }; - - -// import path from 'path'; -// // require('dotenv').config({ -// // path: path.resolve(__dirname, '../../.env'), -// // }); - -// import dotenv from 'dotenv'; - -// import { fileURLToPath } from 'url'; - -// // Create __filename and __dirname equivalents -// const __filename = fileURLToPath(import.meta.url); -// const __dirname = path.dirname(__filename); - -// dotenv.config({ -// path: path.resolve(__dirname, '../../.env'), -// }); - - - - -// const chronosConfig = { -// microservice: 'auth', -// interval: 5000, -// mode: process.env.CHRONOS_MODE as 'kafka' | 'kubernetes' | 'microservices' | 'docker', -// dockerized: false, -// database: { -// connection: 'REST', -// type: process.env.CHRONOS_DB as 'MongoDB' | 'PostgreSQL', -// URI: process.env.CHRONOS_URI || '', // βœ… Ensure URI is always a string -// }, -// notifications: [], -// }; - -// export default chronosConfig; - - - import path from 'path'; import dotenv from 'dotenv'; import { fileURLToPath } from 'url'; @@ -68,10 +12,10 @@ dotenv.config({ }); // Debugging logs to check if .env variables are loaded correctly -console.log('πŸ“Œ Loaded CHRONOS_URI:', process.env.CHRONOS_URI); -console.log('πŸ“Œ Loaded CHRONOS_MODE:', process.env.CHRONOS_MODE); -console.log('πŸ“Œ Loaded CHRONOS_DB:', process.env.CHRONOS_DB); -console.log('πŸ“Œ Loaded CHRONOS_CONNECTION:', process.env.CHRONOS_CONNECTION); +console.log(' Loaded CHRONOS_URI:', process.env.CHRONOS_URI); +console.log(' Loaded CHRONOS_MODE:', process.env.CHRONOS_MODE); +console.log(' Loaded CHRONOS_DB:', process.env.CHRONOS_DB); +console.log(' Loaded CHRONOS_CONNECTION:', process.env.CHRONOS_CONNECTION); const chronosConfig = { microservice: 'auth', diff --git a/examples/microservices/auth/src/controllers/auth-controller.ts b/examples/microservices/auth/src/controllers/auth-controller.ts index 5886bfcbe..ef264f9ee 100644 --- a/examples/microservices/auth/src/controllers/auth-controller.ts +++ b/examples/microservices/auth/src/controllers/auth-controller.ts @@ -1,195 +1,3 @@ -// // // import { Request, Response } from 'express'; -// // import axios, { AxiosError, isAxiosError } from 'axios'; -// // import { BadRequestError, CurrentUserRequest, EventTypes, Events } from '@chronosrx/common'; -// // import { User } from '../models/user'; -// // import { attachCookie } from '../util/attachCookie'; - -// // export const signup = async (req: Request, res: Response) => { -// 
// const { username, password } = req.body; - -// // // Validate inputs -// // if (!username || !password || password.length < 4) { -// // throw new BadRequestError('Invalid inputs'); -// // } - -// // // Check if a user with the supplied username already exists -// // const existingUser = await User.findOne({ username }); -// // if (existingUser) { -// // throw new BadRequestError('User with that username exists'); -// // } - -// // // Create and save the user document -// // const newUser = User.build({ username, password }); -// // await newUser.save(); - -// // // Create an event for the new user -// // const event: Events = { -// // type: EventTypes.USER_CREATED, -// // payload: { -// // id: newUser.id, -// // username: newUser.username, -// // }, -// // }; - -// // // Post the event to the event bus -// // try { -// // await axios.post('http://localhost:3005/', { event }); -// // } catch (err: unknown) { -// // if (isAxiosError(err)) { -// // // err is an AxiosError here -// // console.log( -// // `Failed to emit event USER_CREATED from auth: ${err.message || 'unknown error'}` -// // ); -// // } else if (err instanceof Error) { -// // console.log(`Failed to emit event USER_CREATED from auth: ${err.message}`); -// // } else { -// // console.log('Failed to emit event USER_CREATED from auth: unknown error'); -// // } -// // } - -// // // Create a JWT and attach it to the response cookie -// // const token = newUser.createJwt(); -// // attachCookie(res, token); - -// // res.status(201).send(newUser); -// // }; - -// // export const login = async (req: Request, res: Response) => { -// // const { username, password } = req.body; -// // if (!username || !password) { -// // throw new BadRequestError('Must provide username and password'); -// // } -// // const existingUser = await User.findOne({ username }); -// // if (!existingUser) { -// // throw new BadRequestError('Invalid credentials'); -// // } -// // const passwordsMatch = await existingUser.comparePassword(password); -// // if (!passwordsMatch) { -// // throw new BadRequestError('Invalid credentials'); -// // } -// // const token = existingUser.createJwt(); -// // attachCookie(res, token); -// // res.status(200).send(existingUser); -// // }; - -// // export const logout = async (req: Request, res: Response) => { -// // res.cookie('token', null, { -// // httpOnly: true, -// // secure: false, -// // expires: new Date(Date.now() + 500), -// // }); -// // res.status(200).send({ message: 'success' }); -// // }; - -// // export const getCurrentUser = async (req: CurrentUserRequest, res: Response) => { -// // if (!req.currentUser) { -// // return res.status(200).send({ currentUser: null }); -// // } -// // const user = await User.findById(req.currentUser); -// // res.status(200).send({ currentUser: user }); -// // }; -// // auth-controller.ts - -// // Import Express types from the express package -// import { Request, Response } from 'express'; - -// // Import axios as default, then extract AxiosError and isAxiosError via a workaround -// import axios from 'axios'; -// const { AxiosError, isAxiosError } = axios as any; - -// import { BadRequestError, CurrentUserRequest, EventTypes, Events } from '@chronosrx/common'; -// import { User } from '../models/user'; -// import { attachCookie } from '../util/attachCookie'; - -// // Note: We assert that req.body is our expected type. -// // If you use express.json() middleware in your app setup, req.body will be an object. 
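The signup controller posts a USER_CREATED event to the event bus and then narrows whatever gets thrown before logging it. That pattern, isolated into a small sketch using the axios helpers imported in this file (the event-bus URL matches the one in the controller; the `emitEvent` wrapper name is illustrative):

```ts
import axios, { isAxiosError } from 'axios';

async function emitEvent(event: { type: string; payload: unknown }): Promise<void> {
  try {
    await axios.post('http://localhost:3005/', { event });
  } catch (err: unknown) {
    if (isAxiosError(err)) {
      // HTTP / network level failure reported by axios
      console.log(`Failed to emit event: ${err.message}`);
    } else if (err instanceof Error) {
      console.log(`Failed to emit event: ${err.message}`);
    } else {
      console.log('Failed to emit event: unknown error');
    }
  }
}
```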
-// export const signup = async (req: Request, res: Response) => { -// // Assert the type of req.body to include username and password. -// const { username, password } = req.body as { username: string; password: string }; - -// // Validate inputs -// if (!username || !password || password.length < 4) { -// throw new BadRequestError('Invalid inputs'); -// } - -// // Check if a user with the supplied username already exists -// const existingUser = await User.findOne({ username }); -// if (existingUser) { -// throw new BadRequestError('User with that username exists'); -// } - -// // Create the user document and save it -// const newUser = User.build({ username, password }); -// await newUser.save(); - -// // Create an event for the new user -// const event: Events = { -// type: EventTypes.USER_CREATED, -// payload: { -// id: newUser.id, -// username: newUser.username, -// }, -// }; - -// // Post the event to the event bus -// try { -// await axios.post('http://localhost:3005/', { event }); -// } catch (err: unknown) { -// // Use our workaround type guard, if available -// if (isAxiosError && isAxiosError(err)) { -// console.log( -// `Failed to emit event USER_CREATED from auth: ${err.message || 'unknown error'}` -// ); -// } else if (err instanceof Error) { -// console.log(`Failed to emit event USER_CREATED from auth: ${err.message}`); -// } else { -// console.log('Failed to emit event USER_CREATED from auth: unknown error'); -// } -// } - -// // Create a JWT and attach it to the response cookie -// const token = newUser.createJwt(); -// attachCookie(res, token); - -// // Express's Response should have a status() method – if you’re getting errors here, -// // ensure your tsconfig.json does not include "dom" in the "lib" array. -// res.status(201).send(newUser); -// }; - -// export const login = async (req: Request, res: Response) => { -// const { username, password } = req.body as { username: string; password: string }; -// if (!username || !password) { -// throw new BadRequestError('Must provide username and password'); -// } -// const existingUser = await User.findOne({ username }); -// if (!existingUser) { -// throw new BadRequestError('Invalid credentials'); -// } -// const passwordsMatch = await existingUser.comparePassword(password); -// if (!passwordsMatch) { -// throw new BadRequestError('Invalid credentials'); -// } -// const token = existingUser.createJwt(); -// attachCookie(res, token); -// res.status(200).send(existingUser); -// }; - -// export const logout = async (req: Request, res: Response) => { -// res.cookie('token', null, { -// httpOnly: true, -// secure: false, -// expires: new Date(Date.now() + 500), -// }); -// res.status(200).send({ message: 'success' }); -// }; - -// export const getCurrentUser = async (req: CurrentUserRequest, res: Response) => { -// if (!req.currentUser) { -// return res.status(200).send({ currentUser: null }); -// } -// const user = await User.findById(req.currentUser); -// res.status(200).send({ currentUser: user }); -// }; import { Request, Response } from 'express'; import axios from 'axios'; import { BadRequestError, CurrentUserRequest, EventTypes, Events } from '@chronosrx/common'; diff --git a/examples/microservices/auth/src/index.ts b/examples/microservices/auth/src/index.ts index 005b16393..b0f84d41f 100644 --- a/examples/microservices/auth/src/index.ts +++ b/examples/microservices/auth/src/index.ts @@ -1,96 +1,4 @@ -// import { DbConnectionError } from '@chronosrx/common'; -// import { app } from './app.js'; -// import mongoose from 
'mongoose'; -// import { User } from './models/user.js'; -// import path from 'path'; -// import dotenv from 'dotenv'; -// dotenv.config({ path: path.resolve(__dirname + '../../.env') }); - -// const PORT = 3000; - -// const start = async () => { -// if (!process.env.MONGO_URI_AUTH) throw new Error('MONGO_URI_AUTH must be defined'); -// if (!process.env.JWT_KEY) throw new Error('JWT_KEY must be defined'); -// if (!process.env.JWT_LIFETIME) throw new Error('JWT_LIFETIME must be defined'); - -// try { -// await mongoose.connect(process.env.MONGO_URI_AUTH, {}); -// console.log('πŸƒ Connected to MongoDB'); - -// await User.deleteMany(); -// const testUser = User.build({ -// username: 'ScrumLord', -// password: 'McKenzie', -// }); -// await testUser.save(); -// } catch (err) { -// throw new DbConnectionError(); -// } - -// app.listen(PORT, async () => { -// console.log(`πŸ’₯ Auth listening on ${PORT}`); -// }); -// }; - -// start(); - -// import { DbConnectionError } from '@chronosrx/common'; -// import { app } from './app.js'; -// import mongoose from 'mongoose'; -// import { User } from './models/user.js'; -// import path from 'path'; -// import dotenv from 'dotenv'; -// import { fileURLToPath } from 'url'; - -// // Recreate __dirname in ES modules -// const __filename = fileURLToPath(import.meta.url); -// const __dirname = path.dirname(__filename); - -// // Load environment variables using the correct path -// dotenv.config({ path: path.resolve(__dirname, '/.env') }); - -// const PORT = 3000; - -// const start = async () => { -// try { -// if (!process.env.MONGO_URI_AUTH) throw new Error('❌ MONGO_URI_AUTH must be defined'); -// if (!process.env.JWT_KEY) throw new Error('❌ JWT_KEY must be defined'); -// if (!process.env.JWT_LIFETIME) throw new Error('❌ JWT_LIFETIME must be defined'); - -// console.log('βœ… Environment variables loaded successfully'); - -// try { -// console.log('πŸ›  Connecting to MongoDB...'); -// await mongoose.connect(process.env.MONGO_URI_AUTH, {}); - -// console.log('πŸƒ Successfully connected to MongoDB'); - -// await User.deleteMany(); -// console.log('πŸ—‘ Deleted existing users'); - -// const testUser = User.build({ -// username: 'ScrumLord', -// password: 'McKenzie', -// }); - -// await testUser.save(); -// console.log('πŸ‘€ Test user created successfully'); -// } catch (err) { -// console.error('πŸ”₯ MongoDB Connection Error:', err); -// throw new DbConnectionError(); -// } - -// app.listen(PORT, () => { -// console.log(`πŸ’₯ Auth service listening on ${PORT}`); -// }); -// } catch (err) { -// console.error('🚨 Startup Error:', err); -// process.exit(1); -// } -// }; - - -// start(); + import { DbConnectionError } from '@chronosrx/common'; import { app } from './app.js'; import mongoose from 'mongoose'; diff --git a/examples/microservices/auth/src/models/user.ts b/examples/microservices/auth/src/models/user.ts index fff51dce1..7540c4e92 100644 --- a/examples/microservices/auth/src/models/user.ts +++ b/examples/microservices/auth/src/models/user.ts @@ -1,81 +1,3 @@ -// import mongoose from 'mongoose'; -// import bcrypt from 'bcryptjs'; -// import jwt from 'jsonwebtoken'; - -// //define attrs -// interface UserAttrs { -// username: string; -// password: string; -// } -// // add a method 'build' to the UserModel -// // mongoose has built-in Model class that takes 'UserDoc' -// interface UserModel extends mongoose.Model { -// build(attrs: UserAttrs): UserDoc; -// } -// //create user data in the database in this shape -// interface UserDoc extends mongoose.Document { 
-// username: string; -// password: string; -// createJwt: () => string; -// comparePassword: (providedPassword: string) => boolean; -// } -// //create the Schema in mongoose with defined requirements -// const userSchema = new mongoose.Schema( -// { -// username: { -// type: String, -// required: true, -// unique: true, -// }, -// password: { -// type: String, -// required: true, -// }, -// }, -// { -// //anytime we create Json formatted data, transform the user document as following -// toJSON: { -// transform(doc, ret) { -// ret.id = ret._id; -// delete ret._id; -// delete ret.password; -// delete ret.__v; -// }, -// }, -// } -// ); - -// //pre is built-in moogoose function that runs before the function 'save' takes place -// userSchema.pre('save', async function () { -// // Check if password has been created or modified -// if (!this.isModified('password')) return; -// //if the password is modified, hash the password -// const salt = await bcrypt.genSalt(10); -// const hashedPassword = await bcrypt.hash(this.password, salt); -// this.password = hashedPassword; -// }); -// //schema has property: "method", custom define "comparePassword" -// userSchema.methods.comparePassword = async function (providedPassword: string) { -// const isMatch = await bcrypt.compare(providedPassword, this.password); -// return isMatch; -// }; - -// userSchema.methods.createJwt = function () { -// const token = jwt.sign({ userId: this._id }, process.env.JWT_KEY!, { -// expiresIn: process.env.JWT_LIFETIME, -// }); -// return token; -// }; - -// userSchema.statics.build = (attrs: UserAttrs) => { -// //returning user document with (attrs) passed in -// return new User(attrs); -// }; - -// const User = mongoose.model('User', userSchema); - -// export { User }; - import mongoose from 'mongoose'; import bcrypt from 'bcryptjs'; import jwt, { SignOptions } from 'jsonwebtoken'; @@ -180,60 +102,3 @@ export { User }; -// const newUser = User.create({ -// poo: 'doo', -// doo: 'doo' -// }) - -// const testUser = User.build({ -// username: null, -// password: null -// }); -// await testUser.save() - -// const thisUser = await User.findById(userId) -// res.send(thisUser) || res.json(thisUser) - -// testUser.password = "derpdeedoo" - -// TYPESCRIPT STUFF -// *** interfaces define object structure - properties and value data types *** -// interface SampleInterface { -// property: string; -// count: number; -// role: 'user' | 'admin'; -// numArr: number[] | [number]; -// } - -// const sampleObj: SampleInterface = { -// property: 'string', -// count: 10, -// role: 'user', -// numArr: [10], -// }; - -// *** types defined types ayyyy *** -// type interfaceArray = SampleInterface[]; - -// const sampleArr: interfaceArray = [sampleObj]; - -// *** union types specify exact values a type can be *** -// type ActionType = 'USER_CREATED' | 'USER_DELETED' | 'USER_UPDATED'; - -// *** enums: similar to union types but values are accessed similarly to object *** -// *** example below: Action.USER_CREATED = 'USER_CREATED' *** -// enum Action { -// USER_CREATED = 'USER_CREATED', -// USER_DELETED = 'USER_DELETED', -// USER_UPDATED = 'USER_UPDATED', -// } - -// Action. 
- -// const string: ActionType = 'USER_CREATED'; - -// Action.USER_CREATED; // -> 'USER_CREATED' - -// { -// type: Action.USER_CREATED; -// } diff --git a/examples/microservices/auth/src/test/setup.ts b/examples/microservices/auth/src/test/setup.ts index 4d774b082..a43972b9b 100644 --- a/examples/microservices/auth/src/test/setup.ts +++ b/examples/microservices/auth/src/test/setup.ts @@ -1,56 +1,3 @@ -// import { MongoMemoryServer } from 'mongodb-memory-server'; -// import mongoose from 'mongoose'; -// import { app } from '../app'; -// import request from 'supertest'; - -// declare global { -// function login(): Promise; -// } - -// let mongo: any; - -// beforeAll(async () => { -// process.env.JWT_KEY = 'asdfasdfasdf'; - -// mongo = await MongoMemoryServer.create(); -// const mongoUri = mongo.getUri(); - -// await mongoose.connect(mongoUri, {}); -// }); - -// beforeEach(async () => { -// const collections = await mongoose.connection.db.collections(); - -// for (let collection of collections) { -// await collection.deleteMany({}); -// } -// }); - -// afterAll(async () => { -// if (mongo) { -// await mongo.stop(); -// } - -// await mongoose.connection.close(); -// }); - -// global.login = async () => { -// const username = 'test lord'; -// const password = 'test1234'; - -// const response = await request(app) -// .post('/api/auth/signup') -// .send({ -// username, -// password, -// }) -// .expect(200); - -// const cookie = response.get('Set-Cookie'); - -// return cookie; -// }; - import { MongoMemoryServer } from 'mongodb-memory-server'; import mongoose from 'mongoose'; import { app } from '../app.js'; diff --git a/examples/microservices/auth/tsconfig.json b/examples/microservices/auth/tsconfig.json index 96dbd2c25..726573b50 100644 --- a/examples/microservices/auth/tsconfig.json +++ b/examples/microservices/auth/tsconfig.json @@ -2,103 +2,24 @@ "compilerOptions": { /* Visit https://aka.ms/tsconfig to read more about this file */ - /* Projects */ - // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ - // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ - // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ - // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ - // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ - // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ - "types": ["jest", "node"], + "types": ["jest", "node"], /* Language and Environment */ - "target": "ESNext", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ - // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ - // "jsx": "preserve", /* Specify what JSX code is generated. */ - // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ - // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ - // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. 
*/ - // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ - // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ - // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ - // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ - // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ - // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + "target": "ESNext" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, /* Modules */ - "module": "NodeNext", /* Specify what module code is generated. */ - // "rootDir": "./", /* Specify the root folder within your source files. */ - "moduleResolution": "nodenext", /* Specify how TypeScript looks up a file from a given module specifier. */ - // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ - // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ - // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ - // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ - // "types": [], /* Specify type package names to be included without being referenced in a source file. */ - // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ - // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ - // "resolveJsonModule": true, /* Enable importing .json files. */ - // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + "module": "NodeNext" /* Specify what module code is generated. */, - /* JavaScript Support */ - // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ - // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ - // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + "moduleResolution": "nodenext" /* Specify how TypeScript looks up a file from a given module specifier. */, - /* Emit */ - // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ - // "declarationMap": true, /* Create sourcemaps for d.ts files. */ - // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ - // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ - // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ - // "outDir": "./", /* Specify an output folder for all emitted files. */ - // "removeComments": true, /* Disable emitting comments. */ - // "noEmit": true, /* Disable emitting files from a compilation. */ - // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. 
*/ - // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ - // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ - // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ - // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ - // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ - // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ - // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ - // "newLine": "crlf", /* Set the newline character for emitting files. */ - // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ - // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ - // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ - // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ - // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ - // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ + "allowSyntheticDefaultImports": true /* Allow 'import x from y' when a module doesn't have a default export. */, + "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */, - /* Interop Constraints */ - // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ - "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ - "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ - // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ - "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ + "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, /* Type Checking */ - "strict": true, /* Enable all strict type-checking options. */ - // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ - // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ - // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ - // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ - // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ - // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ - // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ - // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. 
*/ - // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ - // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ - // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ - // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ - // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ - // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ - // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ - // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ - // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ - // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + "strict": true, - /* Completeness */ - // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ - "skipLibCheck": true /* Skip type checking all .d.ts files. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. */ }, "include": ["src/**/*"] } diff --git a/examples/microservices/client/src/chronos-config.js b/examples/microservices/client/src/chronos-config.js index 7dc4d27bf..6ca255c16 100644 --- a/examples/microservices/client/src/chronos-config.js +++ b/examples/microservices/client/src/chronos-config.js @@ -32,9 +32,9 @@ // }; // module.exports = chronosConfig; import path from 'path'; +import {dirname} from 'path'; import { fileURLToPath } from 'url'; import dotenv from 'dotenv'; - // Recreate __dirname for ESM const __filename = fileURLToPath(import.meta.url); const __dirname = path.dirname(__filename); diff --git a/examples/microservices/client/src/server.js b/examples/microservices/client/src/server.js index d47c33ea8..46b56a0e1 100644 --- a/examples/microservices/client/src/server.js +++ b/examples/microservices/client/src/server.js @@ -1,151 +1,23 @@ -// const path = require('path'); -// const express = require('express'); -// const PORT = 5001; - -// const chronosConfig = require('./chronos-config'); -// const Chronos = require('../../../../chronos_npm_package/chronos'); -// const chronos = new Chronos(chronosConfig); -// chronos.propagate(); - -// const app = express(); - -// const trackingMiddleware = chronos.track(); -// app.use(trackingMiddleware); - -// app.use('/assets', express.static(path.resolve(__dirname, './assets'))); - -// app.use('*', (req, res) => { -// res.status(200).sendFile(path.resolve(__dirname, './index.html')); -// }); - -// app.use((err, req, res, next) => { -// res.status(500).send({ message: 'Something went wrong' }); -// }); - -// const start = () => { -// app.listen(PORT, () => { -// console.log(`Client server listening on port ${PORT}`); -// }); -// }; - -// start(); -// const path = require('path'); -// const express = require('express'); - -// const PORT = 5001; - -// // Verify that './chronos-config' exists and exports the proper configuration -// const chronosConfig = require('./chronos-config'); - -// // Verify that the relative path points to your Chronos module. 
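One consequence of the `module: NodeNext` / `moduleResolution: nodenext` pair kept in the tsconfig above is that relative imports in the TypeScript sources must spell out the emitted `.js` extension, which is why the rewritten files import from `'../app.js'`, `'./chronos-config.js'`, and so on. A minimal illustration (file names are placeholders):

```ts
// with "module": "NodeNext" in tsconfig.json
import { app } from './app.js'; // resolves to ./app.ts at compile time, ./app.js at runtime
// import { app } from './app';   // compile error: relative ESM imports need an explicit extension
```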
-// // If Chronos is published on npm or is available via a package alias, you might instead do: -// // const Chronos = require('chronos'); -// // Otherwise, ensure that the file exists at the given relative path. -// const Chronos = require('../../../../chronos_npm_package/dist/chronos.js'); -// // const Chronos = require('chronos'); - -// // Create a new Chronos instance with your configuration -// const chronos = new Chronos(chronosConfig); - -// // Initialize propagation. (Ensure that chronos.propagate() is defined and works as expected.) -// chronos.propagate(); - -// // Create the Express app -// const app = express(); - -// // Use Chronos’ tracking middleware -// const trackingMiddleware = chronos.track(); -// app.use(trackingMiddleware); - -// // Serve static assets from the './assets' directory -// app.use('/assets', express.static(path.resolve(__dirname, './assets'))); - -// // For any other route, send the index.html file -// app.use('*', (req, res) => { -// res.status(200).sendFile(path.resolve(__dirname, './index.html')); -// }); - -// // Error handling middleware -// app.use((err, req, res, next) => { -// res.status(500).send({ message: 'Something went wrong' }); -// }); - -// // Start the server on PORT -// const start = () => { -// app.listen(PORT, () => { -// console.log(`Client server listening on port ${PORT}`); -// }); -// }; - -// const path = require('path'); -// const express = require('express'); - -// const PORT = 5001; - -// // Verify that './chronos-config' exists and exports the proper configuration -// const chronosConfig = require('./chronos-config'); - -// (async () => { -// try { -// // Dynamically import the Chronos ES module -// const ChronosModule = await import('../../../../chronos_npm_package/dist/chronos.js'); -// const Chronos = ChronosModule.default; - -// // Create a new Chronos instance with your configuration -// const chronos = new Chronos(chronosConfig); - -// // Initialize propagation (assuming chronos.propagate() is defined) -// chronos.propagate(); - -// // Create the Express app -// const app = express(); - -// // Use Chronos’ tracking middleware -// const trackingMiddleware = chronos.track(); -// app.use(trackingMiddleware); - -// // Serve static assets from the './assets' directory -// app.use('/assets', express.static(path.resolve(__dirname, './assets'))); - -// // For any other route, send the index.html file -// app.use('*', (req, res) => { -// res.status(200).sendFile(path.resolve(__dirname, './index.html')); -// }); - -// // Error handling middleware -// app.use((err, req, res, next) => { -// res.status(500).send({ message: 'Something went wrong' }); -// }); - -// // Start the server on PORT -// app.listen(PORT, () => { -// console.log(`Client server listening on port ${PORT}`); -// }); -// } catch (err) { -// console.error('Failed to load Chronos module:', err); -// process.exit(1); -// } -// })(); import path from 'path'; import express from 'express'; - +import { dirname } from 'path'; const PORT = 5001; // Verify that './chronos-config' exists and exports the proper configuration // const chronosConfig = require('./chronos-config'); -// import chronosConfig from './chronos-config.js'; - -import * as chronosConfig from './chronos-config.js'; +import chronosConfig from './chronos-config.js'; +import {Chronos} from '../../../../chronos_npm_package/dist/chronos.js' +// import * as chronosConfig from './chronos-config.js'; (async () => { try { // Dynamically import the Chronos ES module from the dist folder - const ChronosModule = 
diff --git a/index.cjs b/index.cjs
new file mode 100644
index 000000000..23e1e1104
--- /dev/null
+++ b/index.cjs
@@ -0,0 +1,16 @@
+// index.cjs
+(async () => {
+  try {
+    // Dynamically import your main ES module
+    const mainModule = await import('./build/electron/Main.js');
+
+    // If your Main.js exports a default function, call it.
+    if (typeof mainModule.default === 'function') {
+      mainModule.default();
+    } else {
+      console.error('Main.js does not export a default function.');
+    }
+  } catch (err) {
+    console.error('Error loading Main.js:', err);
+  }
+})();
diff --git a/package.json b/package.json
index 47af3df62..8103faa59 100644
--- a/package.json
+++ b/package.json
@@ -118,7 +118,7 @@
     "backend-test": "npx jest --config __backend-tests__/jest.config.js --verbose",
     "start:selenium": "node e2e/seleniumTest.js",
     "start:e2e": "npm run dev:electron & npm run start:selenium",
-    "start:electron": "concurrently \"npm:build\" \"npm:dev:app\" \"npm:dev:electron\"",
+    "start:electron": "concurrently \"npm:dev:app\" \"npm:dev:electron\"",
     "start:microservices": "cd examples/microservices && make all && npm run pkg:install && npm start"
   },
   "repository": {
diff --git a/tsconfig.json b/tsconfig.json
index d7df41538..1746414d8 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -14,11 +14,9 @@
     "jsx": "react-jsx",
     "esModuleInterop": true,
     "moduleResolution": "nodenext", //@see https://stackoverflow.com/questions/72638285/cannot-find-module-mongoose-with-typescript
-    // "resolveJsonModule": true, //! Error: Option '--resolveJsonModule' cannot be specified when 'moduleResolution' is set to 'classic'.ts
     "skipLibCheck": true,
     "noEmitOnError": true,
     "allowSyntheticDefaultImports": true,
-    // "types": ["jest", "node", "@testing-library/jest-dom"],
     "types": ["jest", "node"]
   },
   "exclude": [
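One knock-on effect of the "moduleResolution": "nodenext" setting kept above: in files compiled as ES modules, TypeScript requires relative import specifiers to carry the runtime file extension, which is why the imports in this change end in .js. A minimal illustration (the specifier is hypothetical):

// Under nodenext ESM resolution the extension is mandatory; writing
// './chronos-config' without '.js' is a compile-time error, even when
// the source file on disk is TypeScript.
import chronosConfig from './chronos-config.js';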
diff --git a/webpack.config.cjs b/webpack.config.cjs
new file mode 100644
index 000000000..9849526a3
--- /dev/null
+++ b/webpack.config.cjs
@@ -0,0 +1,91 @@
+const path = require('path');
+const fs = require('fs');
+const HtmlWebpackPlugin = require('html-webpack-plugin');
+const CopyWebpackPlugin = require('copy-webpack-plugin');
+
+module.exports = {
+  stats: {
+    warningsFilter: (warning) => warning.includes('Deprecation'), // ignores SCSS deprecation warnings
+  },
+  entry: './app/index.tsx',
+  output: {
+    path: path.resolve(__dirname, 'dist'),
+    filename: 'bundle.[contenthash].js',
+  },
+  devtool: 'eval-source-map',
+  module: {
+    rules: [
+      {
+        test: /\.[jt]sx?$/,
+        loader: 'esbuild-loader',
+        options: {
+          // JavaScript version to compile to
+          target: 'es2015',
+        },
+        exclude: /node_modules/,
+      },
+      {
+        test: /\.s?css$/,
+        use: ['style-loader', 'css-loader',
+          {
+            loader: 'sass-loader',
+            options: {
+              implementation: require('sass'), // Use Dart Sass
+            },
+          },
+        ],
+        exclude: /node_modules/,
+      },
+      {
+        test: /\.(jpg|jpeg|png|ttf|svg|gif)$/,
+        type: 'asset/resource',
+        exclude: /node_modules/,
+      },
+    ],
+  },
+  mode: 'development',
+  devServer: {
+    port: 8080,
+    hot: true,
+    historyApiFallback: true,
+    static: './app',
+  },
+  plugins: [
+    new HtmlWebpackPlugin({
+      template: 'app/index.html',
+    }),
+    new CopyWebpackPlugin({
+      patterns: [
+        {
+          from: path.resolve(__dirname, 'node_modules/react-devtools'), // Path to the React DevTools directory in node_modules
+          to: 'react-devtools', // Output directory in your webpack build
+        },
+      ],
+    })
+  ],
+  resolve: {
+    extensions: ['.js', '.jsx', '.ts', '.tsx', '.gif', '.png', '.svg'],
+  },
+};
+// webpack.config.js (ES Module version)
+// import path from 'path';
+// import HtmlWebpackPlugin from 'html-webpack-plugin';
+
+// // Note: In an ES module, __dirname is not available by default.
+// // We can use process.cwd() to get the current working directory, or define __dirname manually.
+// const __dirname = process.cwd();
+
+// export default {
+//   mode: 'development', // or 'production' depending on your needs
+//   entry: './src/index.js', // adjust the entry point as needed
+//   output: {
+//     path: path.resolve(__dirname, 'dist'),
+//     filename: 'bundle.js',
+//   },
+//   plugins: [
+//     new HtmlWebpackPlugin({
+//       template: './src/index.html', // adjust the template path as needed
+//     }),
+//   ],
+//   // Add any additional configuration options below
+// };
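The commented-out ES-module variant at the bottom falls back to process.cwd() for __dirname, which changes with whatever directory webpack is launched from. A minimal sketch of the more robust ESM pattern (the same one chronos-config.js uses), in case the config is ever converted:

import path from 'path';
import { fileURLToPath } from 'url';

// Derive __dirname from this module's URL instead of the launch directory.
const __dirname = path.dirname(fileURLToPath(import.meta.url));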