feature/IO-3029-Enhanced-Logging-File-Based: Add File based S3 Logging.
Signed-off-by: Dave Richer <dave@imexsystems.ca>
This commit is contained in:
@@ -15,7 +15,7 @@ const { canvastest } = require("../render/canvas-handler");
|
||||
const { alertCheck } = require("../alerts/alertcheck");
|
||||
|
||||
//Test route to ensure Express is responding.
|
||||
router.get("/test", async function (req, res) {
|
||||
router.get("/test", eventAuthorizationMiddleware, async function (req, res) {
|
||||
const commit = require("child_process").execSync("git rev-parse --short HEAD");
|
||||
// console.log(app.get('trust proxy'));
|
||||
// console.log("remoteAddress", req.socket.remoteAddress);
|
||||
@@ -32,6 +32,30 @@ router.get("/test", async function (req, res) {
|
||||
res.status(200).send(`OK - ${commit}`);
|
||||
});
|
||||
|
||||
// Exercises the enhanced file-based logging paths end to end.
router.get("/test-logs", eventAuthorizationMiddleware, (req, res) => {
  const { logger } = req;

  // Test 1: Log with a message that exceeds the size limit, triggering an upload to S3.
  const largeMessage = "A".repeat(256 * 1024 + 1); // Message larger than the log size limit
  logger.log(largeMessage, "error", "user123", null, { detail: "large log entry" });

  // Test 2: Log with a message that is within the size limit, should log directly using winston.
  const smallMessage = "A small log message";
  logger.log(smallMessage, "info", "user123", null, { detail: "small log entry" });

  // Test 3: Log with the `upload` flag set to `true`, forcing the log to be uploaded to S3.
  logger.log(
    "This log will be uploaded to S3 regardless of size",
    "warning",
    "user123",
    null,
    { detail: "upload log" },
    true
  );

  // Test 4: Log with a message that doesn't exceed the size limit and doesn't require an upload.
  logger.log("Normal log entry", "debug", "user123", { id: 4 }, { detail: "normal log entry" });

  // Bug fix: the handler previously never responded, so clients hung until
  // their socket timeout. Acknowledge completion explicitly.
  res.status(200).send("OK - log tests dispatched");
});
|
||||
|
||||
// Search endpoints.
// /search requires an authenticated Firebase user and a per-user GraphQL client;
// /opensearch is gated by the event-authorization middleware instead.
router.post("/search", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, os.search);
router.post("/opensearch", eventAuthorizationMiddleware, os.handler);
|
||||
|
||||
@@ -9,6 +9,8 @@ const winston = require("winston");
|
||||
const WinstonCloudWatch = require("winston-cloudwatch");
|
||||
const { isString, isEmpty } = require("lodash");
|
||||
const { networkInterfaces, hostname } = require("node:os");
|
||||
const { uploadFileToS3 } = require("./s3");
|
||||
const { v4 } = require("uuid");
|
||||
|
||||
const LOG_LEVELS = {
|
||||
error: { level: 0, name: "error" },
|
||||
@@ -20,6 +22,24 @@ const LOG_LEVELS = {
|
||||
silly: { level: 6, name: "silly" }
|
||||
};
|
||||
|
||||
// Any single serialized log entry above this byte count is diverted to S3
// instead of being logged through winston directly.
const LOG_LENGTH_LIMIT = 256 * 1024; // 256KB

// Per-instance destination bucket for oversized log payloads.
const S3_BUCKET_NAME = InstanceManager({
  imex: "imex-large-log",
  rome: "rome-large-log"
});
|
||||
|
||||
/**
 * Rough character-count estimate of a log entry's serialized size.
 *
 * Sums, for every own enumerable key, the key length plus the value length
 * (string values counted directly, everything else via JSON.stringify).
 * Intentionally approximate — JSON punctuation and quoting are ignored —
 * since this is only a cheap size heuristic.
 *
 * Fix: JSON.stringify returns `undefined` (not a string) for `undefined`,
 * functions, and symbols; the original then crashed on `.length`. Such
 * values now contribute 0 to the estimate.
 *
 * @param {Object} logEntry - map of log field names to values
 * @returns {number} estimated size in characters
 */
const estimateLogSize = (logEntry) => {
  let estimatedSize = 0;
  for (const [key, value] of Object.entries(logEntry)) {
    const serialized = typeof value === "string" ? value : JSON.stringify(value) ?? "";
    estimatedSize += key.length + serialized.length;
  }
  return estimatedSize;
};
|
||||
|
||||
// Lowercase a log level name; any falsy input (null, undefined, "") falls
// back to the debug level's canonical name.
const normalizeLevel = (level) => {
  if (level) {
    return level.toLowerCase();
  }
  return LOG_LEVELS.debug.name;
};
|
||||
|
||||
const createLogger = () => {
|
||||
@@ -124,15 +144,56 @@ const createLogger = () => {
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Core logging entry point for the created logger.
 *
 * Builds a structured entry and either logs it through winston or, when the
 * entry is oversized (or the caller forces it), uploads it as a JSON file to
 * the instance's large-log S3 bucket.
 *
 * @param {string} message - log message body
 * @param {string} type - level name, case-insensitive (falsy -> debug)
 * @param {string} user - user identifier associated with the event
 * @param {Object|null} record - optional record payload
 * @param {Object} meta - arbitrary structured metadata
 * @param {boolean} [upload] - when true, force the entry to S3 regardless of size
 */
const log = (message, type, user, record, meta, upload) => {
  const logEntry = {
    level: normalizeLevel(type),
    message,
    user,
    record,
    hostname: internalHostname,
    meta
  };

  // Ships the entry to S3 under a unique, time-sortable key, then records
  // the outcome with a small (truncated) follow-up log so the confirmation
  // itself can never become another oversized entry.
  const uploadLogToS3 = (logEntry, message, type, user) => {
    const uniqueId = v4();
    const dateTimeString = new Date().toISOString().replace(/:/g, "-"); // ':' is awkward in S3 keys
    const logStreamName = `${dateTimeString}-${internalHostname}-${uniqueId}`;
    const logString = JSON.stringify(logEntry);

    uploadFileToS3({ bucketName: S3_BUCKET_NAME, key: logStreamName, content: logString })
      .then(() => {
        log("A log file has been uploaded to S3", "info", "S3", null, {
          logStreamName,
          message: message?.slice(0, 200),
          type,
          user
        });
      })
      .catch((err) => {
        log("Error in S3 Upload", "error", "S3", null, {
          logStreamName,
          message: message?.slice(0, 100),
          type,
          user,
          errorMessage: err?.message?.slice(0, 100)
        });
      });
  };

  // Uploads the entry when its serialized size nears the limit, returning
  // true when the upload path was taken.
  const checkAndUploadLog = () => {
    const logString = JSON.stringify(logEntry);
    const logSize = Buffer.byteLength(logString, "utf8");

    // Trigger at 90% of the limit to leave headroom for transport overhead.
    // (The original also tested `logSize > LOG_LENGTH_LIMIT`, which is
    // subsumed by this check and has been dropped.)
    if (logSize > LOG_LENGTH_LIMIT * 0.9) {
      uploadLogToS3(logEntry, message, type, user);
      return true;
    }
    return false;
  };

  // Bug fix: `if (upload || checkAndUploadLog()) return;` short-circuited on
  // a truthy `upload` and returned WITHOUT uploading, silently dropping
  // every "forced" upload. Force-uploads now actually reach S3.
  if (upload) {
    uploadLogToS3(logEntry, message, type, user);
    return;
  }
  if (checkAndUploadLog()) return;

  winstonLogger.log(logEntry);
};
|
||||
|
||||
return {
|
||||
|
||||
112
server/utils/s3.js
Normal file
112
server/utils/s3.js
Normal file
@@ -0,0 +1,112 @@
|
||||
const {
  S3Client,
  PutObjectCommand,
  GetObjectCommand,
  HeadObjectCommand,
  ListObjectsV2Command,
  DeleteObjectCommand,
  CopyObjectCommand
} = require("@aws-sdk/client-s3");
const { defaultProvider } = require("@aws-sdk/credential-provider-node");
const { default: InstanceManager } = require("./instanceMgr");
const { isString, isEmpty } = require("lodash");
|
||||
/**
 * Builds the module-singleton S3 helper bound to the instance-specific
 * region. When LOCALSTACK_HOSTNAME is set, all traffic is redirected to a
 * local LocalStack endpoint instead of AWS.
 */
const createS3Client = () => {
  const S3Options = {
    region: InstanceManager({
      imex: "ca-central-1",
      rome: "us-east-2"
    }),
    credentials: defaultProvider()
  };

  const isLocal = isString(process.env?.LOCALSTACK_HOSTNAME) && !isEmpty(process.env?.LOCALSTACK_HOSTNAME);

  if (isLocal) {
    S3Options.endpoint = `http://${process.env.LOCALSTACK_HOSTNAME}:4566`;
    S3Options.forcePathStyle = true; // Needed for LocalStack to avoid bucket name as hostname
  }

  const s3Client = new S3Client(S3Options);

  /**
   * Uploads a file to the specified S3 bucket and key.
   * @param {Object} args
   * @param {string} args.bucketName
   * @param {string} args.key
   * @param {string|Buffer} args.content
   * @param {string} [args.contentType] - defaults to "application/json"
   * @returns {Promise<Object>} PutObject response
   */
  const uploadFileToS3 = async ({ bucketName, key, content, contentType }) => {
    const params = {
      Bucket: bucketName,
      Key: key,
      Body: content,
      ContentType: contentType ?? "application/json"
    };
    const command = new PutObjectCommand(params);
    return await s3Client.send(command);
  };

  /**
   * Downloads a file from the specified S3 bucket and key.
   * @returns {Promise<Object>} the object Body stream — callers must consume it
   */
  const downloadFileFromS3 = async ({ bucketName, key }) => {
    const params = { Bucket: bucketName, Key: key };
    const command = new GetObjectCommand(params);
    const data = await s3Client.send(command);
    return data.Body;
  };

  /**
   * Lists objects in the specified S3 bucket, optionally under a key prefix.
   * NOTE(review): ListObjectsV2 returns at most 1000 keys per call and this
   * helper does not paginate with ContinuationToken — confirm callers never
   * need more than one page.
   * @returns {Promise<Array>} object summaries (empty array when none)
   */
  const listFilesInS3Bucket = async (bucketName, prefix = "") => {
    const params = { Bucket: bucketName, Prefix: prefix };
    const command = new ListObjectsV2Command(params);
    const data = await s3Client.send(command);
    return data.Contents || [];
  };

  /**
   * Deletes a file from the specified S3 bucket and key.
   * @returns {Promise<Object>} DeleteObject response
   */
  const deleteFileFromS3 = async ({ bucketName, key }) => {
    const params = { Bucket: bucketName, Key: key };
    const command = new DeleteObjectCommand(params);
    return await s3Client.send(command);
  };

  /**
   * Copies a file within S3 from a source bucket/key to a destination bucket/key.
   * @returns {Promise<Object>} CopyObject response
   */
  const copyFileInS3 = async ({ sourceBucket, sourceKey, destinationBucket, destinationKey }) => {
    const params = {
      CopySource: `/${sourceBucket}/${sourceKey}`,
      Bucket: destinationBucket,
      Key: destinationKey
    };
    const command = new CopyObjectCommand(params);
    return await s3Client.send(command);
  };

  /**
   * Checks if a file exists in the specified S3 bucket and key.
   *
   * Fix: uses a HEAD request instead of downloading the object. The previous
   * implementation fetched the full body just to test existence — and never
   * consumed the returned stream, which can stall the SDK's connection pool.
   * @returns {Promise<boolean>}
   */
  const fileExistsInS3 = async ({ bucketName, key }) => {
    try {
      await s3Client.send(new HeadObjectCommand({ Bucket: bucketName, Key: key }));
      return true;
    } catch (error) {
      // HeadObject surfaces a missing key as 404/"NotFound"; "NoSuchKey" is
      // kept for parity with the previous GetObject-based behavior.
      if (error.name === "NotFound" || error.name === "NoSuchKey" || error?.$metadata?.httpStatusCode === 404) {
        return false;
      }
      throw error;
    }
  };

  return {
    uploadFileToS3,
    downloadFileFromS3,
    listFilesInS3Bucket,
    deleteFileFromS3,
    copyFileInS3,
    fileExistsInS3
  };
};

module.exports = createS3Client();
|
||||
Reference in New Issue
Block a user