31 Commits

Author SHA1 Message Date
Patrick FIc
f5444586a5 Major release. 2025-11-14 11:44:34 -08:00
Allan Carr
4e1484925a IO-3437 Beta 4 2025-11-10 20:08:28 -08:00
Allan Carr
e8d69d0b09 IO-3437 Beta 4 2025-11-10 20:06:43 -08:00
Allan Carr
a322fb5ca4 IO-3437 Beta 4 2025-11-10 16:11:05 -08:00
Allan Carr
a093fa20a6 IO-3437 Beta 3 2025-11-10 15:19:49 -08:00
Allan Carr
95302b4fe0 IO-3437 Beta.2 2025-11-10 14:02:50 -08:00
Allan Carr
1122aaed3d Merge branch 'release/1.0.8' of bitbucket.org:snaptsoft/bodyshop-desktop into release/1.0.8 2025-11-10 14:00:52 -08:00
Allan Carr
93ee3bafe6 IO-3437 Correct EMS Export 2025-11-10 14:00:40 -08:00
Patrick FIc
18f6f01d49 Beta release. 2025-11-05 08:36:33 -08:00
Allan Carr
6e495caf16 IO-3423 Insd backfill 2025-10-30 13:26:33 -07:00
Allan Carr
ff809d26d0 IO-3423 Use INSD if CLMD and OWNR Blank 2025-10-29 19:56:53 -07:00
Patrick Fic
07a4427a0b Update version to final release. 2025-10-22 12:18:53 -07:00
Patrick FIc
680ae4ca08 FInalized package updates and resolve electron store. 2025-10-21 11:30:56 -07:00
Patrick FIc
e6e1785413 All package updates. 2025-10-21 10:46:12 -07:00
Patrick FIc
bf1e137c6c Resolve possible update memory leak., 2025-10-21 10:03:40 -07:00
Patrick FIc
6674f33be9 Minor refactor. 2025-10-20 21:29:48 -07:00
Patrick Fic
7742d6f89f Increase heap log size to five GB. 2025-10-20 17:07:01 -07:00
Patrick Fic
6d830ae98b Update browser window calls. 2025-10-20 14:52:37 -07:00
Patrick Fic
19bd41375e Add extensive memory logging. 2025-10-20 14:25:46 -07:00
Patrick Fic
cf0d457d1c Merge branch '1.0.6' 2025-09-17 12:03:07 -07:00
Patrick Fic
83ca7a251b Final release for 1.0.6. Resolve issue with destructure on CCC PPC. 2025-09-17 12:02:57 -07:00
Patrick FIc
41caa76b28 Remove notification for update. 2025-09-10 14:59:24 -07:00
Allan Carr
45bc12a2f5 IO-3361 PPG Mix Data 2025-09-10 13:51:43 -07:00
Patrick FIc
093012c8f7 Merge branch 'main' of bitbucket.org:snaptsoft/bodyshop-desktop 2025-09-09 16:07:43 -07:00
Patrick FIc
bd9fa67087 Check release channel each time. 2025-09-09 16:07:26 -07:00
Patrick Fic
c5bdb62cb6 Merge branch '1.0.5' 2025-09-09 16:06:38 -07:00
Patrick Fic
e9dd8ff760 Finalize release. 2025-09-09 16:06:20 -07:00
Patrick FIc
1c839ee3f8 Update executable names for beta and alpha paths. 2025-09-09 15:47:31 -07:00
Patrick Fic
9fdd88526c IO-3358 Resolve update channel & null claim number. 2025-09-08 10:30:53 -07:00
Patrick FIc
d5b40ef6f4 Merge branch '1.0.3' 2025-08-28 13:01:44 -07:00
Patrick Fic
3b918d3fcb Merge branch '1.0.3' 2025-08-28 12:55:02 -07:00
32 changed files with 3623 additions and 2953 deletions

View File

@@ -2,7 +2,7 @@ VITE_FIREBASE_CONFIG={"apiKey":"AIzaSyDPLT8GiDHDR1R4nI66Qi0BY1aYviDPioc","authDo
VITE_GRAPHQL_ENDPOINT=https://db.dev.imex.online/v1/graphql VITE_GRAPHQL_ENDPOINT=https://db.dev.imex.online/v1/graphql
VITE_FIREBASE_CONFIG_TEST={ "apiKey":"AIzaSyBw7_GTy7GtQyfkIRPVrWHEGKfcqeyXw0c", "authDomain":"imex-test.firebaseapp.com", "projectId":"imex-test", "storageBucket":"imex-test.appspot.com", "messagingSenderId":"991923618608", "appId":"1:991923618608:web:633437569cdad78299bef5", "measurementId":"G-TW0XLZEH18"} VITE_FIREBASE_CONFIG_TEST={ "apiKey":"AIzaSyBw7_GTy7GtQyfkIRPVrWHEGKfcqeyXw0c", "authDomain":"imex-test.firebaseapp.com", "projectId":"imex-test", "storageBucket":"imex-test.appspot.com", "messagingSenderId":"991923618608", "appId":"1:991923618608:web:633437569cdad78299bef5", "measurementId":"G-TW0XLZEH18"}
VITE_GRAPHQL_ENDPOINT_TEST=https://db.test.bodyshop.app/v1/graphql VITE_GRAPHQL_ENDPOINT_TEST=https://db.test.bodyshop.app/v1/graphql
VITE_COMPANY=ROME VITE_COMPANY=IMEX
VITE_FE_URL=https://imex.online VITE_FE_URL=https://imex.online
VITE_FE_URL_TEST=https://test.imex.online VITE_FE_URL_TEST=https://test.imex.online
VITE_API_URL="http://localhost:4000" VITE_API_URL="http://localhost:4000"

View File

@@ -0,0 +1,53 @@
#!/usr/bin/env node
// Cross-platform script to set artifact naming based on version.
//
// Reads the version from ../package.json, derives ARTIFACT_SUFFIX
// ("alpha-<version>-", "beta-<version>-", or "" for a release build),
// exports it into the current process environment, and — when extra CLI
// arguments are given — spawns that command with the variable set.
const fs = require('fs');
const path = require('path');
const { spawn } = require('child_process');

// Read the package.json to get the version.
const packageJsonPath = path.join(__dirname, '..', 'package.json');
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
const version = packageJson.version;
console.log(`Current version: ${version}`);

// Determine the artifact suffix based on the version.
// "alpha" is checked before "beta" so a version containing both tags as
// substrings resolves to alpha (matches the .ps1/.sh siblings).
let artifactSuffix = '';
if (version.includes('alpha')) {
  artifactSuffix = `alpha-${version}-`;
  console.log(`Detected alpha version, setting suffix to: ${artifactSuffix}`);
} else if (version.includes('beta')) {
  artifactSuffix = `beta-${version}-`;
  console.log(`Detected beta version, setting suffix to: ${artifactSuffix}`);
} else {
  console.log('Detected release version, no suffix will be added');
}

// Set the environment variable for the current process.
process.env.ARTIFACT_SUFFIX = artifactSuffix;
console.log(`ARTIFACT_SUFFIX set to: '${artifactSuffix}'`);

// If arguments are passed, execute the remaining command with the environment variable set.
if (process.argv.length > 2) {
  const command = process.argv[2];
  const args = process.argv.slice(3);
  console.log(`Executing: ${command} ${args.join(' ')}`);
  const child = spawn(command, args, {
    stdio: 'inherit',
    env: { ...process.env, ARTIFACT_SUFFIX: artifactSuffix },
    shell: true
  });
  // Surface spawn failures (e.g. command not found) instead of silently doing nothing.
  child.on('error', (err) => {
    console.error(`Failed to start command: ${err.message}`);
    process.exit(1);
  });
  child.on('close', (code) => {
    // `code` is null when the child was terminated by a signal; treat
    // that as failure rather than letting process.exit(null) exit 0.
    process.exit(code ?? 1);
  });
} else {
  // Just setting the environment variable.
  console.log('Environment variable set. Use this script with additional arguments to run commands with the variable set.');
}

View File

@@ -0,0 +1,38 @@
# PowerShell script to set artifact naming based on version.
# Derives ARTIFACT_SUFFIX from the package.json version, exports it for
# the current process, and returns the suffix so callers can capture it.
param(
    # NOTE(review): $ConfigType is currently unused; kept only for
    # call-site compatibility — confirm before removing.
    [string]$ConfigType = "imex"
)

# Read the package.json to get the version.
$packageJsonPath = Join-Path $PSScriptRoot "..\package.json"
$packageJson = Get-Content $packageJsonPath | ConvertFrom-Json
$version = $packageJson.version
Write-Host "Current version: $version"

# Determine the artifact suffix based on the version.
# "alpha" is checked before "beta" to match the .js/.sh siblings.
$artifactSuffix = ""
if ($version -match "alpha") {
    $artifactSuffix = "alpha-${version}-"
    Write-Host "Detected alpha version, setting suffix to: $artifactSuffix"
}
elseif ($version -match "beta") {
    $artifactSuffix = "beta-${version}-"
    Write-Host "Detected beta version, setting suffix to: $artifactSuffix"
}
else {
    Write-Host "Detected release version, no suffix will be added"
}

# Export for the current session. ($env: assignment IS Process-scope, so the
# previous extra [Environment]::SetEnvironmentVariable(..., "Process") call
# was a redundant duplicate and has been dropped.)
$env:ARTIFACT_SUFFIX = $artifactSuffix
Write-Host "ARTIFACT_SUFFIX set to: '$env:ARTIFACT_SUFFIX'"

# Return the suffix for use in other scripts.
return $artifactSuffix

View File

@@ -0,0 +1,33 @@
#!/bin/bash
# Bash script to set artifact naming based on version.
# Derives ARTIFACT_SUFFIX from package.json, exports it, and writes a
# sourceable env file so sibling scripts can pick the value up.

# Fail fast: abort on command errors, unset variables, and pipeline
# failures — previously a failed `node -p` probe would silently continue
# and export/write an empty suffix.
set -euo pipefail

# Get the directory where this script is located.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Read the package.json to get the version.
PACKAGE_JSON_PATH="$SCRIPT_DIR/../package.json"
VERSION=$(node -p "require('$PACKAGE_JSON_PATH').version")
echo "Current version: $VERSION"

# Determine the artifact suffix based on the version.
# "alpha" is checked before "beta" to match the .js/.ps1 siblings.
ARTIFACT_SUFFIX=""
if [[ $VERSION == *"alpha"* ]]; then
    ARTIFACT_SUFFIX="alpha-${VERSION}-"
    echo "Detected alpha version, setting suffix to: $ARTIFACT_SUFFIX"
elif [[ $VERSION == *"beta"* ]]; then
    ARTIFACT_SUFFIX="beta-${VERSION}-"
    echo "Detected beta version, setting suffix to: $ARTIFACT_SUFFIX"
else
    echo "Detected release version, no suffix will be added"
fi

# Export the environment variable (self-assignment form simplified).
export ARTIFACT_SUFFIX
echo "ARTIFACT_SUFFIX set to: '$ARTIFACT_SUFFIX'"

# Also write to a temporary file for sourcing in other scripts.
echo "export ARTIFACT_SUFFIX='$ARTIFACT_SUFFIX'" > "$SCRIPT_DIR/.artifact-suffix.env"

View File

@@ -0,0 +1,64 @@
#!/usr/bin/env node
// Cross-platform test script to demonstrate artifact naming for different versions.
const fs = require('fs');
const path = require('path');

console.log('=== Artifact Naming Test (Cross-Platform) ===');
console.log('');

// Get current version from the sibling package.json.
const pkgFile = path.join(__dirname, '..', 'package.json');
const pkg = JSON.parse(fs.readFileSync(pkgFile, 'utf8'));
const currentVersion = pkg.version;
console.log(`Current version: ${currentVersion}`);

// Map a version string to the artifact-name suffix used by the build.
function getArtifactSuffix(version) {
  if (version.includes('alpha')) return `alpha-${version}-`;
  if (version.includes('beta')) return `beta-${version}-`;
  return '';
}

// Scenarios covering release, alpha and beta versions.
const testVersions = [
  '1.0.5', // Release version
  '1.0.5-alpha.2', // Alpha version
  '1.0.5-beta.1', // Beta version
  '2.0.0-alpha.1', // Another alpha
  '1.5.0-beta.3' // Another beta
];

console.log('Test scenarios:');
console.log('==================');
for (const version of testVersions) {
  const suffix = getArtifactSuffix(version);
  // Different artifact names for different platforms.
  console.log(`Version: ${version}`);
  console.log(`  Suffix: '${suffix}'`);
  console.log(`  Windows: imex-partner-${suffix}x64.exe`);
  console.log(`  Mac: imex-partner-${suffix}x64.dmg`);
  console.log(`  Linux: imex-partner-${suffix}x64.AppImage`);
  console.log('');
}

console.log('Current configuration will produce:');
const currentSuffix = getArtifactSuffix(currentVersion);
console.log(`  Windows: imex-partner-${currentSuffix}x64.exe`);
console.log(`  Mac: imex-partner-${currentSuffix}x64.dmg`);
console.log(`  Linux: imex-partner-${currentSuffix}x64.AppImage`);
console.log('');
console.log(`Platform detected: ${process.platform}`);
console.log(`Architecture: ${process.arch}`);

View File

@@ -0,0 +1,48 @@
# Test script to demonstrate artifact naming for different versions.
Write-Host "=== Artifact Naming Test ==="
Write-Host ""

# Read the current version from package.json.
$pkgPath = ".\package.json"
$pkg = Get-Content $pkgPath | ConvertFrom-Json
$currentVersion = $pkg.version
Write-Host "Current version: $currentVersion"

# Map a version string to the artifact-name suffix used by the build.
function Get-ArtifactSuffix($version) {
    switch -Regex ($version) {
        "alpha" { return "alpha-${version}-" }
        "beta"  { return "beta-${version}-" }
        default { return "" }
    }
}

# Scenarios covering release, alpha and beta versions.
$testVersions = @(
    "1.0.5", # Release version
    "1.0.5-alpha.2", # Alpha version
    "1.0.5-beta.1", # Beta version
    "2.0.0-alpha.1", # Another alpha
    "1.5.0-beta.3" # Another beta
)

Write-Host "Test scenarios:"
Write-Host "=================="
foreach ($version in $testVersions) {
    $suffix = Get-ArtifactSuffix $version
    $artifactName = "imex-partner-${suffix}x64.exe"
    Write-Host "Version: $version"
    Write-Host "  Suffix: '$suffix'"
    Write-Host "  Result: $artifactName"
    Write-Host ""
}

Write-Host "Current configuration will produce:"
$currentSuffix = Get-ArtifactSuffix $currentVersion
$currentArtifact = "imex-partner-${currentSuffix}x64.exe"
Write-Host "  $currentArtifact"

View File

@@ -0,0 +1,50 @@
#!/bin/bash
# Test script to demonstrate artifact naming for different versions on Mac.
echo "=== Artifact Naming Test (Mac) ==="
echo ""

# Read the current version from package.json.
PACKAGE_JSON_PATH="./package.json"
CURRENT_VERSION=$(node -p "require('$PACKAGE_JSON_PATH').version")
echo "Current version: $CURRENT_VERSION"

# Map a version string to the artifact-name suffix used by the build.
# (alpha is matched before beta, same precedence as the sibling scripts.)
get_artifact_suffix() {
    case "$1" in
        *alpha*) echo "alpha-$1-" ;;
        *beta*)  echo "beta-$1-" ;;
        *)       echo "" ;;
    esac
}

# Scenarios covering release, alpha and beta versions.
TEST_VERSIONS=(
    "1.0.5" # Release version
    "1.0.5-alpha.2" # Alpha version
    "1.0.5-beta.1" # Beta version
    "2.0.0-alpha.1" # Another alpha
    "1.5.0-beta.3" # Another beta
)

echo "Test scenarios:"
echo "=================="
for v in "${TEST_VERSIONS[@]}"; do
    sfx=$(get_artifact_suffix "$v")
    echo "Version: $v"
    echo "  Suffix: '$sfx'"
    echo "  Result: imex-partner-${sfx}x64.dmg"
    echo ""
done

echo "Current configuration will produce:"
current_suffix=$(get_artifact_suffix "$CURRENT_VERSION")
echo "  imex-partner-${current_suffix}x64.dmg"

View File

@@ -22,8 +22,9 @@ win:
endpoint: https://eus.codesigning.azure.net endpoint: https://eus.codesigning.azure.net
certificateProfileName: ImEXRPS certificateProfileName: ImEXRPS
codeSigningAccountName: ImEX codeSigningAccountName: ImEX
publisherName: ImEX Systems Inc.
nsis: nsis:
artifactName: imex-partner-${arch}.${ext} artifactName: imex-partner-${env.ARTIFACT_SUFFIX}${arch}.${ext}
shortcutName: ${productName} shortcutName: ${productName}
uninstallDisplayName: ${productName} uninstallDisplayName: ${productName}
createDesktopShortcut: always createDesktopShortcut: always
@@ -49,17 +50,9 @@ mac:
arch: arch:
- x64 - x64
dmg: dmg:
artifactName: imex-partner-${arch}.${ext} artifactName: imex-partner-${env.ARTIFACT_SUFFIX}${arch}.${ext}
linux:
target:
- AppImage
- snap
- deb
maintainer: electronjs.org
category: Utility
desktop: scripts/imex-shop-partner.desktop
appImage: appImage:
artifactName: imex-partner-${arch}.${ext} artifactName: imex-partner-${env.ARTIFACT_SUFFIX}${arch}.${ext}
npmRebuild: false npmRebuild: false
publish: publish:
provider: s3 provider: s3

View File

@@ -22,8 +22,9 @@ win:
endpoint: https://eus.codesigning.azure.net endpoint: https://eus.codesigning.azure.net
certificateProfileName: ImEXRPS certificateProfileName: ImEXRPS
codeSigningAccountName: ImEX codeSigningAccountName: ImEX
publisherName: ImEX Systems Inc.
nsis: nsis:
artifactName: rome-partner-${arch}.${ext} artifactName: rome-partner-${env.ARTIFACT_SUFFIX}${arch}.${ext}
shortcutName: ${productName} shortcutName: ${productName}
uninstallDisplayName: ${productName} uninstallDisplayName: ${productName}
createDesktopShortcut: always createDesktopShortcut: always
@@ -50,17 +51,9 @@ mac:
arch: arch:
- x64 - x64
dmg: dmg:
artifactName: rome-partner-${arch}.${ext} artifactName: rome-partner-${env.ARTIFACT_SUFFIX}${arch}.${ext}
linux:
target:
- AppImage
- snap
- deb
maintainer: electronjs.org
category: Utility
desktop: scripts/rome-shop-partner.desktop
appImage: appImage:
artifactName: rome-partner-${arch}.${ext} artifactName: rome-partner-${env.ARTIFACT_SUFFIX}${arch}.${ext}
npmRebuild: false npmRebuild: false
publish: publish:
provider: s3 provider: s3

View File

@@ -6,10 +6,18 @@ import react from "@vitejs/plugin-react";
export default defineConfig({ export default defineConfig({
main: { main: {
plugins: [ plugins: [
externalizeDepsPlugin(), externalizeDepsPlugin({
exclude: ["electron-store"],
}),
sentryVitePlugin({ sentryVitePlugin({
org: "imex", org: "imex",
project: "imex-partner", project: "imex-partner",
sourcemaps: {
filesToDeleteAfterUpload: ["**.js.map"],
},
release: {
name: `bodyshop-desktop@${process.env.npm_package_version}`,
},
}), }),
], ],
build: { build: {
@@ -32,6 +40,12 @@ export default defineConfig({
sentryVitePlugin({ sentryVitePlugin({
org: "imex", org: "imex",
project: "imex-partner", project: "imex-partner",
sourcemaps: {
filesToDeleteAfterUpload: ["**.js.map"],
},
release: {
name: `bodyshop-desktop@${process.env.npm_package_version}`,
},
}), }),
], ],
build: { build: {

5144
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{ {
"name": "bodyshop-desktop", "name": "bodyshop-desktop",
"version": "1.0.4", "version": "1.0.8",
"description": "Shop Management System Partner", "description": "Shop Management System Partner",
"main": "./out/main/index.js", "main": "./out/main/index.js",
"author": "Convenient Brands, LLC", "author": "Convenient Brands, LLC",
@@ -13,79 +13,79 @@
"typecheck": "npm run typecheck:node && npm run typecheck:web", "typecheck": "npm run typecheck:node && npm run typecheck:web",
"start": "electron-vite preview", "start": "electron-vite preview",
"dev": "electron-vite dev", "dev": "electron-vite dev",
"build:imex": "electron-vite build --mode imex && electron-builder --config electron-builder.imex.yml", "build:imex": "node deploy/set-artifact-name.js electron-vite build --mode imex && node deploy/set-artifact-name.js electron-builder --config electron-builder.imex.yml",
"build:rome": "electron-vite build --mode rome && electron-builder --config electron-builder.rome.yml", "build:rome": "node deploy/set-artifact-name.js electron-vite build --mode rome && node deploy/set-artifact-name.js electron-builder --config electron-builder.rome.yml",
"build:imex:publish": "electron-vite build --mode imex && electron-builder --config electron-builder.imex.yml --publish always", "build:imex:publish": "node deploy/set-artifact-name.js electron-vite build --mode imex && node deploy/set-artifact-name.js electron-builder --config electron-builder.imex.yml --publish always",
"build:rome:publish": "electron-vite build --mode rome && electron-builder --config electron-builder.rome.yml --publish always", "build:rome:publish": "node deploy/set-artifact-name.js electron-vite build --mode rome && node deploy/set-artifact-name.js electron-builder --config electron-builder.rome.yml --publish always",
"build:imex:linux": "electron-vite build --mode imex && electron-builder --config electron-builder.imex.yml --linux", "build:imex:linux": "node deploy/set-artifact-name.js electron-vite build --mode imex && node deploy/set-artifact-name.js electron-builder --config electron-builder.imex.yml --linux",
"build:rome:linux": "electron-vite build --mode rome && electron-builder --config electron-builder.rome.yml --linux", "build:rome:linux": "node deploy/set-artifact-name.js electron-vite build --mode rome && node deploy/set-artifact-name.js electron-builder --config electron-builder.rome.yml --linux",
"postinstall": "electron-builder install-app-deps", "postinstall": "electron-builder install-app-deps",
"build:unpack": "electron-vite build --mode imex && electron-builder --dir", "build:unpack": "node deploy/set-artifact-name.js electron-vite build --mode imex && node deploy/set-artifact-name.js electron-builder --dir",
"build:win": "electron-vite build --mode imex && electron-builder --win", "build:win": "node deploy/set-artifact-name.js electron-vite build --mode imex && node deploy/set-artifact-name.js electron-builder --win",
"build:mac": "electron-vite build --mode imex && electron-builder --mac", "build:mac": "node deploy/set-artifact-name.js electron-vite build --mode imex && node deploy/set-artifact-name.js electron-builder --mac",
"build:linux": "electron-vite build --mode imex && electron-builder --linux" "build:linux": "node deploy/set-artifact-name.js electron-vite build --mode imex && node deploy/set-artifact-name.js electron-builder --linux"
}, },
"dependencies": { "dependencies": {
"@apollo/client": "^3.13.6", "@apollo/client": "^3.13.6",
"@electron-toolkit/preload": "^3.0.1", "@electron-toolkit/preload": "^3.0.2",
"@electron-toolkit/utils": "^4.0.0", "@electron-toolkit/utils": "^4.0.0",
"@sentry/electron": "^6.5.0", "@sentry/electron": "^7.2.0",
"@sentry/vite-plugin": "^3.3.1", "@sentry/vite-plugin": "^4.5.0",
"axios": "^1.9.0", "axios": "^1.12.2",
"dayjs": "^1.11.13", "dayjs": "^1.11.18",
"electron-log": "^5.3.3", "electron-log": "^5.4.3",
"electron-store": "^8.2.0",
"electron-updater": "^6.6.2", "electron-updater": "^6.6.2",
"winax": "^3.6.2" "winax": "^3.6.2"
}, },
"devDependencies": { "devDependencies": {
"@ant-design/v5-patch-for-react-19": "^1.0.3", "@ant-design/v5-patch-for-react-19": "^1.0.3",
"@electron-toolkit/eslint-config-prettier": "^3.0.0", "@electron-toolkit/eslint-config-prettier": "^3.0.0",
"@electron-toolkit/eslint-config-ts": "^3.0.0", "@electron-toolkit/eslint-config-ts": "^3.1.0",
"@electron-toolkit/tsconfig": "^1.0.1", "@electron-toolkit/tsconfig": "^2.0.0",
"@playwright/test": "^1.51.1", "@playwright/test": "^1.56.1",
"@reduxjs/toolkit": "^2.6.1", "@reduxjs/toolkit": "^2.9.1",
"@types/cors": "^2.8.17", "@types/cors": "^2.8.19",
"@types/express": "^5.0.1", "@types/express": "^5.0.3",
"@types/lodash": "^4.17.16", "@types/lodash": "^4.17.20",
"@types/node": "^22.14.0", "@types/node": "^24.9.1",
"@types/node-cron": "^3.0.11", "@types/node-cron": "^3.0.11",
"@types/react": "^19.1.0", "@types/react": "^19.2.2",
"@types/react-dom": "^19.1.2", "@types/react-dom": "^19.2.2",
"@types/xml2js": "^0.4.14", "@types/xml2js": "^0.4.14",
"@vitejs/plugin-react": "^4.3.4", "@vitejs/plugin-react": "^5.0.4",
"antd": "^5.24.6", "antd": "^5.27.6",
"archiver": "^7.0.1", "archiver": "^7.0.1",
"chokidar": "^4.0.3", "chokidar": "^4.0.3",
"cors": "^2.8.5", "cors": "^2.8.5",
"cross-env": "^10.1.0",
"dbffile": "^1.12.0", "dbffile": "^1.12.0",
"electron": "^35.1.5", "electron": "^38.3.0",
"electron-builder": "^25.1.8", "electron-builder": "^26.0.12",
"electron-store": "^8.2.0", "electron-store": "^11.0.2",
"electron-vite": "^3.1.0", "electron-vite": "^4.0.1",
"eslint": "^9.24.0", "eslint": "^9.38.0",
"eslint-plugin-react": "^7.37.5", "eslint-plugin-react": "^7.37.5",
"eslint-plugin-react-hooks": "^5.2.0", "eslint-plugin-react-hooks": "^7.0.0",
"eslint-plugin-react-refresh": "^0.4.19", "eslint-plugin-react-refresh": "^0.4.24",
"express": "^5.1.0", "express": "^5.1.0",
"firebase": "^11.6.0", "firebase": "^12.4.0",
"graphql": "^16.10.0", "graphql": "^16.11.0",
"graphql-request": "^7.1.2", "graphql-request": "^7.3.1",
"i18next": "^24.2.3", "i18next": "^25.6.0",
"lodash": "^4.17.21", "lodash": "^4.17.21",
"node-cron": "^3.0.3", "node-cron": "^4.2.1",
"playwright": "^1.51.1", "playwright": "^1.56.1",
"prettier": "^3.5.3", "prettier": "^3.6.2",
"react": "^19.1.0", "react": "^19.2.0",
"react-dom": "^19.1.0", "react-dom": "^19.2.0",
"react-error-boundary": "^5.0.0", "react-error-boundary": "^6.0.0",
"react-i18next": "^15.4.1", "react-i18next": "^16.1.4",
"react-redux": "^9.2.0", "react-redux": "^9.2.0",
"react-router": "^7.5.0", "react-router": "^7.9.4",
"redux-logger": "^3.0.6", "redux-logger": "^3.0.6",
"typescript": "^5.8.3", "typescript": "^5.9.3",
"vite": "6.2.6", "vite": "7.1.11",
"xml2js": "^0.6.2", "xml2js": "^0.6.2",
"xmlbuilder2": "^3.1.1" "xmlbuilder2": "^4.0.0"
} }
} }

View File

@@ -182,7 +182,9 @@ const DecodeAD1 = async (
if (!rawAd1Data.ownr_ph1 || _.isEmpty(rawAd1Data.ownr_ph1)) { if (!rawAd1Data.ownr_ph1 || _.isEmpty(rawAd1Data.ownr_ph1)) {
rawAd1Data.ownr_ph1 = rawAd1Data.ownr_ph2; rawAd1Data.ownr_ph1 = rawAd1Data.ownr_ph2;
} }
if (rawAd1Data.clm_no === "") {
rawAd1Data.clm_no = undefined;
}
let ownerRecord: OwnerRecordInterface; let ownerRecord: OwnerRecordInterface;
//Check if the owner information is there. If not, use the insured information as a fallback. //Check if the owner information is there. If not, use the insured information as a fallback.
if ( if (

View File

@@ -157,14 +157,16 @@ async function ImportJob(filepath: string): Promise<void> {
console.log("Available Job record to upload;", newAvailableJob); console.log("Available Job record to upload;", newAvailableJob);
setAppProgressbar(0.95); setAppProgressbar(0.95);
const existingJobRecord: QueryJobByClmNoResult = await client.request( if (jobObject.clm_no) {
QUERY_JOB_BY_CLM_NO_TYPED, const existingJobRecord: QueryJobByClmNoResult = await client.request(
{ clm_no: jobObject.clm_no }, QUERY_JOB_BY_CLM_NO_TYPED,
); { clm_no: jobObject.clm_no },
);
if (existingJobRecord.jobs.length > 0) { if (existingJobRecord.jobs.length > 0) {
newAvailableJob.issupplement = true; newAvailableJob.issupplement = true;
newAvailableJob.jobid = existingJobRecord.jobs[0].id; newAvailableJob.jobid = existingJobRecord.jobs[0].id;
}
} }
const insertRecordResult: InsertAvailableJobResult = await client.request( const insertRecordResult: InsertAvailableJobResult = await client.request(
@@ -353,63 +355,77 @@ export function ReplaceOwnerInfoWithClaimant<
| "clmt_ph1" | "clmt_ph1"
| "clmt_ph2" | "clmt_ph2"
| "clmt_ea" | "clmt_ea"
| "insd_ln"
| "insd_fn"
| "insd_title"
| "insd_co_nm"
| "insd_addr1"
| "insd_addr2"
| "insd_city"
| "insd_st"
| "insd_zip"
| "insd_ctry"
| "insd_ph1"
| "insd_ph2"
| "insd_ea"
| "owner" | "owner"
> >
>, >,
>(jobObject: T): T { >(jobObject: T): T {
// In some scenarios, the owner information is missing. So we use the claimant instead. // Promote claimant data first if owner identity is entirely missing; otherwise fallback to insured data.
// We pull the claimant info for this, but we don't store it in our system, so it needs to be deleted regardless. const identityKeys = ["ln", "fn", "co_nm"] as const; // keys used to determine presence
if ( const copyKeys = [
_.isEmpty(jobObject.ownr_ln) && "ln",
_.isEmpty(jobObject.ownr_fn) && "fn",
_.isEmpty(jobObject.ownr_co_nm) "title",
) { "co_nm",
jobObject.ownr_ln = jobObject.clmt_ln; "addr1",
jobObject.ownr_fn = jobObject.clmt_fn; "addr2",
jobObject.ownr_title = jobObject.clmt_title; "city",
jobObject.ownr_co_nm = jobObject.clmt_co_nm; "st",
jobObject.ownr_addr1 = jobObject.clmt_addr1; "zip",
jobObject.ownr_addr2 = jobObject.clmt_addr2; "ctry",
jobObject.ownr_city = jobObject.clmt_city; "ph1",
jobObject.ownr_st = jobObject.clmt_st; "ph2",
jobObject.ownr_zip = jobObject.clmt_zip; "ea",
jobObject.ownr_ctry = jobObject.clmt_ctry; ] as const; // full set of fields to copy/delete
jobObject.ownr_ph1 = jobObject.clmt_ph1;
jobObject.ownr_ph2 = jobObject.clmt_ph2;
jobObject.ownr_ea = jobObject.clmt_ea;
// Ensure the owner and owner.data fields exist before assigning values const ownerMissing = identityKeys.every((k) =>
if (jobObject.owner?.data) { _.isEmpty((jobObject as any)[`ownr_${k}`]),
jobObject.owner.data.ownr_ln = jobObject.clmt_ln; );
jobObject.owner.data.ownr_fn = jobObject.clmt_fn; const claimantHasSome = identityKeys.some(
jobObject.owner.data.ownr_title = jobObject.clmt_title; (k) => !_.isEmpty((jobObject as any)[`clmt_${k}`]),
jobObject.owner.data.ownr_co_nm = jobObject.clmt_co_nm; );
jobObject.owner.data.ownr_addr1 = jobObject.clmt_addr1; const claimantMissing = identityKeys.every((k) =>
jobObject.owner.data.ownr_addr2 = jobObject.clmt_addr2; _.isEmpty((jobObject as any)[`clmt_${k}`]),
jobObject.owner.data.ownr_city = jobObject.clmt_city; );
jobObject.owner.data.ownr_st = jobObject.clmt_st;
jobObject.owner.data.ownr_zip = jobObject.clmt_zip; const { owner } = jobObject as any; // destructure for optional nested updates
jobObject.owner.data.ownr_ctry = jobObject.clmt_ctry;
jobObject.owner.data.ownr_ph1 = jobObject.clmt_ph1; // Copy helper inline (no extra function as requested)
jobObject.owner.data.ownr_ph2 = jobObject.clmt_ph2; const promote = (sourcePrefix: "clmt" | "insd"): void => {
jobObject.owner.data.ownr_ea = jobObject.clmt_ea; copyKeys.forEach((suffix) => {
} (jobObject as any)[`ownr_${suffix}`] = (jobObject as any)[
`${sourcePrefix}_${suffix}`
];
if (owner?.data) {
owner.data[`ownr_${suffix}`] = (jobObject as any)[
`${sourcePrefix}_${suffix}`
];
}
});
};
if (ownerMissing && claimantHasSome) {
promote("clmt");
} else if (ownerMissing && claimantMissing) {
promote("insd");
} }
// Delete the claimant info as it's not needed. // Delete the claimant info as it's not needed.
delete jobObject.clmt_ln; copyKeys.forEach((suffix) => delete (jobObject as any)[`clmt_${suffix}`]);
delete jobObject.clmt_fn; // Delete the insured info as it's not needed.
delete jobObject.clmt_title; copyKeys.forEach((suffix) => delete (jobObject as any)[`insd_${suffix}`]);
delete jobObject.clmt_co_nm;
delete jobObject.clmt_addr1;
delete jobObject.clmt_addr2;
delete jobObject.clmt_city;
delete jobObject.clmt_st;
delete jobObject.clmt_zip;
delete jobObject.clmt_ctry;
delete jobObject.clmt_ph1;
delete jobObject.clmt_ph2;
delete jobObject.clmt_ea;
return jobObject; return jobObject;
} }

View File

@@ -13,127 +13,36 @@ const EmsPartsOrderGenerateAd2File = async (
try { try {
const records = [ const records = [
{ {
INS_CO_ID: partsOrder.job.ins_co_nm, EST_CO_NM: partsOrder.job.est_co_nm,
INS_CO_NM: partsOrder.job.ins_co_nm, EST_ADDR1: partsOrder.job.est_addr1,
INS_ADDR1: partsOrder.job.ins_addr1, EST_ADDR2: partsOrder.job.est_addr2,
INS_ADDR2: partsOrder.job.ins_addr2, EST_CITY: partsOrder.job.est_city,
INS_CITY: partsOrder.job.ins_city, EST_ST: partsOrder.job.est_st,
INS_ST: partsOrder.job.ins_st, EST_ZIP: partsOrder.job.est_zip,
INS_ZIP: partsOrder.job.ins_zip, EST_CTRY: partsOrder.job.est_ctry,
INS_CTRY: partsOrder.job.ins_ctry, EST_PH1: partsOrder.job.est_ph1,
INS_PH1: partsOrder.job.ins_ph1, EST_CT_LN: partsOrder.job.est_ct_ln,
INS_PH1X: partsOrder.job.ins_ph1x, EST_CT_FN: partsOrder.job.est_ct_fn,
INS_PH2: partsOrder.job.ins_ph2, EST_EA: partsOrder.job.est_ea,
INS_PH2X: partsOrder.job.ins_ph2x, CLMT_ADDR1: partsOrder.job.clm_addr1,
INS_FAX: partsOrder.job.ins_fax, CLMT_ADDR2: partsOrder.job.clm_addr2,
INS_FAXX: partsOrder.job.ins_faxx, CLMT_CITY: partsOrder.job.clm_city,
INS_CT_LN: partsOrder.job.ins_ct_ln, CLMT_ST: partsOrder.job.clm_st,
INS_CT_FN: partsOrder.job.ins_ct_fn, CLMT_ZIP: partsOrder.job.clm_zip,
INS_TITLE: partsOrder.job.ins_title, CLMT_CTRY: partsOrder.job.clm_ctry,
INS_CT_PH: partsOrder.job.ins_ct_ph, CLMT_PH1: partsOrder.job.clm_ph1,
INS_CT_PHX: partsOrder.job.ins_ct_phx, CLMT_PH1X: partsOrder.job.clm_ph1x,
INS_EA: partsOrder.job.ins_ea, CLMT_PH2: partsOrder.job.clm_ph2,
INS_MEMO: partsOrder.job.ins_memo, CLMT_PH2X: partsOrder.job.clm_ph2x,
POLICY_NO: partsOrder.job.policy_no, CLMT_FAX: partsOrder.job.clm_fax,
DED_AMT: partsOrder.job.ded_amt, CLMT_FAXX: partsOrder.job.clm_faxx,
DED_STATUS: partsOrder.job.ded_status, CLMT_LN: partsOrder.job.clm_ct_ln,
ASGN_NO: partsOrder.job.asgn_no, CLMT_FN: partsOrder.job.clm_ct_fn,
ASGN_DATE: partsOrder.job.asgn_date CLMT_TITLE: partsOrder.job.clm_title,
? new Date(partsOrder.job.asgn_date) CLMT_CT_PH: partsOrder.job.clm_ct_ph,
: null, CLMT_CT_PHX: partsOrder.job.clm_ct_phx,
ASGN_TYPE: partsOrder.job.asgn_type, CLMT_EA: partsOrder.job.clm_ea,
CLM_NO: partsOrder.job.clm_no, RF_CO_NM: partsOrder.job.bodyshop.shopname,
CLM_OFC_ID: partsOrder.job.clm_ofc_id,
CLM_OFC_NM: partsOrder.job.clm_ofc_nm,
CLM_ADDR1: partsOrder.job.clm_addr1,
CLM_ADDR2: partsOrder.job.clm_addr2,
CLM_CITY: partsOrder.job.clm_city,
CLM_ST: partsOrder.job.clm_st,
CLM_ZIP: partsOrder.job.clm_zip,
CLM_CTRY: partsOrder.job.clm_ctry,
CLM_PH1: partsOrder.job.clm_ph1,
CLM_PH1X: partsOrder.job.clm_ph1x,
CLM_PH2: partsOrder.job.clm_ph2,
CLM_PH2X: partsOrder.job.clm_ph2x,
CLM_FAX: partsOrder.job.clm_fax,
CLM_FAXX: partsOrder.job.clm_faxx,
CLM_CT_LN: partsOrder.job.clm_ct_ln,
CLM_CT_FN: partsOrder.job.clm_ct_fn,
CLM_TITLE: partsOrder.job.clm_title,
CLM_CT_PH: partsOrder.job.clm_ct_ph,
CLM_CT_PHX: partsOrder.job.clm_ct_phx,
CLM_EA: partsOrder.job.clm_ea,
PAYEE_NMS: partsOrder.job.payee_nms,
PAY_TYPE: partsOrder.job.pay_type,
PAY_DATE: partsOrder.job.pay_date,
PAY_CHKNM: null, // Explicitly set to null as in original code
PAY_AMT: null, // Explicitly set to null as in original code
PAY_MEMO: partsOrder.job.pay_memo,
AGT_CO_ID: partsOrder.job.agt_co_id,
AGT_CO_NM: partsOrder.job.agt_co_nm,
AGT_ADDR1: partsOrder.job.agt_addr1,
AGT_ADDR2: partsOrder.job.agt_addr2,
AGT_CITY: partsOrder.job.agt_city,
AGT_ST: partsOrder.job.agt_st,
AGT_ZIP: partsOrder.job.agt_zip,
AGT_CTRY: partsOrder.job.agt_ctry,
AGT_PH1: partsOrder.job.agt_ph1,
AGT_PH1X: partsOrder.job.agt_ph1x,
AGT_PH2: partsOrder.job.agt_ph2,
AGT_PH2X: partsOrder.job.agt_ph2x,
AGT_FAX: partsOrder.job.agt_fax,
AGT_FAXX: partsOrder.job.agt_faxx,
AGT_CT_LN: partsOrder.job.agt_ct_ln,
AGT_CT_FN: partsOrder.job.agt_ct_fn,
AGT_CT_PH: partsOrder.job.agt_ct_ph,
AGT_CT_PHX: partsOrder.job.agt_ct_phx,
AGT_EA: partsOrder.job.agt_ea,
AGT_LIC_NO: partsOrder.job.agt_lic_no,
LOSS_DATE: partsOrder.job.loss_date
? new Date(partsOrder.job.loss_date)
: null,
LOSS_CAT: null, // Explicitly set to null as in original code
LOSS_TYPE: null, // Explicitly set to null as in original code
LOSS_DESC: partsOrder.job.loss_desc,
THEFT_IND: null, // Explicitly set to null as in original code
CAT_NO: partsOrder.job.cat_no,
TLOS_IND: null, // Explicitly set to null as in original code
LOSS_MEMO: partsOrder.job.loss_memo,
CUST_PR: partsOrder.job.cust_pr,
INSD_LN: partsOrder.job.insd_ln,
INSD_FN: partsOrder.job.insd_fn,
INSD_TITLE: partsOrder.job.insd_title,
INSD_CO_NM: partsOrder.job.insd_co_nm,
INSD_ADDR1: partsOrder.job.insd_addr1,
INSD_ADDR2: partsOrder.job.insd_addr2,
INSD_CITY: partsOrder.job.insd_city,
INSD_ST: partsOrder.job.insd_st,
INSD_ZIP: partsOrder.job.insd_zip,
INSD_CTRY: partsOrder.job.insd_ctry,
INSD_PH1: partsOrder.job.insd_ph1,
INSD_PH1X: partsOrder.job.insd_ph1x,
INSD_PH2: partsOrder.job.insd_ph2,
INSD_PH2X: partsOrder.job.insd_ph2x,
INSD_FAX: partsOrder.job.insd_fax,
INSD_FAXX: partsOrder.job.insd_faxx,
INSD_EA: partsOrder.job.insd_ea,
OWNR_LN: partsOrder.job.ownr_ln,
OWNR_FN: partsOrder.job.ownr_fn,
OWNR_TITLE: partsOrder.job.ownr_title,
OWNR_CO_NM: partsOrder.job.ownr_co_nm,
OWNR_ADDR1: partsOrder.job.ownr_addr1,
OWNR_ADDR2: partsOrder.job.ownr_addr2,
OWNR_CITY: partsOrder.job.ownr_city,
OWNR_ST: partsOrder.job.ownr_st,
OWNR_ZIP: partsOrder.job.ownr_zip,
OWNR_CTRY: partsOrder.job.ownr_ctry,
OWNR_PH1: partsOrder.job.ownr_ph1,
OWNR_PH1X: partsOrder.job.ownr_ph1x,
OWNR_PH2: partsOrder.job.ownr_ph2,
OWNR_PH2X: partsOrder.job.ownr_ph2x,
OWNR_FAX: partsOrder.job.ownr_fax,
OWNR_FAXX: partsOrder.job.ownr_faxx,
OWNR_EA: partsOrder.job.ownr_ea,
}, },
]; ];

View File

@@ -11,18 +11,51 @@ const EmsPartsOrderGenerateEnvFile = async (
partsOrder: EmsPartsOrder, partsOrder: EmsPartsOrder,
): Promise<boolean> => { ): Promise<boolean> => {
try { try {
const dateNow = new Date();
const formatTime = (date: Date): string =>
`${date.getHours().toString().padStart(2, "0")}${date.getMinutes().toString().padStart(2, "0")}${date.getSeconds().toString().padStart(2, "0")}`;
const {
job: { ro_number, ciecaid },
} = partsOrder;
// Find the highest line_ind value
const lineInds = partsOrder.parts_order_lines.map(
(line) => line.jobline.line_ind,
);
const getNumber = (str: string): number => {
const match = str.match(/(\d+)$/);
return match ? parseInt(match[1], 10) : 0;
};
const highestLineInd = lineInds.reduce(
(max, current) => (getNumber(current) > getNumber(max) ? current : max),
lineInds[0] || "",
);
const records = [ const records = [
{ {
EST_SYSTEM: "C", EST_SYSTEM: "M",
RO_ID: partsOrder.job.ro_number, SW_VERSION: "25.3",
ESTFILE_ID: partsOrder.job.ciecaid, DB_VERSION: "OCT_25_V",
DB_DATE: dateNow,
RO_ID: ro_number,
ESTFILE_ID: ciecaid,
SUPP_NO: highestLineInd ? getNumber(highestLineInd).toString() : "1",
EST_CTRY: "CAN",
TOP_SECRET: "00000000-0000-0000-0000-000000000000",
TRANS_TYPE: highestLineInd ? highestLineInd.charAt(0) : "S",
STATUS: false, STATUS: false,
CREATE_DT: dateNow,
CREATE_TM: formatTime(dateNow),
TRANSMT_DT: dateNow,
TRANSMT_TM: formatTime(dateNow),
INCL_ADMIN: true, INCL_ADMIN: true,
INCL_VEH: true, INCL_VEH: true,
INCL_EST: true, INCL_EST: true,
INCL_PROFL: true, INCL_PROFL: false,
INCL_TOTAL: true, INCL_TOTAL: false,
INCL_VENDR: false, INCL_VENDR: false,
EMS_VER: "2.0",
}, },
]; ];

View File

@@ -18,9 +18,7 @@ const EmsPartsOrderGenerateLinFile = async (
TRAN_CODE: partsOrderLine.jobline?.tran_code ?? "1", TRAN_CODE: partsOrderLine.jobline?.tran_code ?? "1",
DB_REF: partsOrderLine.jobline?.db_ref, DB_REF: partsOrderLine.jobline?.db_ref,
UNQ_SEQ: partsOrderLine.jobline?.unq_seq, UNQ_SEQ: partsOrderLine.jobline?.unq_seq,
WHO_PAYS: partsOrderLine.jobline?.who_pays, PART_DES_J: false,
PART_DESCJ: partsOrderLine.jobline?.part_descj,
LINE_DESC: partsOrderLine.jobline?.line_desc, LINE_DESC: partsOrderLine.jobline?.line_desc,
PART_TYPE: PART_TYPE:
partsOrderLine.priceChange === true partsOrderLine.priceChange === true

View File

@@ -17,7 +17,7 @@ const EmsPartsOrderGeneratePflFile = async (
let records; let records;
if (partsOrder.job.cieca_pfl && !_.isEmpty(partsOrder.job.cieca_pfl)) { if (partsOrder.job.cieca_pfl && !_.isEmpty(partsOrder.job.cieca_pfl)) {
Object.keys(partsOrder.job.cieca_pfl).map((key) => { records = Object.keys(partsOrder.job.cieca_pfl).map((key) => {
const record: DecodedPflLine = partsOrder.job.cieca_pfl[key]; const record: DecodedPflLine = partsOrder.job.cieca_pfl[key];
return uppercaseObjectKeys(record); return uppercaseObjectKeys(record);
}); });

View File

@@ -4,6 +4,7 @@ import { GraphQLClient, RequestMiddleware } from "graphql-request";
import errorTypeCheck from "../../util/errorTypeCheck.js"; import errorTypeCheck from "../../util/errorTypeCheck.js";
import ipcTypes from "../../util/ipcTypes.json"; import ipcTypes from "../../util/ipcTypes.json";
import store from "../store/store.js"; import store from "../store/store.js";
import getMainWindow from "../../util/getMainWindow.js";
const requestMiddleware: RequestMiddleware = async (request) => { const requestMiddleware: RequestMiddleware = async (request) => {
const token = await getTokenFromRenderer(); const token = await getTokenFromRenderer();
@@ -32,9 +33,9 @@ const client: GraphQLClient = new GraphQLClient(
export async function getTokenFromRenderer(): Promise<string> { export async function getTokenFromRenderer(): Promise<string> {
return new Promise((resolve) => { return new Promise((resolve) => {
try { try {
const mainWindow = BrowserWindow.getAllWindows()[0]; //TODO: Filter to only main window once a proper key has been set. const mainWindow = getMainWindow();
//TODO: Verify that this will work if the app is minimized/closed. //TODO: Verify that this will work if the app is minimized/closed.
mainWindow.webContents.send(ipcTypes.toRenderer.user.getToken); mainWindow?.webContents.send(ipcTypes.toRenderer.user.getToken);
} catch (error) { } catch (error) {
log.error( log.error(
"Unable to send request to renderer process for token", "Unable to send request to renderer process for token",

View File

@@ -36,6 +36,7 @@ import {
setupKeepAliveTask, setupKeepAliveTask,
} from "./setup-keep-alive-task"; } from "./setup-keep-alive-task";
import ensureWindowOnScreen from "./util/ensureWindowOnScreen"; import ensureWindowOnScreen from "./util/ensureWindowOnScreen";
import ongoingMemoryDump, { dumpMemoryStatsToFile } from "../util/memUsage";
const appIconToUse = const appIconToUse =
import.meta.env.VITE_COMPANY === "IMEX" ? imexAppIcon : romeAppIcon; import.meta.env.VITE_COMPANY === "IMEX" ? imexAppIcon : romeAppIcon;
@@ -45,6 +46,13 @@ Sentry.init({
}); });
log.initialize(); log.initialize();
// Configure log format to include process ID
log.transports.file.format =
"[{y}-{m}-{d} {h}:{i}:{s}.{ms}] [{level}] [PID:{processId}] {text}";
log.transports.console.format =
"[{y}-{m}-{d} {h}:{i}:{s}.{ms}] [{level}] [PID:{processId}] {text}";
log.transports.file.maxSize = 50 * 1024 * 1024; // 50 MB
const isMac: boolean = process.platform === "darwin"; const isMac: boolean = process.platform === "darwin";
const protocol: string = "imexmedia"; const protocol: string = "imexmedia";
let isAppQuitting = false; //Needed on Mac as an override to allow us to fully quit the app. let isAppQuitting = false; //Needed on Mac as an override to allow us to fully quit the app.
@@ -53,6 +61,14 @@ let isKeepAliveLaunch = false; // Track if launched via keep-alive
const localServer = new LocalServer(); const localServer = new LocalServer();
const gotTheLock = app.requestSingleInstanceLock(); const gotTheLock = app.requestSingleInstanceLock();
if (!gotTheLock) {
log.warn(
"Another instance is already running and could not obtain mutex lock. Exiting this instance.",
);
isAppQuitting = true;
app.quit(); // Quit the app if another instance is already running
}
function createWindow(): void { function createWindow(): void {
// Create the browser window. // Create the browser window.
const { width, height, x, y } = store.get("app.windowBounds") as { const { width, height, x, y } = store.get("app.windowBounds") as {
@@ -252,6 +268,27 @@ function createWindow(): void {
{ {
type: "separator", type: "separator",
}, },
{
label: "Enable Memory Logging",
checked: store.get("settings.enableMemDebug") as boolean,
type: "checkbox",
click: (): void => {
const currentSetting = store.get(
"settings.enableMemDebug",
) as boolean;
store.set("settings.enableMemDebug", !currentSetting);
log.info("Enable Memory Logging set to", !currentSetting);
},
},
{
label: "Dump Memory Stats Now",
click: (): void => {
dumpMemoryStatsToFile();
},
},
{
type: "separator",
},
// { // {
// label: "Decode Hardcoded Estimate", // label: "Decode Hardcoded Estimate",
// click: (): void => { // click: (): void => {
@@ -429,9 +466,6 @@ function createWindow(): void {
} }
} }
if (!gotTheLock) {
app.quit(); // Quit the app if another instance is already running
}
// This method will be called when Electron has finished // This method will be called when Electron has finished
// initialization and is ready to create browser windows. // initialization and is ready to create browser windows.
// Some APIs can only be used after this event occurs. // Some APIs can only be used after this event occurs.
@@ -439,6 +473,8 @@ app.whenReady().then(async () => {
// Default open or close DevTools by F12 in development // Default open or close DevTools by F12 in development
// and ignore CommandOrControl + R in production. // and ignore CommandOrControl + R in production.
// see https://github.com/alex8088/electron-toolkit/tree/master/packages/utils // see https://github.com/alex8088/electron-toolkit/tree/master/packages/utils
log.debug("App is ready, initializing shortcuts and protocol handlers.");
if (platform.isWindows) { if (platform.isWindows) {
app.setAppUserModelId("Shop Partner"); app.setAppUserModelId("Shop Partner");
} }
@@ -470,21 +506,6 @@ app.whenReady().then(async () => {
log.warn("Failed to register protocol handler."); log.warn("Failed to register protocol handler.");
} }
// Add this event handler for second instance
app.on("second-instance", (_event: Electron.Event, argv: string[]) => {
const url = argv.find((arg) => arg.startsWith(`${protocol}://`));
if (url) {
if (url.startsWith(`${protocol}://keep-alive`)) {
log.info("Keep-alive protocol received, app is already running.");
// Do nothing if already running
return;
} else {
openInExplorer(url);
}
}
// No action taken if no URL is provided
});
//Dynamically load ipcMain handlers once ready. //Dynamically load ipcMain handlers once ready.
try { try {
const { initializeCronTasks } = await import("./ipc/ipcMainConfig"); const { initializeCronTasks } = await import("./ipc/ipcMainConfig");
@@ -531,7 +552,7 @@ app.whenReady().then(async () => {
//Check for app updates. //Check for app updates.
autoUpdater.logger = log; autoUpdater.logger = log;
autoUpdater.allowDowngrade = true;
// if (import.meta.env.DEV) { // if (import.meta.env.DEV) {
// // Useful for some dev/debugging tasks, but download can // // Useful for some dev/debugging tasks, but download can
// // not be validated because dev app is not signed // // not be validated because dev app is not signed
@@ -546,19 +567,19 @@ app.whenReady().then(async () => {
autoUpdater.on("checking-for-update", () => { autoUpdater.on("checking-for-update", () => {
log.info("Checking for update..."); log.info("Checking for update...");
const mainWindow = BrowserWindow.getAllWindows()[0]; const mainWindow = getMainWindow();
mainWindow?.webContents.send(ipcTypes.toRenderer.updates.checking); mainWindow?.webContents.send(ipcTypes.toRenderer.updates.checking);
}); });
autoUpdater.on("update-available", (info) => { autoUpdater.on("update-available", (info) => {
log.info("Update available.", info); log.info("Update available.", info);
const mainWindow = BrowserWindow.getAllWindows()[0]; const mainWindow = getMainWindow();
mainWindow?.webContents.send(ipcTypes.toRenderer.updates.available, info); mainWindow?.webContents.send(ipcTypes.toRenderer.updates.available, info);
}); });
autoUpdater.on("download-progress", (progress) => { autoUpdater.on("download-progress", (progress) => {
log.info(`Download speed: ${progress.bytesPerSecond}`); log.info(`Download speed: ${progress.bytesPerSecond}`);
log.info(`Downloaded ${progress.percent}%`); log.info(`Downloaded ${progress.percent}%`);
log.info(`Total downloaded ${progress.transferred}/${progress.total}`); log.info(`Total downloaded ${progress.transferred}/${progress.total}`);
const mainWindow = BrowserWindow.getAllWindows()[0]; const mainWindow = getMainWindow();
mainWindow?.webContents.send( mainWindow?.webContents.send(
ipcTypes.toRenderer.updates.downloading, ipcTypes.toRenderer.updates.downloading,
progress, progress,
@@ -566,7 +587,7 @@ app.whenReady().then(async () => {
}); });
autoUpdater.on("update-downloaded", (info) => { autoUpdater.on("update-downloaded", (info) => {
log.info("Update downloaded", info); log.info("Update downloaded", info);
const mainWindow = BrowserWindow.getAllWindows()[0]; const mainWindow = getMainWindow();
mainWindow?.webContents.send(ipcTypes.toRenderer.updates.downloaded, info); mainWindow?.webContents.send(ipcTypes.toRenderer.updates.downloaded, info);
}); });
@@ -577,7 +598,8 @@ app.whenReady().then(async () => {
} }
//The update itself will run when the bodyshop record is queried to know what release channel to use. //The update itself will run when the bodyshop record is queried to know what release channel to use.
createWindow(); openMainWindow();
ongoingMemoryDump();
app.on("activate", function () { app.on("activate", function () {
openMainWindow(); openMainWindow();
@@ -595,6 +617,24 @@ app.on("open-url", (event: Electron.Event, url: string) => {
} }
}); });
// Add this event handler for second instance
app.on("second-instance", (_event: Electron.Event, argv: string[]) => {
const url = argv.find((arg) => arg.startsWith(`${protocol}://`));
if (url) {
if (url.startsWith(`${protocol}://keep-alive`)) {
log.info(
"Keep-alive protocol received, app is already running. Nothing to do.",
);
// Do nothing if already running
return;
} else {
log.info("Received Media URL: ", url);
openInExplorer(url);
}
}
// No action taken if no URL is provided
});
// Quit when all windows are closed, except on macOS. There, it's common // Quit when all windows are closed, except on macOS. There, it's common
// for applications and their menu bar to stay active until the user quits // for applications and their menu bar to stay active until the user quits
// explicitly with Cmd + Q. // explicitly with Cmd + Q.
@@ -604,8 +644,7 @@ app.on("window-all-closed", () => {
} }
}); });
app.on("before-quit", (props) => { app.on("before-quit", () => {
console.log(props);
preQuitMethods(); preQuitMethods();
}); });
@@ -635,7 +674,7 @@ function preQuitMethods(): void {
} }
function openMainWindow(): void { function openMainWindow(): void {
const mainWindow = BrowserWindow.getAllWindows()[0]; const mainWindow = getMainWindow();
if (mainWindow) { if (mainWindow) {
mainWindow.show(); mainWindow.show();
} else { } else {

View File

@@ -20,10 +20,15 @@ const ipcMainHandleAuthStateChanged = async (
user: User | null, user: User | null,
): Promise<void> => { ): Promise<void> => {
Store.set("user", user); Store.set("user", user);
log.debug("Received authentication state change from Renderer.", user);
await setReleaseChannel();
checkForAppUpdatesContinuously();
};
async function setReleaseChannel() {
try { try {
//Need to query the currently active shop, and store the metadata as well. //Need to query the currently active shop, and store the metadata as well.
//Also need to query the OP Codes for decoding reference. //Also need to query the OP Codes for decoding reference.
log.debug("Received authentication state change from Renderer.", user);
await handleShopMetaDataFetch(); await handleShopMetaDataFetch();
//Check for updates //Check for updates
const bodyshop = Store.get("app.bodyshop"); const bodyshop = Store.get("app.bodyshop");
@@ -33,6 +38,8 @@ const ipcMainHandleAuthStateChanged = async (
} else if (bodyshop?.convenient_company?.toLowerCase() === "beta") { } else if (bodyshop?.convenient_company?.toLowerCase() === "beta") {
autoUpdater.channel = "beta"; autoUpdater.channel = "beta";
log.debug("Setting update channel to BETA channel."); log.debug("Setting update channel to BETA channel.");
} else {
log.debug("Setting update channel to LATEST channel.");
} }
} catch (error) { } catch (error) {
log.error( log.error(
@@ -44,8 +51,7 @@ const ipcMainHandleAuthStateChanged = async (
"Error connecting to ImEX Online servers to get shop data. Please try again.", "Error connecting to ImEX Online servers to get shop data. Please try again.",
); );
} }
checkForAppUpdatesContinuously(); }
};
const handleShopMetaDataFetch = async ( const handleShopMetaDataFetch = async (
reloadWindow?: boolean, reloadWindow?: boolean,
@@ -89,7 +95,8 @@ const ipMainHandleResetPassword = async (): Promise<void> => {
}; };
export { export {
handleShopMetaDataFetch,
ipcMainHandleAuthStateChanged, ipcMainHandleAuthStateChanged,
ipMainHandleResetPassword, ipMainHandleResetPassword,
handleShopMetaDataFetch, setReleaseChannel,
}; };

View File

@@ -14,7 +14,7 @@ const handlePartsPriceChangeRequest = async (
): Promise<void> => { ): Promise<void> => {
//Route handler here only. //Route handler here only.
const { job } = req.body as { job: PpcJob }; const job = req.body as PpcJob;
try { try {
await generatePartsPriceChange(job); await generatePartsPriceChange(job);
res.status(200).json({ success: true }); res.status(200).json({ success: true });

View File

@@ -9,6 +9,7 @@ const store = new Store({
emsOutFilePath: null, emsOutFilePath: null,
qbFilePath: "", qbFilePath: "",
runWatcherOnStartup: true, runWatcherOnStartup: true,
enableMemDebug: false,
polling: { polling: {
enabled: false, enabled: false,
interval: 30000, interval: 30000,

View File

@@ -1,19 +1,23 @@
import { autoUpdater } from "electron-updater"; import { autoUpdater } from "electron-updater";
import { setReleaseChannel } from "../ipc/ipcMainHandler.user";
function checkForAppUpdatesContinuously(): void { let continuousUpdatesTriggered = false;
checkForAppUpdates();
setInterval( async function checkForAppUpdatesContinuously(): Promise<void> {
() => { if (!continuousUpdatesTriggered) {
checkForAppUpdatesContinuously(); continuousUpdatesTriggered = true;
}, checkForAppUpdates();
1000 * 60 * 30, setInterval(
); () => {
checkForAppUpdates();
},
1000 * 60 * 30,
);
}
} }
function checkForAppUpdates(): void { async function checkForAppUpdates(): Promise<void> {
autoUpdater.checkForUpdatesAndNotify({ await setReleaseChannel();
title: "Shop Partner Update", autoUpdater.checkForUpdates();
body: "A new version of Shop Partner is available. Click to update.",
});
} }
export { checkForAppUpdatesContinuously, checkForAppUpdates }; export { checkForAppUpdates, checkForAppUpdatesContinuously };

View File

@@ -45,7 +45,7 @@ export const pfpFieldLineDescriptors: FieldDescriptor[] = [
}, },
{ {
name: "PRT_TX_IN1", name: "PRT_TX_IN1",
type: "C", type: "L",
size: 1, size: 1,
decimalPlaces: 0, decimalPlaces: 0,
}, },
@@ -57,7 +57,7 @@ export const pfpFieldLineDescriptors: FieldDescriptor[] = [
}, },
{ {
name: "PRT_TX_IN2", name: "PRT_TX_IN2",
type: "C", type: "L",
size: 1, size: 1,
decimalPlaces: 0, decimalPlaces: 0,
}, },
@@ -69,7 +69,7 @@ export const pfpFieldLineDescriptors: FieldDescriptor[] = [
}, },
{ {
name: "PRT_TX_IN3", name: "PRT_TX_IN3",
type: "C", type: "L",
size: 1, size: 1,
decimalPlaces: 0, decimalPlaces: 0,
}, },
@@ -81,7 +81,7 @@ export const pfpFieldLineDescriptors: FieldDescriptor[] = [
}, },
{ {
name: "PRT_TX_IN4", name: "PRT_TX_IN4",
type: "C", type: "L",
size: 1, size: 1,
decimalPlaces: 0, decimalPlaces: 0,
}, },
@@ -93,7 +93,7 @@ export const pfpFieldLineDescriptors: FieldDescriptor[] = [
}, },
{ {
name: "PRT_TX_IN5", name: "PRT_TX_IN5",
type: "C", type: "L",
size: 1, size: 1,
decimalPlaces: 0, decimalPlaces: 0,
}, },

View File

@@ -7,6 +7,7 @@ import errorTypeCheck from "../../util/errorTypeCheck";
import ipcTypes from "../../util/ipcTypes.json"; import ipcTypes from "../../util/ipcTypes.json";
import ImportJob from "../decoder/decoder"; import ImportJob from "../decoder/decoder";
import store from "../store/store"; import store from "../store/store";
import getMainWindow from "../../util/getMainWindow";
let watcher: FSWatcher | null; let watcher: FSWatcher | null;
async function StartWatcher(): Promise<boolean> { async function StartWatcher(): Promise<boolean> {
@@ -107,23 +108,23 @@ function addWatcherPath(path: string | string[]): void {
function onWatcherReady(): void { function onWatcherReady(): void {
if (watcher) { if (watcher) {
const mainWindow = BrowserWindow.getAllWindows()[0]; //TODO: Filter to only main window once a proper key has been set. const mainWindow = getMainWindow();
new Notification({ new Notification({
title: "Watcher Started", title: "Watcher Started",
body: "Newly exported estimates will be automatically uploaded.", body: "Newly exported estimates will be automatically uploaded.",
}).show(); }).show();
log.info("Confirmed watched paths:", watcher.getWatched()); log.info("Confirmed watched paths:", watcher.getWatched());
mainWindow.webContents.send(ipcTypes.toRenderer.watcher.started); mainWindow?.webContents.send(ipcTypes.toRenderer.watcher.started);
} }
} }
async function StopWatcher(): Promise<boolean> { async function StopWatcher(): Promise<boolean> {
const mainWindow = BrowserWindow.getAllWindows()[0]; //TODO: Filter to only main window once a proper key has been set. const mainWindow = getMainWindow();
if (watcher) { if (watcher) {
await watcher.close(); await watcher.close();
log.info("Watcher stopped."); log.info("Watcher stopped.");
mainWindow.webContents.send(ipcTypes.toRenderer.watcher.stopped); mainWindow?.webContents.send(ipcTypes.toRenderer.watcher.stopped);
new Notification({ new Notification({
title: "Watcher Stopped", title: "Watcher Stopped",

View File

@@ -1,71 +1,98 @@
import { useState, useEffect } from 'react'; import { useState, useEffect } from "react";
import ipcTypes from '../../../../../util/ipcTypes.json'; import ipcTypes from "../../../../../util/ipcTypes.json";
import { PaintScaleConfig, PaintScaleType } from '../../../../../util/types/paintScale'; import {
PaintScaleConfig,
PaintScaleType,
} from "../../../../../util/types/paintScale";
import { message } from "antd"; import { message } from "antd";
import {useTranslation} from "react-i18next"; import { useTranslation } from "react-i18next";
type ConfigType = 'input' | 'output'; type ConfigType = "input" | "output";
export const usePaintScaleConfig = (configType: ConfigType) => { export const usePaintScaleConfig = (configType: ConfigType) => {
const [paintScaleConfigs, setPaintScaleConfigs] = useState<PaintScaleConfig[]>([]); const [paintScaleConfigs, setPaintScaleConfigs] = useState<
PaintScaleConfig[]
>([]);
const { t } = useTranslation(); const { t } = useTranslation();
// Get the appropriate IPC methods based on config type // Get the appropriate IPC methods based on config type
const getConfigsMethod = configType === 'input' const getConfigsMethod =
configType === "input"
? ipcTypes.toMain.settings.paintScale.getInputConfigs ? ipcTypes.toMain.settings.paintScale.getInputConfigs
: ipcTypes.toMain.settings.paintScale.getOutputConfigs; : ipcTypes.toMain.settings.paintScale.getOutputConfigs;
const setConfigsMethod = configType === 'input' const setConfigsMethod =
configType === "input"
? ipcTypes.toMain.settings.paintScale.setInputConfigs ? ipcTypes.toMain.settings.paintScale.setInputConfigs
: ipcTypes.toMain.settings.paintScale.setOutputConfigs; : ipcTypes.toMain.settings.paintScale.setOutputConfigs;
const setPathMethod = configType === 'input' const setPathMethod =
configType === "input"
? ipcTypes.toMain.settings.paintScale.setInputPath ? ipcTypes.toMain.settings.paintScale.setInputPath
: ipcTypes.toMain.settings.paintScale.setOutputPath; : ipcTypes.toMain.settings.paintScale.setOutputPath;
// Load paint scale configs on mount // Load paint scale configs on mount
useEffect(() => { useEffect(() => {
window.electron.ipcRenderer window.electron.ipcRenderer
.invoke(getConfigsMethod) .invoke(getConfigsMethod)
.then((configs: PaintScaleConfig[]) => { .then((configs: PaintScaleConfig[]) => {
// Ensure all configs have a pollingInterval and type (for backward compatibility) // Ensure all configs have a pollingInterval and type (for backward compatibility)
const updatedConfigs = configs.map(config => ({ const defaultPolling = configType === "input" ? 1440 : 60;
...config, const updatedConfigs = configs.map((config) => ({
pollingInterval: config.pollingInterval || 1440, // Default to 1440 seconds ...config,
type: config.type || PaintScaleType.PPG, // Default type if missing pollingInterval: config.pollingInterval || defaultPolling, // Default to 1440 for input, 60 for output
})); type: config.type || PaintScaleType.PPG, // Default type if missing
setPaintScaleConfigs(updatedConfigs || []); }));
}) setPaintScaleConfigs(updatedConfigs || []);
.catch((error) => { })
console.error(`Failed to load paint scale ${configType} configs:`, error); .catch((error) => {
}); console.error(
`Failed to load paint scale ${configType} configs:`,
error,
);
});
}, [getConfigsMethod]); }, [getConfigsMethod]);
// Save configs to store and notify main process of config changes // Save configs to store and notify main process of config changes
const saveConfigs = (configs: PaintScaleConfig[]) => { const saveConfigs = (configs: PaintScaleConfig[]) => {
window.electron.ipcRenderer window.electron.ipcRenderer
.invoke(setConfigsMethod, configs) .invoke(setConfigsMethod, configs)
.then(() => { .then(() => {
// Notify main process to update cron job // Notify main process to update cron job
if (configType === 'input') { if (configType === "input") {
window.electron.ipcRenderer.send(ipcTypes.toMain.settings.paintScale.updateInputCron, configs); window.electron.ipcRenderer.send(
} else if (configType === 'output') { ipcTypes.toMain.settings.paintScale.updateInputCron,
window.electron.ipcRenderer.send(ipcTypes.toMain.settings.paintScale.updateOutputCron, configs); configs,
} );
}) } else if (configType === "output") {
.catch((error) => { window.electron.ipcRenderer.send(
console.error(`Failed to save paint scale ${configType} configs:`, error); ipcTypes.toMain.settings.paintScale.updateOutputCron,
}); configs,
);
}
})
.catch((error) => {
console.error(
`Failed to save paint scale ${configType} configs:`,
error,
);
});
}; };
// New helper to check if a path is unique across input and output configs // New helper to check if a path is unique across input and output configs
const checkPathUnique = async (newPath: string): Promise<boolean> => { const checkPathUnique = async (newPath: string): Promise<boolean> => {
try { try {
const inputConfigs: PaintScaleConfig[] = await window.electron.ipcRenderer.invoke(ipcTypes.toMain.settings.paintScale.getInputConfigs); const inputConfigs: PaintScaleConfig[] =
const outputConfigs: PaintScaleConfig[] = await window.electron.ipcRenderer.invoke(ipcTypes.toMain.settings.paintScale.getOutputConfigs); await window.electron.ipcRenderer.invoke(
ipcTypes.toMain.settings.paintScale.getInputConfigs,
);
const outputConfigs: PaintScaleConfig[] =
await window.electron.ipcRenderer.invoke(
ipcTypes.toMain.settings.paintScale.getOutputConfigs,
);
const allConfigs = [...inputConfigs, ...outputConfigs]; const allConfigs = [...inputConfigs, ...outputConfigs];
// Allow updating the current config even if its current value equals newPath. // Allow updating the current config even if its current value equals newPath.
return !allConfigs.some(config => config.path === newPath); return !allConfigs.some((config) => config.path === newPath);
} catch (error) { } catch (error) {
console.error("Failed to check unique path:", error); console.error("Failed to check unique path:", error);
return false; return false;
@@ -74,10 +101,11 @@ export const usePaintScaleConfig = (configType: ConfigType) => {
// Handle adding a new paint scale config // Handle adding a new paint scale config
const handleAddConfig = (type: PaintScaleType) => { const handleAddConfig = (type: PaintScaleType) => {
const defaultPolling = configType === "input" ? 1440 : 60;
const newConfig: PaintScaleConfig = { const newConfig: PaintScaleConfig = {
id: Date.now().toString(), id: Date.now().toString(),
type, type,
pollingInterval: 1440, // Default to 1440 seconds pollingInterval: defaultPolling, // Default to 1440 for input, 60 for output
}; };
const updatedConfigs = [...paintScaleConfigs, newConfig]; const updatedConfigs = [...paintScaleConfigs, newConfig];
setPaintScaleConfigs(updatedConfigs); setPaintScaleConfigs(updatedConfigs);
@@ -86,7 +114,9 @@ export const usePaintScaleConfig = (configType: ConfigType) => {
// Handle removing a config // Handle removing a config
const handleRemoveConfig = (id: string) => { const handleRemoveConfig = (id: string) => {
const updatedConfigs = paintScaleConfigs.filter((config) => config.id !== id); const updatedConfigs = paintScaleConfigs.filter(
(config) => config.id !== id,
);
setPaintScaleConfigs(updatedConfigs); setPaintScaleConfigs(updatedConfigs);
saveConfigs(updatedConfigs); saveConfigs(updatedConfigs);
}; };
@@ -94,7 +124,10 @@ export const usePaintScaleConfig = (configType: ConfigType) => {
// Handle path selection (modified to check directory uniqueness) // Handle path selection (modified to check directory uniqueness)
const handlePathChange = async (id: string) => { const handlePathChange = async (id: string) => {
try { try {
const path: string | null = await window.electron.ipcRenderer.invoke(setPathMethod, id); const path: string | null = await window.electron.ipcRenderer.invoke(
setPathMethod,
id,
);
if (path) { if (path) {
const isUnique = await checkPathUnique(path); const isUnique = await checkPathUnique(path);
if (!isUnique) { if (!isUnique) {
@@ -115,7 +148,7 @@ export const usePaintScaleConfig = (configType: ConfigType) => {
// Handle polling interval change // Handle polling interval change
const handlePollingIntervalChange = (id: string, pollingInterval: number) => { const handlePollingIntervalChange = (id: string, pollingInterval: number) => {
const updatedConfigs = paintScaleConfigs.map((config) => const updatedConfigs = paintScaleConfigs.map((config) =>
config.id === id ? { ...config, pollingInterval } : config, config.id === id ? { ...config, pollingInterval } : config,
); );
setPaintScaleConfigs(updatedConfigs); setPaintScaleConfigs(updatedConfigs);
saveConfigs(updatedConfigs); saveConfigs(updatedConfigs);

View File

@@ -35,7 +35,7 @@ const SettingsPaintScaleInputPaths = (): JSX.Element => {
handleRemoveConfig, handleRemoveConfig,
handlePathChange, handlePathChange,
handlePollingIntervalChange, handlePollingIntervalChange,
} = usePaintScaleConfig("input"); } = usePaintScaleConfig("output");
const [isModalVisible, setIsModalVisible] = useState(false); const [isModalVisible, setIsModalVisible] = useState(false);
const [selectedType, setSelectedType] = useState<PaintScaleType | null>(null); const [selectedType, setSelectedType] = useState<PaintScaleType | null>(null);

View File

@@ -33,7 +33,7 @@ const SettingsPaintScaleOutputPaths = (): JSX.Element => {
handleRemoveConfig, handleRemoveConfig,
handlePathChange, handlePathChange,
handlePollingIntervalChange, handlePollingIntervalChange,
} = usePaintScaleConfig("output"); } = usePaintScaleConfig("input");
const [isModalVisible, setIsModalVisible] = useState(false); const [isModalVisible, setIsModalVisible] = useState(false);
const [selectedType, setSelectedType] = useState<PaintScaleType | null>(null); const [selectedType, setSelectedType] = useState<PaintScaleType | null>(null);

View File

@@ -0,0 +1,5 @@
import { BrowserWindow } from "electron";
/**
 * Returns the first open BrowserWindow, or null when none exist.
 * NOTE(review): assumes the first window in getAllWindows() is the main
 * window — confirm once windows are given explicit keys/identifiers.
 */
export default function getMainWindow(): BrowserWindow | null {
  const [firstWindow] = BrowserWindow.getAllWindows();
  return firstWindow ?? null;
}

309
src/util/memUsage.ts Normal file
View File

@@ -0,0 +1,309 @@
import { BrowserWindow } from "electron";
import log from "electron-log/main";
import fs from "fs";
import os from "os";
import path from "path";
import Store from "../main/store/store";
/**
 * Human-readable memory/cpu/resource snapshot.
 */
export type MemoryUsageStats = {
  // ISO-8601 timestamp of when the snapshot was taken.
  timestamp: string;
  // Optional caller-supplied label (see DumpOptions.label).
  label?: string;
  // Whole seconds since the process started.
  uptimeSeconds: number;
  // Process id of the sampled process.
  pid: number;
  // Raw byte counts from process.memoryUsage().
  memory: {
    rss: number;
    heapTotal: number;
    heapUsed: number;
    external: number;
    arrayBuffers?: number;
  };
  // Same values as `memory`, formatted as human-readable strings (e.g. "12.34 MB").
  memoryPretty: {
    rss: string;
    heapTotal: string;
    heapUsed: string;
    external: string;
    arrayBuffers?: string;
  };
  // System-wide memory from os.totalmem()/os.freemem();
  // freeMemPercent is free/total as a percentage rounded to two decimals.
  os: {
    totalMem: number;
    freeMem: number;
    freeMemPercent: number;
  };
  // Result of process.cpuUsage(), when the API is available.
  cpuUsage?: NodeJS.CpuUsage;
  // Result of process.resourceUsage(), when the API is available.
  resourceUsage?: NodeJS.ResourceUsage;
  // Per-space heap statistics from v8.getHeapSpaceStatistics(), when requested.
  heapSpaces?: Array<import("v8").HeapSpaceInfo>;
  // Path of the written heap snapshot — presumably set when
  // writeHeapSnapshot is enabled; the writing code is not visible here, confirm.
  heapSnapshotFile?: string;
  // Extra ad-hoc fields; currently carries numBrowserWindows.
  custom?: Record<string, unknown>;
};
// (merged into top import)
/**
 * Options for dumpMemoryStats.
 */
export type DumpOptions = {
  /**
   * Call global.gc() before sampling if available (requires node run with --expose-gc).
   */
  runGc?: boolean;
  /**
   * Optional label to include in the returned snapshot.
   */
  label?: string;
  /**
   * Include v8.getHeapSpaceStatistics() in the snapshot.
   * Defaults to true in dumpMemoryStats.
   */
  includeHeapSpaces?: boolean;
  /**
   * Also write a V8 heap snapshot file. Defaults to true in dumpMemoryStats.
   */
  writeHeapSnapshot?: boolean;
  /**
   * Directory to write the heap snapshot into. The fallback directory is
   * chosen inside dumpMemoryStats (not visible here — confirm before relying on it).
   */
  heapSnapshotDir?: string;
};
/**
 * Convert a byte count to a compact human-readable string, e.g. "1.50 KB".
 * Non-finite inputs (NaN, Infinity) are returned via String(bytes).
 * Values at or beyond 1024 TB stay expressed in TB.
 */
function formatBytes(bytes: number): string {
  if (!isFinite(bytes)) return String(bytes);
  const units = ["B", "KB", "MB", "GB", "TB"];
  let scaled = bytes;
  let unitIdx = 0;
  // Divide down by 1024 until the value fits the unit, capping at the last unit.
  for (; scaled >= 1024 && unitIdx < units.length - 1; unitIdx++) {
    scaled /= 1024;
  }
  return `${scaled.toFixed(2)} ${units[unitIdx]}`;
}
/**
 * Asynchronously produce a memory / cpu / os snapshot.
 *
 * Samples process.memoryUsage(), OS total/free memory, CPU and resource
 * usage, and the number of open BrowserWindows. Optionally includes V8
 * heap-space statistics and writes a .heapsnapshot file under
 * "<log dir>/heap-snapshots" (both enabled by default).
 *
 * @param options see DumpOptions; all fields optional.
 * @returns the populated MemoryUsageStats snapshot.
 *
 * Example:
 * const stats = await dumpMemoryStats({ runGc: true, label: 'before-heavy-task' });
 */
export async function dumpMemoryStats(
  options: DumpOptions = {},
): Promise<MemoryUsageStats> {
  const {
    runGc = false,
    label,
    includeHeapSpaces = true,
    writeHeapSnapshot = true,
    heapSnapshotDir,
  } = options;
  // Allow GC if requested and available to get a cleaner snapshot.
  // global.gc only exists when node was started with --expose-gc.
  if (runGc && typeof (global as any).gc === "function") {
    try {
      (global as any).gc();
    } catch {
      // ignore GC errors
    }
  }
  // Let the event loop settle a tick so GC can complete if run
  await new Promise((resolve) => setImmediate(resolve));
  const mem = process.memoryUsage();
  const totalMem = os.totalmem();
  const freeMem = os.freemem();
  const stats: MemoryUsageStats = {
    timestamp: new Date().toISOString(),
    label,
    uptimeSeconds: Math.floor(process.uptime()),
    pid: process.pid,
    // Raw byte counts straight from process.memoryUsage().
    memory: {
      rss: mem.rss,
      heapTotal: mem.heapTotal,
      heapUsed: mem.heapUsed,
      external: mem.external,
      arrayBuffers: mem.arrayBuffers,
    },
    // Same values, formatted for humans.
    memoryPretty: {
      rss: formatBytes(mem.rss),
      heapTotal: formatBytes(mem.heapTotal),
      heapUsed: formatBytes(mem.heapUsed),
      external: formatBytes(mem.external),
      arrayBuffers:
        mem.arrayBuffers !== undefined
          ? formatBytes(mem.arrayBuffers)
          : undefined,
    },
    os: {
      totalMem,
      freeMem,
      // Percent free with two-decimal precision (e.g. 42.37).
      freeMemPercent: Math.round((freeMem / totalMem) * 10000) / 100,
    },
    // Guarded: these process APIs may be absent on older runtimes.
    cpuUsage: process.cpuUsage ? process.cpuUsage() : undefined,
    resourceUsage:
      typeof process.resourceUsage === "function"
        ? process.resourceUsage()
        : undefined,
    custom: {
      numBrowserWindows: BrowserWindow.getAllWindows().length,
    },
  };
  if (includeHeapSpaces) {
    try {
      // Lazy-require v8 so the module loads even if the API is unavailable.
      // eslint-disable-next-line @typescript-eslint/no-var-requires
      const v8: typeof import("v8") = require("v8");
      if (typeof v8.getHeapSpaceStatistics === "function") {
        stats.heapSpaces = v8.getHeapSpaceStatistics();
      }
    } catch (err) {
      log.warn("Failed to get heap space stats", err);
    }
  }
  if (writeHeapSnapshot) {
    try {
      // If GC wasn't already run above, run it now so the snapshot reflects
      // live objects only (still best-effort; requires --expose-gc).
      if (!runGc && typeof (global as any).gc === "function") {
        try {
          (global as any).gc();
        } catch {
          /* ignore */
        }
      }
      // eslint-disable-next-line @typescript-eslint/no-var-requires
      const v8: typeof import("v8") = require("v8");
      if (typeof v8.writeHeapSnapshot === "function") {
        // Default snapshot location: a "heap-snapshots" folder next to the
        // main electron-log file.
        const baseDir =
          heapSnapshotDir || path.dirname(log.transports.file.getFile().path);
        const dir = path.join(baseDir, "heap-snapshots");
        fs.mkdirSync(dir, { recursive: true });
        // Timestamp + pid keeps names unique across processes.
        const fileName = `heap-${Date.now()}-${process.pid}.heapsnapshot`;
        const filePath = path.join(dir, fileName);
        const snapshotPath = v8.writeHeapSnapshot(filePath);
        stats.heapSnapshotFile = snapshotPath;
      } else {
        log.warn("v8.writeHeapSnapshot not available");
      }
    } catch (err) {
      log.warn("Failed to write heap snapshot", err);
    }
  }
  return stats;
}
// Dedicated logger instance so memory snapshots go to their own file,
// separate from the main application log.
const memLogger = log.create({ logId: "mem-stat" });
// Place memory-stats.log in the same directory as the main log file.
memLogger.transports.file.resolvePathFn = () =>
  path.join(
    path.dirname(log.transports.file.getFile().path),
    "memory-stats.log",
  );
// Include the PID in every line so output from multiple processes sharing
// the file can be told apart; same format for file and console transports.
const memLogFormat =
  "[{y}-{m}-{d} {h}:{i}:{s}.{ms}] [{level}] [PID:{processId}] {text}";
memLogger.transports.file.format = memLogFormat;
memLogger.transports.console.format = memLogFormat;
/**
 * Take a single memory snapshot (heap-space stats omitted) and record it
 * through the dedicated memory-stats logger. Errors are logged to the main
 * log and never thrown to the caller.
 */
export async function dumpMemoryStatsToFile() {
  try {
    const snapshot = await dumpMemoryStats({ includeHeapSpaces: false });
    memLogger.debug("[MemStat]:", snapshot);
  } catch (error) {
    log.warn("Unexpected error while writing memory stats log", error);
  }
}
/**
 * Start the periodic (15-minute) memory-stats logging loop.
 *
 * Each cycle re-reads the "settings.enableMemDebug" store flag, so logging
 * can be toggled at runtime without a restart. When enabled, the
 * heap-snapshot directory is pruned oldest-first toward 90% of the 5GB cap
 * before dumpMemoryStats() writes a new snapshot; if pruning cannot get the
 * directory under the cap, the cycle logs stats without a new snapshot.
 *
 * NOTE(review): the interval handle is not returned, so the loop cannot be
 * cancelled once started — confirm this is intended.
 */
function ongoingMemoryDump() {
  console.log(
    `Memory logging set to ${Store.get("settings.enableMemDebug")}. Log file at ${memLogger.transports.file.getFile().path}`,
  );
  setInterval(
    async () => {
      // Also write each snapshot to a dedicated memory stats log file as JSON lines.
      try {
        const loggingEnabled = Store.get("settings.enableMemDebug");
        log.debug(
          "Checking if memory stats logging is enabled.",
          loggingEnabled,
        );
        if (loggingEnabled) {
          // Enforce heap snapshot folder size limit (< 5GB) before writing a new snapshot.
          const MAX_DIR_BYTES = 5 * 1024 * 1024 * 1024; // 5GB
          const TARGET_REDUCED_BYTES = Math.floor(MAX_DIR_BYTES * 0.9); // prune down to 90%
          // Snapshots live in "heap-snapshots" next to the main log file.
          const baseDir = path.dirname(log.transports.file.getFile().path);
          const heapDir = path.join(baseDir, "heap-snapshots");
          try {
            fs.mkdirSync(heapDir, { recursive: true });
            const files = fs
              .readdirSync(heapDir)
              .filter((f) => f.endsWith(".heapsnapshot"));
            // Sum sizes and record mtimes so pruning can go oldest-first.
            let totalSize = 0;
            const fileStats: Array<{
              file: string;
              size: number;
              mtimeMs: number;
            }> = [];
            for (const file of files) {
              try {
                const stat = fs.statSync(path.join(heapDir, file));
                if (stat.isFile()) {
                  totalSize += stat.size;
                  fileStats.push({
                    file,
                    size: stat.size,
                    mtimeMs: stat.mtimeMs,
                  });
                }
              } catch (e) {
                log.warn("Failed to stat heap snapshot file", file, e);
              }
            }
            if (totalSize > MAX_DIR_BYTES) {
              // Sort oldest first and delete until below TARGET_REDUCED_BYTES.
              fileStats.sort((a, b) => a.mtimeMs - b.mtimeMs);
              let bytesAfter = totalSize;
              for (const info of fileStats) {
                if (bytesAfter <= TARGET_REDUCED_BYTES) break;
                try {
                  fs.unlinkSync(path.join(heapDir, info.file));
                  bytesAfter -= info.size;
                  log.warn(
                    `Pruned heap snapshot '${info.file}' (${formatBytes(info.size)}) to reduce directory size. New size: ${formatBytes(bytesAfter)}.`,
                  );
                } catch (errDel) {
                  log.warn(
                    "Failed to delete heap snapshot file",
                    info.file,
                    errDel,
                  );
                }
              }
              if (bytesAfter > MAX_DIR_BYTES) {
                // Still above hard cap; skip writing a new snapshot this cycle.
                log.warn(
                  `Heap snapshot directory still above hard cap (${formatBytes(bytesAfter)} > ${formatBytes(MAX_DIR_BYTES)}). Skipping new heap snapshot this cycle.`,
                );
                const stats = await dumpMemoryStats({
                  includeHeapSpaces: false,
                  writeHeapSnapshot: false,
                });
                memLogger.debug("[MemStat]:", stats);
                return; // skip remainder; we already logged stats without snapshot.
              }
            }
          } catch (dirErr) {
            log.warn(
              "Unexpected error while enforcing heap snapshot directory size limit",
              dirErr,
            );
            // Continue; failure to enforce limit should not stop memory stats.
          }
          // Directory is within allowed bounds (or pruning succeeded); proceed normally.
          const stats = await dumpMemoryStats({ includeHeapSpaces: false });
          memLogger.debug("[MemStat]:", stats);
        }
      } catch (err) {
        log.warn("Unexpected error while writing memory stats log", err);
      }
    },
    15 * 60 * 1000,
  ); // every 15 minutes
}
export default ongoingMemoryDump;

62
tests/heapPrune.test.ts Normal file
View File

@@ -0,0 +1,62 @@
import { test, expect } from "@playwright/test";
import fs from "fs";
import path from "path";
// We import the module after setting up a temporary log path by monkey patching electron-log.
// Since the project primarily uses Playwright for tests, we leverage its expect assertion library.
// NOTE: This is a lightweight test that simulates the pruning logic indirectly by invoking the exported ongoingMemoryDump
// function and creating artificial heap snapshot files exceeding the threshold.
// Because ongoingMemoryDump sets an interval, we invoke its internal logic by importing the file and manually calling dumpMemoryStats.
// For simplicity and to avoid altering production code for testability, we replicate the size enforcement logic here and assert behavior.
/**
 * Create `count` fake .heapsnapshot files of `sizeBytes` zero bytes each in
 * `dir` (created if missing). File mtimes are staggered one second apart,
 * oldest first, so pruning order is deterministic in tests.
 */
function createDummySnapshots(dir: string, count: number, sizeBytes: number) {
  fs.mkdirSync(dir, { recursive: true });
  const chunk = Buffer.alloc(1024 * 1024, 0); // reusable 1MB zero block
  const now = Date.now();
  for (let i = 0; i < count; i++) {
    const filePath = path.join(dir, `dummy-${i}.heapsnapshot`);
    const fd = fs.openSync(filePath, "w");
    // Fill the file in 1MB chunks to avoid a single huge allocation.
    let remaining = sizeBytes;
    while (remaining > 0) {
      const n = Math.min(chunk.length, remaining);
      fs.writeSync(fd, chunk, 0, n);
      remaining -= n;
    }
    fs.closeSync(fd);
    // Earlier-indexed files get older mtimes (deterministic prune ordering).
    const mtime = new Date(now - (count - i) * 1000);
    fs.utimesSync(filePath, mtime, mtime);
  }
}
// Verifies the pruning algorithm (oldest-first deletion down to 90% of the
// cap) against real files, using a 50MB simulated cap to keep the test cheap.
// Fix: the temp directory is now removed in a finally block, so it no longer
// leaks onto disk when an expectation fails mid-test.
test("heap snapshot directory pruning reduces size below simulated hard cap", async () => {
  const baseDir = fs.mkdtempSync(path.join(process.cwd(), "heap-test-"));
  const heapDir = path.join(baseDir, "heap-snapshots");
  try {
    // Simulate oversize: 15 files of 5MB each = 75MB
    createDummySnapshots(heapDir, 15, 5 * 1024 * 1024);
    // Use smaller cap to keep test resource usage low.
    const MAX_DIR_BYTES = 50 * 1024 * 1024; // 50MB simulated cap
    const TARGET_REDUCED_BYTES = Math.floor(MAX_DIR_BYTES * 0.9);
    const files = fs
      .readdirSync(heapDir)
      .filter((f) => f.endsWith(".heapsnapshot"));
    let totalSize = 0;
    const fileStats: Array<{ file: string; size: number; mtimeMs: number }> = [];
    for (const file of files) {
      const stat = fs.statSync(path.join(heapDir, file));
      totalSize += stat.size;
      fileStats.push({ file, size: stat.size, mtimeMs: stat.mtimeMs });
    }
    expect(totalSize).toBeGreaterThan(MAX_DIR_BYTES);
    // Delete oldest-first until the directory drops below the reduced target.
    fileStats.sort((a, b) => a.mtimeMs - b.mtimeMs);
    let bytesAfter = totalSize;
    for (const info of fileStats) {
      if (bytesAfter <= TARGET_REDUCED_BYTES) break;
      fs.unlinkSync(path.join(heapDir, info.file));
      bytesAfter -= info.size;
    }
    expect(bytesAfter).toBeLessThanOrEqual(TARGET_REDUCED_BYTES);
  } finally {
    // Cleanup runs even when an expectation above throws.
    fs.rmSync(baseDir, { recursive: true, force: true });
  }
});