Esignature migrations, webhook handling, and clean up.

This commit is contained in:
Patrick Fic
2026-03-25 15:24:14 -07:00
parent e17b57c705
commit d4c7298334
23 changed files with 615 additions and 67 deletions

View File

@@ -1,4 +1,4 @@
<babeledit_project be_version="2.7.1" version="1.2">
<babeledit_project version="1.2" be_version="2.7.1">
<!--
BabelEdit project file
@@ -20487,6 +20487,37 @@
</folder_node>
</children>
</folder_node>
<folder_node>
<name>esignature</name>
<children>
<folder_node>
<name>actions</name>
<children>
<concept_node>
<name>distribute</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
</children>
</folder_node>
</children>
</folder_node>
<folder_node>
<name>eula</name>
<children>
@@ -37705,6 +37736,27 @@
</translation>
</translations>
</concept_node>
<concept_node>
<name>esignature</name>
<definition_loaded>false</definition_loaded>
<description></description>
<comment></comment>
<default_text></default_text>
<translations>
<translation>
<language>en-US</language>
<approved>false</approved>
</translation>
<translation>
<language>es-MX</language>
<approved>false</approved>
</translation>
<translation>
<language>fr-CA</language>
<approved>false</approved>
</translation>
</translations>
</concept_node>
<concept_node>
<name>estimatelines</name>
<definition_loaded>false</definition_loaded>

View File

@@ -1,5 +1,5 @@
import { EmbedUpdateDocumentV1 } from "@documenso/embed-react";
import { Modal } from "antd";
import { Modal, notification } from "antd";
import axios from "axios";
import { useTranslation } from "react-i18next";
import { connect } from "react-redux";
@@ -7,6 +7,7 @@ import { createStructuredSelector } from "reselect";
import { toggleModalVisible } from "../../redux/modals/modals.actions";
import { selectEsignature } from "../../redux/modals/modals.selectors";
import { selectBodyshop, selectCurrentUser } from "../../redux/user/user.selectors";
import { useState } from "react";
const mapStateToProps = createStructuredSelector({
esignatureModal: selectEsignature,
@@ -22,13 +23,14 @@ export function EsignatureModalContainer({ esignatureModal, toggleModalVisible,
const { t } = useTranslation();
const { open, context } = esignatureModal;
const { token, envelopeId, documentId, jobid } = context;
const [distributing, setDistributing] = useState(false);
return (
<Modal
open={open}
title={t("jobs.labels.esignature")}
onOk={async () => {
try {
setDistributing(true);
const distResult = await axios.post("/esign/distribute", {
documentId,
envelopeId,
@@ -39,7 +41,12 @@ export function EsignatureModalContainer({ esignatureModal, toggleModalVisible,
toggleModalVisible();
} catch (error) {
console.error("Error distributing document:", error);
notification.error({
message: t("esignature.distribute_error"),
description: error?.response?.data?.message || error.message
});
}
setDistributing(false);
}}
onCancel={async () => {
try {
@@ -51,11 +58,16 @@ export function EsignatureModalContainer({ esignatureModal, toggleModalVisible,
toggleModalVisible();
} catch (error) {
console.error("Error cancelling document:", error);
notification.error({
message: t("esignature.cancel_error"),
description: error?.response?.data?.message || error.message
});
}
}}
okButtonProps={{ title: "Distribute by Email" }}
width="90%"
okButtonProps={{ loading: distributing }}
okText={t("esignature.actions.distribute")}
destroyOnHidden
width={800}
>
<div style={{ height: "600px", width: "100%" }}>
{token ? (

View File

@@ -1,6 +1,6 @@
import { SyncOutlined } from "@ant-design/icons";
import { useQuery } from "@apollo/client/react";
import { Button, Card, Col, Row, Tag } from "antd";
import { Button, Card, Checkbox, Col, Row, Space, Tag } from "antd";
import ResponsiveTable from "../responsive-table/responsive-table.component";
import { useTranslation } from "react-i18next";
import { connect } from "react-redux";
@@ -12,6 +12,8 @@ import { DateTimeFormatter } from "../../utils/DateFormatter";
import BlurWrapperComponent from "../feature-wrapper/blur-wrapper.component";
import { HasFeatureAccess } from "../feature-wrapper/feature-wrapper.component";
import UpsellComponent, { upsellEnum } from "../upsell/upsell.component";
import axios from "axios";
import { useNotification } from "../../contexts/Notifications/notificationContext";
const mapStateToProps = createStructuredSelector({
bodyshop: selectBodyshop
@@ -23,6 +25,7 @@ export default connect(mapStateToProps, mapDispatchToProps)(JobAuditTrail);
export function JobAuditTrail({ bodyshop, jobId }) {
const { t } = useTranslation();
const notification = useNotification();
const { loading, data, refetch } = useQuery(QUERY_AUDIT_TRAIL, {
variables: { jobid: jobId },
skip: !jobId,
@@ -53,6 +56,125 @@ export function JobAuditTrail({ bodyshop, jobId }) {
)
}
];
const esigColumns = [
{
title: t("audit.fields.created_at"),
dataIndex: "created_at",
key: "created_at",
render: (text) => <DateTimeFormatter>{text}</DateTimeFormatter>
},
{
title: t("audit.fields.updated_at"),
dataIndex: "updated_at",
key: "updated_at",
render: (text) => <DateTimeFormatter>{text}</DateTimeFormatter>
},
{
title: t("audit.fields.title"),
dataIndex: "title",
key: "title",
render: (text) => (
<BlurWrapperComponent featureName="audit" bypass>
<div>{text}</div>
</BlurWrapperComponent>
)
},
{
title: t("audit.fields.external_document_id"),
dataIndex: "external_document_id",
key: "external_document_id",
render: (text) => (
<BlurWrapperComponent featureName="audit" bypass>
<div>{text}</div>
</BlurWrapperComponent>
)
},
{
title: t("audit.fields.status"),
dataIndex: "status",
key: "status",
render: (text) => (
<BlurWrapperComponent featureName="audit" bypass>
<div>{text}</div>
</BlurWrapperComponent>
)
},
{
title: t("audit.fields.opened"),
dataIndex: "opened",
key: "opened",
render: (text) => <Checkbox checked={text} disabled />
},
{
title: t("audit.fields.rejected"),
dataIndex: "rejected",
key: "rejected",
render: (text) => <Checkbox checked={text} disabled />
},
{
title: t("audit.fields.completed"),
dataIndex: "completed",
key: "completed",
render: (text) => <Checkbox checked={text} disabled />
},
{
title: t("audit.fields.completed_at"),
dataIndex: "completed_at",
key: "completed_at",
render: (text) => <DateTimeFormatter>{text}</DateTimeFormatter>
},
{
title: t("general.labels.actions"),
dataIndex: "actions",
key: "actions",
render: (_text, record) => (
<Space wrap>
<Button
disabled={record.completed_at !== null || record.status === "REJECTED"}
onClick={async () => {
logImEXEvent("job_esig_delete", {});
try {
const deleteResult = await axios.post("/esign/delete", {
documentId: record.external_document_id
});
console.log("*** ~ JobAuditTrail ~ deleteResult:", deleteResult);
refetch();
} catch (error) {
console.error("Error deleting document:", error?.response?.data || error.message);
notification.error({
message: t("esignature.delete_error"),
description: error?.response?.data?.error || error.message
});
}
}}
>
{t("esignature.actions.delete")}
</Button>
<Button onClick={() => console.log(record)}>{t("esignature.actions.resend")}</Button>
<Button
onClick={() => {
axios
.post("/esign/view", {
documentId: record.external_document_id
})
.then((response) => {
window.open(response.data?.document?.downloadUrl, "_blank");
})
.catch((error) => {
console.error("Error viewing document:", error?.response?.data || error.message);
notification.error({
message: t("esignature.view_error"),
description: error?.response?.data?.message || error.message
});
});
}}
>
{t("esignature.actions.view")}
</Button>
</Space>
)
}
];
const emailColumns = [
{
title: t("audit.fields.created"),
@@ -184,6 +306,17 @@ export function JobAuditTrail({ bodyshop, jobId }) {
/>
</Card>
</Col>
<Col span={24}>
<Card title={t("jobs.labels.esignatures")}>
<ResponsiveTable
loading={loading}
columns={esigColumns}
mobileColumnKeys={["title", "status"]}
rowKey="id"
dataSource={data ? data.esignature_documents : []}
/>
</Card>
</Col>
</Row>
);
}

View File

@@ -22,6 +22,23 @@ export const QUERY_AUDIT_TRAIL = gql`
useremail
status
}
esignature_documents(where: {jobid: {_eq: $jobid}}) {
id
created_at
updated_at
jobid
external_document_id
subject
message
title
status
recipients
completed_at
opened
completed
rejected
completed_at
}
}
`;

View File

@@ -1239,6 +1239,11 @@
"unique_employee_number": "You must enter a unique employee number."
}
},
"esignature": {
"actions": {
"distribute": "Distribute"
}
},
"eula": {
"buttons": {
"accept": "Accept EULA"

View File

@@ -1239,6 +1239,11 @@
"unique_employee_number": ""
}
},
"esignature": {
"actions": {
"distribute": ""
}
},
"eula": {
"buttons": {
"accept": "Accept EULA"

View File

@@ -1239,6 +1239,11 @@
"unique_employee_number": ""
}
},
"esignature": {
"actions": {
"distribute": ""
}
},
"eula": {
"buttons": {
"accept": "Accept EULA"

View File

@@ -0,0 +1,72 @@
# docker-compose definition for a self-hosted Documenso stack.
# NOTE(review): indentation restored — the pasted file had lost all leading
# whitespace, which makes YAML invalid. Structure below follows the upstream
# Documenso compose example: a Postgres service, the Documenso app, and a
# named volume for the database.
name: documenso-production

services:
  database:
    image: postgres:15
    environment:
      # :?err makes compose fail fast when a required variable is unset.
      - POSTGRES_USER=${POSTGRES_USER:?err}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:?err}
      - POSTGRES_DB=${POSTGRES_DB:?err}
    healthcheck:
      test: ['CMD-SHELL', 'pg_isready -U ${POSTGRES_USER}']
      interval: 10s
      timeout: 5s
      retries: 5
    volumes:
      - database:/var/lib/postgresql/data

  documenso:
    # NOTE(review): :latest is unpinned — consider pinning a version tag for
    # reproducible production deploys.
    image: documenso/documenso:latest
    depends_on:
      database:
        condition: service_healthy
    environment:
      - PORT=${PORT:-3000}
      - NEXTAUTH_SECRET=${NEXTAUTH_SECRET:?err}
      - NEXT_PRIVATE_ENCRYPTION_KEY=${NEXT_PRIVATE_ENCRYPTION_KEY:?err}
      - NEXT_PRIVATE_ENCRYPTION_SECONDARY_KEY=${NEXT_PRIVATE_ENCRYPTION_SECONDARY_KEY:?err}
      - NEXT_PRIVATE_GOOGLE_CLIENT_ID=${NEXT_PRIVATE_GOOGLE_CLIENT_ID}
      - NEXT_PRIVATE_GOOGLE_CLIENT_SECRET=${NEXT_PRIVATE_GOOGLE_CLIENT_SECRET}
      - NEXT_PUBLIC_WEBAPP_URL=${NEXT_PUBLIC_WEBAPP_URL:?err}
      - NEXT_PRIVATE_INTERNAL_WEBAPP_URL=${NEXT_PRIVATE_INTERNAL_WEBAPP_URL:-http://localhost:$PORT}
      - NEXT_PRIVATE_DATABASE_URL=${NEXT_PRIVATE_DATABASE_URL:?err}
      - NEXT_PRIVATE_DIRECT_DATABASE_URL=${NEXT_PRIVATE_DIRECT_DATABASE_URL:-${NEXT_PRIVATE_DATABASE_URL}}
      - NEXT_PUBLIC_UPLOAD_TRANSPORT=${NEXT_PUBLIC_UPLOAD_TRANSPORT:-database}
      - NEXT_PRIVATE_UPLOAD_ENDPOINT=${NEXT_PRIVATE_UPLOAD_ENDPOINT}
      - NEXT_PRIVATE_UPLOAD_FORCE_PATH_STYLE=${NEXT_PRIVATE_UPLOAD_FORCE_PATH_STYLE}
      - NEXT_PRIVATE_UPLOAD_REGION=${NEXT_PRIVATE_UPLOAD_REGION}
      - NEXT_PRIVATE_UPLOAD_BUCKET=${NEXT_PRIVATE_UPLOAD_BUCKET}
      - NEXT_PRIVATE_UPLOAD_ACCESS_KEY_ID=${NEXT_PRIVATE_UPLOAD_ACCESS_KEY_ID}
      - NEXT_PRIVATE_UPLOAD_SECRET_ACCESS_KEY=${NEXT_PRIVATE_UPLOAD_SECRET_ACCESS_KEY}
      - NEXT_PRIVATE_SMTP_TRANSPORT=${NEXT_PRIVATE_SMTP_TRANSPORT:?err}
      - NEXT_PRIVATE_SMTP_HOST=${NEXT_PRIVATE_SMTP_HOST}
      - NEXT_PRIVATE_SMTP_PORT=${NEXT_PRIVATE_SMTP_PORT}
      - NEXT_PRIVATE_SMTP_USERNAME=${NEXT_PRIVATE_SMTP_USERNAME}
      - NEXT_PRIVATE_SMTP_PASSWORD=${NEXT_PRIVATE_SMTP_PASSWORD}
      - NEXT_PRIVATE_SMTP_APIKEY_USER=${NEXT_PRIVATE_SMTP_APIKEY_USER}
      - NEXT_PRIVATE_SMTP_APIKEY=${NEXT_PRIVATE_SMTP_APIKEY}
      - NEXT_PRIVATE_SMTP_SECURE=${NEXT_PRIVATE_SMTP_SECURE}
      - NEXT_PRIVATE_SMTP_UNSAFE_IGNORE_TLS=${NEXT_PRIVATE_SMTP_UNSAFE_IGNORE_TLS}
      - NEXT_PRIVATE_SMTP_FROM_NAME=${NEXT_PRIVATE_SMTP_FROM_NAME:?err}
      - NEXT_PRIVATE_SMTP_FROM_ADDRESS=${NEXT_PRIVATE_SMTP_FROM_ADDRESS:?err}
      - NEXT_PRIVATE_SMTP_SERVICE=${NEXT_PRIVATE_SMTP_SERVICE}
      - NEXT_PRIVATE_RESEND_API_KEY=${NEXT_PRIVATE_RESEND_API_KEY}
      - NEXT_PRIVATE_MAILCHANNELS_API_KEY=${NEXT_PRIVATE_MAILCHANNELS_API_KEY}
      - NEXT_PRIVATE_MAILCHANNELS_ENDPOINT=${NEXT_PRIVATE_MAILCHANNELS_ENDPOINT}
      - NEXT_PRIVATE_MAILCHANNELS_DKIM_DOMAIN=${NEXT_PRIVATE_MAILCHANNELS_DKIM_DOMAIN}
      - NEXT_PRIVATE_MAILCHANNELS_DKIM_SELECTOR=${NEXT_PRIVATE_MAILCHANNELS_DKIM_SELECTOR}
      - NEXT_PRIVATE_MAILCHANNELS_DKIM_PRIVATE_KEY=${NEXT_PRIVATE_MAILCHANNELS_DKIM_PRIVATE_KEY}
      - NEXT_PUBLIC_DOCUMENT_SIZE_UPLOAD_LIMIT=${NEXT_PUBLIC_DOCUMENT_SIZE_UPLOAD_LIMIT}
      - NEXT_PUBLIC_POSTHOG_KEY=${NEXT_PUBLIC_POSTHOG_KEY}
      - NEXT_PUBLIC_DISABLE_SIGNUP=${NEXT_PUBLIC_DISABLE_SIGNUP}
      - NEXT_PRIVATE_ALLOWED_SIGNUP_DOMAINS=${NEXT_PRIVATE_ALLOWED_SIGNUP_DOMAINS}
      - NEXT_PRIVATE_SIGNING_LOCAL_FILE_PATH=${NEXT_PRIVATE_SIGNING_LOCAL_FILE_PATH:-/opt/documenso/cert.p12}
      - NEXT_PRIVATE_SIGNING_PASSPHRASE=${NEXT_PRIVATE_SIGNING_PASSPHRASE}
      - NEXT_PUBLIC_USE_INTERNAL_URL_BROWSERLESS=${NEXT_PUBLIC_USE_INTERNAL_URL_BROWSERLESS}
    ports:
      - ${PORT:-3000}:${PORT:-3000}
    volumes:
      # Document-signing certificate, mounted read-only from the host.
      - /opt/documenso/cert.p12:/opt/documenso/cert.p12:ro

volumes:
  database:

View File

@@ -2156,10 +2156,12 @@
- active:
_eq: true
columns:
- commission_rates
- created_at
- employeeid
- id
- labor_rates
- payout_method
- percentage
- teamid
- updated_at
@@ -2167,10 +2169,12 @@
- role: user
permission:
columns:
- commission_rates
- created_at
- employeeid
- id
- labor_rates
- payout_method
- percentage
- teamid
- updated_at
@@ -2188,10 +2192,12 @@
- role: user
permission:
columns:
- commission_rates
- created_at
- employeeid
- id
- labor_rates
- payout_method
- percentage
- teamid
- updated_at
@@ -2560,6 +2566,101 @@
_eq: X-Hasura-User-Id
- active:
_eq: true
- table:
name: esignature_documents
schema: public
object_relationships:
- name: document
using:
foreign_key_constraint_on: documentid
- name: job
using:
foreign_key_constraint_on: jobid
insert_permissions:
- role: user
permission:
check:
job:
bodyshop:
associations:
_and:
- active:
_eq: true
- user:
authid:
_eq: X-Hasura-User-Id
columns:
- completed
- documentid
- external_document_id
- jobid
- message
- opened
- recipients
- rejected
- status
- subject
- title
comment: ""
select_permissions:
- role: user
permission:
columns:
- completed
- completed_at
- created_at
- documentid
- external_document_id
- id
- jobid
- message
- opened
- recipients
- rejected
- status
- subject
- title
- updated_at
filter:
job:
bodyshop:
associations:
_and:
- active:
_eq: true
- user:
authid:
_eq: X-Hasura-User-Id
comment: ""
update_permissions:
- role: user
permission:
columns:
- completed
- completed_at
- created_at
- documentid
- external_document_id
- message
- opened
- recipients
- rejected
- status
- subject
- title
- updated_at
filter:
job:
bodyshop:
associations:
_and:
- active:
_eq: true
- user:
authid:
_eq: X-Hasura-User-Id
check: null
comment: ""
- table:
name: eula_acceptances
schema: public
@@ -3458,6 +3559,13 @@
table:
name: email_audit_trail
schema: public
- name: esignature_documents
using:
foreign_key_constraint_on:
column: jobid
table:
name: esignature_documents
schema: public
- name: exportlogs
using:
foreign_key_constraint_on:
@@ -6506,6 +6614,7 @@
- id
- jobid
- memo
- payout_context
- productivehrs
- rate
- task_name
@@ -6531,6 +6640,7 @@
- id
- jobid
- memo
- payout_context
- productivehrs
- rate
- task_name
@@ -6565,6 +6675,7 @@
- id
- jobid
- memo
- payout_context
- productivehrs
- rate
- task_name
@@ -6748,6 +6859,7 @@
- id
- jobid
- memo
- payout_context
- productivehrs
- rate
- updated_at
@@ -6768,6 +6880,7 @@
- id
- jobid
- memo
- payout_context
- productivehrs
- rate
- updated_at
@@ -6798,6 +6911,7 @@
- id
- jobid
- memo
- payout_context
- productivehrs
- rate
- updated_at

View File

@@ -0,0 +1 @@
-- Down migration: reverts the migration that created esignature_documents.
DROP TABLE "public"."esignature_documents";

View File

@@ -0,0 +1,18 @@
-- Up migration: create esignature_documents, which tracks the lifecycle of
-- e-signature documents per job (status, recipients, opened/rejected/completed
-- flags, and a link to the resulting stored document).
-- Fix: CREATE EXTENSION must run BEFORE the table is created — the "id"
-- column's DEFAULT calls gen_random_uuid(), which pgcrypto provides on
-- PostgreSQL versions before 13 (it is built in from 13 onward).
CREATE EXTENSION IF NOT EXISTS pgcrypto;
CREATE TABLE "public"."esignature_documents" ("id" uuid NOT NULL DEFAULT gen_random_uuid(), "created_at" timestamptz NOT NULL DEFAULT now(), "updated_at" timestamptz NOT NULL DEFAULT now(), "external_document_id" text NOT NULL, "jobid" uuid NOT NULL, "status" text NOT NULL, "recipients" jsonb[] NOT NULL, "title" text NOT NULL, "subject" text NOT NULL, "message" text NOT NULL, "viewed" boolean NOT NULL DEFAULT false, "completed" boolean NOT NULL DEFAULT false, "documentid" uuid, "rejected" boolean NOT NULL DEFAULT false, "opened" boolean NOT NULL DEFAULT false, PRIMARY KEY ("id") , FOREIGN KEY ("jobid") REFERENCES "public"."jobs"("id") ON UPDATE restrict ON DELETE restrict);COMMENT ON TABLE "public"."esignature_documents" IS E'Tracking the lifecycle of esignature documents. ';
-- Shared trigger function: stamps updated_at with the current time on every
-- row UPDATE (CREATE OR REPLACE so re-running alongside other tables is safe).
CREATE OR REPLACE FUNCTION "public"."set_current_timestamp_updated_at"()
RETURNS TRIGGER AS $$
DECLARE
_new record;
BEGIN
_new := NEW;
_new."updated_at" = NOW();
RETURN _new;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER "set_public_esignature_documents_updated_at"
BEFORE UPDATE ON "public"."esignature_documents"
FOR EACH ROW
EXECUTE PROCEDURE "public"."set_current_timestamp_updated_at"();
COMMENT ON TRIGGER "set_public_esignature_documents_updated_at" ON "public"."esignature_documents"
IS 'trigger to set value of column "updated_at" to current timestamp on row update';

View File

@@ -0,0 +1 @@
-- Down migration: removes the documents(id) foreign key added by the up step.
alter table "public"."esignature_documents" drop constraint "esignature_documents_documentid_fkey";

View File

@@ -0,0 +1,5 @@
-- Up migration: link esignature_documents.documentid to the stored document
-- produced on completion. The column is nullable, so drafts without a final
-- document are unaffected; restrict prevents deleting a referenced document.
alter table "public"."esignature_documents"
add constraint "esignature_documents_documentid_fkey"
foreign key ("documentid")
references "public"."documents"
("id") on update restrict on delete restrict;

View File

@@ -0,0 +1 @@
-- Down migration: restore recipients to its original jsonb[] type (as created
-- in the initial CREATE TABLE). Fix: the auto-generated "TYPE ARRAY" is not
-- valid PostgreSQL and would fail; an explicit element type and USING cast
-- are required when changing between array types.
ALTER TABLE "public"."esignature_documents" ALTER COLUMN "recipients" TYPE jsonb[] USING "recipients"::jsonb[];

View File

@@ -0,0 +1 @@
-- Up migration: change recipients from jsonb[] to json[].
-- NOTE(review): PostgreSQL may require an explicit cast here
-- (USING "recipients"::json[]) since there is no implicit jsonb[] -> json[]
-- conversion — verify this ran cleanly against a populated table.
ALTER TABLE "public"."esignature_documents" ALTER COLUMN "recipients" TYPE json[];

View File

@@ -0,0 +1,4 @@
-- Down migration for: alter table "public"."esignature_documents"
--   add column "completed_at" timestamptz null;
-- Hasura could not auto-generate this; the correct inverse of adding a
-- nullable column is simply dropping it.
alter table "public"."esignature_documents" drop column "completed_at";

View File

@@ -0,0 +1,2 @@
-- Up migration: record when a document reached COMPLETED status (nullable —
-- unset until the completion webhook fires).
alter table "public"."esignature_documents" add column "completed_at" timestamptz
null;

View File

@@ -0,0 +1,4 @@
-- Down migration: restore the "viewed" column dropped by the up step.
-- Fix: the auto-generated version listed these statements in reverse order,
-- so "comment on column" and the "alter column" statements ran before the
-- column existed and the migration failed. The column must be added first.
alter table "public"."esignature_documents" add column "viewed" bool;
alter table "public"."esignature_documents" alter column "viewed" drop not null;
alter table "public"."esignature_documents" alter column "viewed" set default false;
-- NOTE(review): this comment text mirrors the table-level comment from the
-- original auto-generated file; confirm it is the intended column comment.
comment on column "public"."esignature_documents"."viewed" is E'Tracking the lifecycle of esignature documents. ';

View File

@@ -0,0 +1 @@
-- Up migration: remove the unused "viewed" flag ("opened" supersedes it).
alter table "public"."esignature_documents" drop column "viewed" cascade;

View File

@@ -10,7 +10,7 @@ const documenso = new Documenso({
});
const JSR_SERVER = "https://reports.test.imex.online";
const jsreport = require("@jsreport/nodejs-client");
const { QUERY_JOB_FOR_SIGNATURE, INSERT_ESIG_AUDIT_TRAIL } = require("../graphql-client/queries");
const { QUERY_JOB_FOR_SIGNATURE, INSERT_ESIGNATURE_DOCUMENT, DISTRIBUTE_ESIGNATURE_DOCUMENT, QUERY_ESIGNATURE_BY_EXTERNAL_ID, UPDATE_ESIGNATURE_DOCUMENT } = require("../graphql-client/queries");
async function distributeDocument(req, res) {
@@ -22,8 +22,12 @@ async function distributeDocument(req, res) {
documentId,
});
const auditEntry = await client.request(INSERT_ESIG_AUDIT_TRAIL, {
obj: {
const auditEntry = await client.request(DISTRIBUTE_ESIGNATURE_DOCUMENT, {
external_document_id: documentId.toString(),
esig_update: {
status: "SENT"
},
audit: {
jobid: req.body.jobid,
bodyshopid: req.body.bodyshopid,
operation: `Esignature document with title ${distributeResult.title} (ID: ${documentId}) distributed to recipients.`,
@@ -39,17 +43,31 @@ async function distributeDocument(req, res) {
message: error.message, stack: error.stack,
body: req.body
});
res.status(500).json({ error: "An error occurred while distributing the document." });
res.status(500).json({ error: "An error occurred while distributing the document.", message: error.message });
}
}
async function deleteDocument(req, res) {
try {
//TODO: Add in logic to check if doc exists, is deletable etc.
const client = req.userGraphQLClient;
const { documentId } = req.body;
//TODO: This needs to be hardened to prevent deleting other people's documents, completed ones, etc.
const { esignature_documents } = await client.request(QUERY_ESIGNATURE_BY_EXTERNAL_ID, { external_document_id: documentId.toString() });
if (!esignature_documents || esignature_documents.length === 0) {
//return res.status(404).json({ error: "Document not found" });
}
const deleteResult = await documenso.documents.delete({
documentId
documentId: (documentId)
});
await client.request(UPDATE_ESIGNATURE_DOCUMENT, {
external_document_id: documentId.toString(),
esig_update: {
status: "DELETED"
}
})
res.json({ success: true, deleteResult });
} catch (error) {
console.error("Error deleting document:", error?.data);
@@ -61,6 +79,23 @@ async function deleteDocument(req, res) {
}
}
async function viewDocument(req, res) {
try {
const { documentId } = req.body;
const document = await documenso.document.documentDownload({
documentId: parseInt(documentId)
});
res.json({ success: true, document });
} catch (error) {
console.error("Error viewing document:", error?.data);
logger.log(`esig-view-error`, "ERROR", "esig", "api", {
message: error.message, stack: error.stack,
body: req.body
});
res.status(500).json({ error: "An error occurred while retrieving the document.", message: error.message });
}
}
async function newEsignDocument(req, res) {
try {
const client = req.userGraphQLClient;
@@ -68,21 +103,18 @@ async function newEsignDocument(req, res) {
const { pdf: fileBuffer, esigData } = await RenderTemplate({ client, req })
const fileBlob = new Blob([fileBuffer], { type: "application/pdf" });
//Get the Job data.
const { jobs_by_pk: jobData } = await client.request(QUERY_JOB_FOR_SIGNATURE, { jobid: req.body.jobid });
const recipients = [{
email: "patrick@imexsystems.ca",//jobData.ownr_ea,
name: `${jobData.ownr_fn} ${jobData.ownr_ln}`,
role: "SIGNER",
}]
const createDocumentResponse = await documenso.documents.create({
payload: {
title: esigData?.title || `Esign request from ${bodyshop.shopname}`,
externalId: `${req.body.jobid}|${req.user?.email}`, //Have to pass the uploaded by later on. Limited to 255 chars.
recipients: [
{
email: "allan@imexsystems.ca",//jobData.ownr_ea,
name: `${jobData.ownr_fn} ${jobData.ownr_ln}`,
role: "SIGNER",
}
],
recipients,
meta: {
timezone: bodyshop.timezone,
dateFormat: "MM/dd/yyyy hh:mm a",
@@ -99,7 +131,6 @@ async function newEsignDocument(req, res) {
documentId: createDocumentResponse.id,
});
if (esigData?.fields && esigData.fields.length > 0) {
try {
await documenso.envelopes.fields.createMany({
@@ -117,16 +148,23 @@ async function newEsignDocument(req, res) {
const presignToken = await documenso.embedding.embeddingPresignCreateEmbeddingPresignToken({})
//add to job audit trail.
const auditEntry = await client.request(INSERT_ESIG_AUDIT_TRAIL, {
obj: {
const auditEntry = await client.request(INSERT_ESIGNATURE_DOCUMENT, {
audit: {
jobid: req.body.jobid,
bodyshopid: bodyshop.id,
operation: `Esignature document created. Subject: ${esigData?.subject || "No subject"}, Message: ${esigData?.message || "No message"}. Document ID: ${createDocumentResponse.id} Envlope ID: ${createDocumentResponse.envelopeId}`,
useremail: req.user?.email,
type: 'esig-create'
},
esig: {
jobid: req.body.jobid,
external_document_id: createDocumentResponse.id.toString(),
//envelope_id: createDocumentResponse.envelopeId,
subject: esigData?.subject || "No subject",
message: esigData?.message || "No message",
title: esigData?.title || "No title",
status: "DRAFT",
recipients: recipients,
}
})
@@ -283,7 +321,8 @@ const fetchContextData = async ({ templateObject, jsrAuth, req, }) => {
module.exports = {
newEsignDocument,
distributeDocument,
deleteDocument
deleteDocument,
viewDocument
}

View File

@@ -1,10 +1,10 @@
const { Documenso } = require("@documenso/sdk-typescript");
const fs = require("fs");
const path = require("path");
const logger = require("../utils/logger");
const { QUERY_META_FOR_ESIG_COMPLETION, INSERT_ESIGNATURE_DOCUMENT, INSERT_ESIG_AUDIT_TRAIL } = require("../graphql-client/queries");
const { QUERY_META_FOR_ESIG_COMPLETION, INSERT_ESIGNATURE_COMPLETED_DOCOUMENT, UPDATE_ESIGNATURE_DOCUMENT, DISTRIBUTE_ESIGNATURE_DOCUMENT } = require("../graphql-client/queries");
const { uploadFileBuffer } = require("../media/imgproxy-media");
const { log } = require("node-persist");
const client = require('../graphql-client/graphql-client').client;
const documenso = new Documenso({
apiKey: "api_asojim0czruv13ud",//Done on a by team basis,
@@ -30,38 +30,56 @@ async function esignWebhook(req, res) {
body: message
});
//TODO: Implement checks to prevent this from going backwards in status? If a request fails, it retries, which could cause a document marked as completed to be marked as rejected if the rejection event is processed after the completion event.
switch (message.event) {
case webhookTypeEnums.DOCUMENT_OPENED:
await client.request(UPDATE_ESIGNATURE_DOCUMENT, {
external_document_id: message.payload?.payload?.id?.toString(),
esig_update: {
status: "OPENED",
opened: true,
}
})
break;
case webhookTypeEnums.DOCUMENT_REJECTED:
await client.request(UPDATE_ESIGNATURE_DOCUMENT, {
external_document_id: message.payload?.payload?.id?.toString(),
esig_update: {
status: "REJECTED",
rejected: true,
}
})
break;
case webhookTypeEnums.DOCUMENT_CREATED:
//This is largely a throwaway event we know it was created.
console.log("Document created event received. Document ID:", message.payload.documentId);
console.log("Document created event received. Document ID:", message.payload?.payload?.documentId);
// Here you can add any additional processing you want to do when a document is created
break;
case webhookTypeEnums.DOCUMENT_COMPLETED:
console.log("Document completed event received. Document ID:", message.payload.documentId);
console.log("Document completed event received. Document ID:", message.payload?.payload?.documentId);
await handleDocumentCompleted(message.payload);
// Here you can add any additional processing you want to do when a document is completed
break;
case webhookTypeEnums.DOCUMENT_SIGNED:
console.log("Document signed event received. Document ID:", message.payload.documentId);
console.log("Document signed event received. Document ID:", message.payload?.payload?.documentId);
// Here you can add any additional processing you want to do when a document is signed
await client.request(UPDATE_ESIGNATURE_DOCUMENT, {
external_document_id: message.payload?.payload?.id?.toString(),
esig_update: {
status: "SIGNED",
}
})
break;
default:
console.log(`Unhandled event type: ${message.event}`);
res.status(200).json({ message: "Unsupported event type." });
logger.log(`esig-webhook-received-unknown`, "ERROR", "redis", "api", {
event: message.event,
body: message
});
return;
}
// const result = await documenso.documents.download({
// documentId: req.body.payload.id,
// });
// result.resultingBuffer = Buffer.from(result.resultingArrayBuffer);
// // Save the document to a file for testing purposes
// const downloadsDir = path.join(__dirname, '../downloads');
// if (!fs.existsSync(downloadsDir)) {
// fs.mkdirSync(downloadsDir, { recursive: true });
// }
// const filePath = path.join(downloadsDir, `document_${req.body.payload.id}.pdf`);
// fs.writeFileSync(filePath, result.resultingBuffer);
// console.log(result)
logger.log(`esig-webhook-processed`, "INFO", "redis", "api", { event: message.event, documentId: message.payload?.payload?.id, jobid: message.payload?.payload?.externalId?.split("|")[0] || null });
res.sendStatus(200)
} catch (error) {
@@ -69,25 +87,14 @@ async function esignWebhook(req, res) {
message: error.message, stack: error.stack,
body: req.body
});
// const downloadsDir = path.join(__dirname, '../downloads');
// if (!fs.existsSync(downloadsDir)) {
// fs.mkdirSync(downloadsDir, { recursive: true });
// }
// const filePath = path.join(downloadsDir, `document_${req.body.payload.id}.pdf`);
// fs.writeFileSync(filePath, Buffer.from(err.body));
// console.error("Error handling esign webhook:", err);
res.sendStatus(500)
res.status(500).json({ message: "Error processing webhook event.", error: error.message });
}
}
async function handleDocumentCompleted(payload = sampleComplete) {
//Check if the bodyshop is on image proxy or not
try {
//Split the external id to get the uploaded user.
const [jobid, uploaded_by] = payload.externalId.split("|");
if (!jobid || !uploaded_by) {
throw new Error(`Invalid externalId format. Expected "jobid|uploaded_by", got "${payload.externalId}"`);
}
@@ -106,7 +113,7 @@ async function handleDocumentCompleted(payload = sampleComplete) {
let key = `${jobs_by_pk.bodyshop.id}/${jobs_by_pk.id}/${replaceAccents(document.filename).replace(/[^A-Z0-9]+/gi, "_")}-${new Date().getTime()}.pdf`;
if (jobs_by_pk?.bodyshop?.uselocalmediaserver) {
//LMS not yet implemented.
//TODO:LMS not yet implemented.
} else {
//S3 Upload
@@ -125,8 +132,15 @@ async function handleDocumentCompleted(payload = sampleComplete) {
s3Key: key,
bucket: uploadResult.bucket
});
const auditEntry = await client.request(INSERT_ESIG_AUDIT_TRAIL, {
obj: {
await client.request(DISTRIBUTE_ESIGNATURE_DOCUMENT, {
external_document_id: payload.id.toString(),
esig_update: {
status: "COMPLETED",
completed: true,
completed_at: new Date().toISOString()
},
audit: {
jobid: jobs_by_pk.id,
bodyshopid: jobs_by_pk.bodyshop.id,
operation: `Esignature document with title ${payload.title} (ID: ${payload.documentMeta.id}) has been completed.`,
@@ -134,8 +148,10 @@ async function handleDocumentCompleted(payload = sampleComplete) {
type: 'esig-complete'
}
})
//insert the document record with the s3 key and bucket info.
await client.request(INSERT_ESIGNATURE_DOCUMENT, {
await client.request(INSERT_ESIGNATURE_COMPLETED_DOCOUMENT, {
docInput: {
jobid: jobs_by_pk.id,
uploaded_by: uploaded_by,

View File

@@ -3261,6 +3261,45 @@ exports.QUERY_JOB_FOR_SIGNATURE = `query QUERY_JOB_FOR_SIGNATURE($jobid: uuid!)
}
}
`
// Creates a new esignature_documents row and its matching audit-trail entry
// in a single mutation (one round trip; note Hasura mutation roots are
// executed sequentially but are NOT one atomic transaction across roots in
// all configurations — TODO confirm transactional guarantees matter here).
exports.INSERT_ESIGNATURE_DOCUMENT = `mutation INSERT_ESIGNATURE_DOCUMENT($audit: audit_trail_insert_input!, $esig: esignature_documents_insert_input!) {
insert_audit_trail_one(object: $audit) {
id
}
insert_esignature_documents_one(object: $esig){
id
}
}`
// Looks up our local tracking row by the provider's (Documenso) document id.
exports.QUERY_ESIGNATURE_BY_EXTERNAL_ID = `query QUERY_ESIGNATURE_BY_EXTERNAL_ID($external_document_id: String!) {
esignature_documents(where: {external_document_id: {_eq: $external_document_id}}) {
id
jobid
external_document_id
}
}`
// Writes an audit-trail entry and applies a status update (e.g. SENT,
// COMPLETED) to all rows matching the external document id.
exports.DISTRIBUTE_ESIGNATURE_DOCUMENT = `mutation DISTRIBUTE_ESIGNATURE_DOCUMENT($external_document_id: String!, $esig_update: esignature_documents_set_input!, $audit: audit_trail_insert_input!) {
insert_audit_trail_one(object: $audit) {
id
}
update_esignature_documents(where: {external_document_id: {_eq: $external_document_id}}, _set: $esig_update) {
affected_rows
returning {
id
}
}
}
`
// Status-only variant of the above, used by webhook handlers that don't
// need an audit-trail entry.
exports.UPDATE_ESIGNATURE_DOCUMENT = `mutation UPDATE_ESIGNATURE_DOCUMENT($external_document_id: String!, $esig_update: esignature_documents_set_input!) {
update_esignature_documents(where: {external_document_id: {_eq: $external_document_id}}, _set: $esig_update) {
affected_rows
returning {
id
}
}
}
`
exports.INSERT_ESIG_AUDIT_TRAIL = `mutation INSERT_ESIG_AUDIT_TRAIL($obj: audit_trail_insert_input!) {
insert_audit_trail_one(object: $obj) {
@@ -3283,7 +3322,7 @@ exports.QUERY_META_FOR_ESIG_COMPLETION = `query QUERY_META_FOR_ESIG_COMPLETION($
}
}`
exports.INSERT_ESIGNATURE_DOCUMENT = `mutation INSERT_ESIGNATURE_DOCUMENT($docInput: documents_insert_input!) {
exports.INSERT_ESIGNATURE_COMPLETED_DOCOUMENT = `mutation INSERT_ESIGNATURE_COMPLETED_DOCOUMENT($docInput: documents_insert_input!) {
insert_documents_one(object: $docInput) {
id
name

View File

@@ -3,7 +3,7 @@ const router = express.Router();
const validateFirebaseIdTokenMiddleware = require("../middleware/validateFirebaseIdTokenMiddleware");
const withUserGraphQLClientMiddleware = require("../middleware/withUserGraphQLClientMiddleware");
const { newEsignDocument, distributeDocument, deleteDocument } = require("../esign/esign-new");
const { newEsignDocument, distributeDocument, viewDocument, deleteDocument } = require("../esign/esign-new");
const { esignWebhook } = require("../esign/webhook");
//router.use(validateFirebaseIdTokenMiddleware);
@@ -11,6 +11,7 @@ const { esignWebhook } = require("../esign/webhook");
router.post("/new", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, newEsignDocument);
router.post("/distribute", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, distributeDocument);
router.post("/delete", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, deleteDocument);
router.post("/view", validateFirebaseIdTokenMiddleware, withUserGraphQLClientMiddleware, viewDocument);
router.post("/webhook", esignWebhook);