Overview:
Webhooks are POST requests made to an endpoint each time a certain task is completed, using the result of the task as the payload. They are divided into topics and job types. Currently, supported topics and job types are:
- Order created
- Job complete
- Composite
- Export
- Import
- Render Vray
- Render Webgl
For the Job complete topic you can also further specify one or more completion statuses. That can be useful for handling things differently depending on whether a job has failed or succeeded.
In the following sections, we will focus on specific use cases for certain webhooks and their implementation.
Post-processing imported assets
Webhook: Topic - Job complete | Type: Import | Status: Success
One common automation requirement of projects is to do some type of post processing to imported assets based on the asset type and some sort of naming convention. Because the file name can be controlled by the user before uploading the file to the platform, we can take advantage of it.
Let’s look at an example of adding tags and metadata to assets after they are uploaded to the platform. The first step would be to subscribe to the topic and point it to the URL where you will host your endpoint.
If you want to use your local server to develop, you might need to use port forwarding to allow posting to your external IP address. Here is an article describing how to accomplish that on Linux.
After you subscribe, the next step is to get your server up. In this example we will use a simple node server:
Here is an example of a payload for a successful import job:
{
"topic": "jobs",
"action": "updated",
"data": {
"id": "785d7e7c-2958-43c6-8658-7653c3015420",
"createdBy": "bed6e3c9-e781-42ac-a676-90b0e5ab4c33",
"orgId": "bed6e3c9-e781-42ac-a676-90b0e5ab4c33",
"title": "Import",
"schedulerState": "active",
"type": "import",
"createdAt": "2020-05-22T16:24:26.265Z",
"updatedAt": "2020-05-22T16:24:26.277Z",
"parameters": {},
"taskStatusPending": 1,
"taskStatusRunning": 0,
"taskStatusStopped": 0,
"status": "success",
"taskProgress": 0,
"taskCount": 1,
"taskResultSuccesses": 0,
"taskResultFailures": 0,
"priority": 0,
"tasks": [
{
"id": "03160180-db96-40bd-ab84-d200d6728596",
"createdBy": "bed6e3c9-e781-42ac-a676-90b0e5ab4c33",
"title": "Import",
"type": "import",
"createdAt": "2020-05-22T16:24:26.271Z",
"parameters": { "fileId": "63ef52d4-3753-4097-b110-3ba5250f33ad" },
"orgId": "bed6e3c9-e781-42ac-a676-90b0e5ab4c33",
"jobId": "785d7e7c-2958-43c6-8658-7653c3015420",
"reportProgress": true,
"updatedAt": "2020-05-22T16:24:49.992Z",
"runStatus": "stopped",
"runResult": "Success",
"runProgress": 1,
"runs": [
{
"id": "a499da29-e862-40dc-97d9-679b976aae93",
"taskId": "03160180-db96-40bd-ab84-d200d6728596",
"createdAt": "2020-05-22T16:24:29.350Z",
"updatedAt": "2020-05-22T16:24:49.987Z",
"stoppedAt": "2020-05-22T16:24:49.972Z",
"results": {
"files": [
{
"id": "fa79c338-3186-4bc8-a9b2-090e44502fd9",
"name": "result.json"
}
],
"stderr": {
"id": "b60b5d35-1f45-43af-b83c-7fa5c986c73f",
"name": "stderr"
},
"stdout": {
"id": "dd474cd9-4047-4a02-bed4-4c05dbbd89b6",
"name": "stdout"
}
},
"resultCode": "Success",
"resultMessage": null,
"error": null,
"progress": 1,
"machine": null,
"startedAt": "2020-05-22T16:24:29.339Z",
"GPUTime": null,
"CPUTime": null,
"jobId": "785d7e7c-2958-43c6-8658-7653c3015420",
"reportProgress": true,
"orgId": "bed6e3c9-e781-42ac-a676-90b0e5ab4c33",
"createdBy": "bed6e3c9-e781-42ac-a676-90b0e5ab4c33",
"RAMUsage": null,
"wallClockTime": null,
"requestCPUs": 1,
"limitCPUs": null,
"nodeCPUs": 8,
"requestRAM": "11811160064",
"limitRAM": "11811160064",
"nodeRAM": "31615754240",
"cpuPlatform": "Intel Broadwell",
"machineType": "n1-standard-8",
"gpuType": null,
"instanceID": "1435505946538957800",
"zone": "us-east4-b",
"preemptible": true,
"podId": "b43efff6-9c48-11ea-ab79-42010a960075"
}
]
}
]
},
"webhook": "494add29-dfdd-4243-b96b-e6a7602a0212"
}
From that, we are interested in the result.json file for the only task/run that was executed by this job, which we fetch using the getFile function. Here are some examples of what that response content looks like:
Textures:
Model:
After we get the results object we pass it to our handler function which will perform some post processing of assets which will rely on the asset name and type.
Common use cases for this are:
- Adding metadata to the asset, so it can be fetched by a metadata query action.
- Triggering renders after textures are updated.
- Modifying the review tags on catalog items that use these assets.
Here is an example of implementation:
const getAssetList = require('../task/getAssetList');
const updateAsset = require('../task/updateAsset');
const deleteAssets = require('../task/deleteAssets');
const renderAsset = require('../task/renderConfigurationLayer');
// Imported texture files are expected to be named "<3-4 digit fabric code>_<attribute>".
const textureRegex = /^[0-9]{3,4}_.+/;
// Imported model files are expected to be named "<Product>..._<Item>", where
// Product is one of the known product lines.
const modelRegex = /^(Classic|Modern|Tuxedo).+_.+/;
// Render targets to re-render after a fabric's textures are updated.
// NOTE(review): stageId/assetId are placeholders — fill in the ids for your
// organization before deploying.
const RENDER_ASSETS = {
  SUIT: {
    stageId: '', // render stage id (placeholder)
    assetId: '', // renderable asset id (placeholder)
    width: 2500,
  },
  SWATCH: {
    stageId: '',
    assetId: '',
    width: 1500,
  },
};
// Placeholder credentials — set before deploying.
const orgId = '';
const authToken = '';
/**
 * Webhook entry point: inspects an import job's result.json contents and
 * dispatches to the texture or model post-processing routine based on the
 * file-naming convention of the first imported asset of each type.
 *
 * @param {Object} result - Parsed result.json from the import job; may
 *   contain `texture` and/or `model` arrays of { name, assetId } entries.
 * @returns {Promise<void>}
 */
const handler = async (result) => {
  // Textures follow the "<3-4 digit code>_<attribute>" convention.
  // BUG FIX: the original tested `resultObj.texture[0].name`, but no
  // `resultObj` exists in this scope (the parameter is `result`), which
  // threw a ReferenceError whenever textures were present.
  if (
    result.texture &&
    result.texture.length > 0 &&
    textureRegex.test(result.texture[0].name)
  ) {
    // BUG FIX: updateTextures declares (resultObj, orgId, authToken);
    // calling it with a single argument left the credentials undefined
    // inside. Pass the module-level credentials explicitly.
    await updateTextures(result, orgId, authToken);
  }
  // Models follow the "<Product>..._<Item>" convention.
  if (
    result.model &&
    result.model.length > 0 &&
    modelRegex.test(result.model[0].name)
  ) {
    await updateModels(result);
  }
};
/**
 * Post-processes imported models: derives tags and metadata from the
 * "<Product>_<Item>[_<key>_<value>...]" file-name convention and deletes any
 * materials that were imported alongside the models.
 *
 * Uses the module-level `orgId` / `authToken` credentials.
 *
 * @param {Object} resultObj - Parsed result.json; expects a `model` array
 *   (and optionally a `material` array) of { name, assetId } entries.
 * @returns {Promise<*>} Resolves with the updateAsset response.
 */
const updateModels = async (resultObj) => {
  // Build a string-type metadata entry in the platform's expected shape.
  const assignMetadata = (name, value) => ({
    type: 'String',
    name,
    values: [],
    defaultValue: value,
  });
  // Index the imported models by assetId for O(1) lookup below.
  const ids = resultObj.model.reduce((acc, item) => {
    acc[item.assetId] = item;
    return acc;
  }, {});
  const models = await getAssetList(orgId, authToken, { type: 'model' });
  // Library models that correspond to the assets just imported.
  const matchingModels = models.filter((model) => ids[model.product.id]);
  matchingModels.forEach((model) => {
    const splitName = model.product.name.split('_');
    // Only names with an odd segment count or exactly two segments follow
    // the convention and get tags/metadata assigned.
    if (splitName.length % 2 === 1 || splitName.length === 2) {
      model.product.tags = [splitName[0], splitName[1]];
      const newMeta = [
        assignMetadata('Product', splitName[0]),
        assignMetadata('Item', splitName[1]),
      ];
      // Remaining segments are consumed as <key>_<value> pairs, starting at
      // index 1 — note splitName[1] doubles as the Item and the first key,
      // matching the original behavior.
      for (let i = 1; i < splitName.length - 1; i += 2) {
        newMeta.push(assignMetadata(splitName[i], splitName[i + 1]));
      }
      model.product.metadata = newMeta;
    }
  });
  if (resultObj.material && resultObj.material.length > 0) {
    const materials = resultObj.material.map((mat) => mat.assetId);
    // BUG FIX: this promise was fire-and-forget, so deletion failures were
    // silently dropped. Await it so errors propagate to the webhook handler.
    await deleteAssets(materials, orgId, authToken);
  }
  return updateAsset(matchingModels, orgId, authToken);
};
/**
 * Post-processes imported textures for a single fabric:
 *  1. validates that every imported texture exists in the asset library,
 *  2. attaches fabric/attribute metadata derived from the
 *     "<fabricCode>_<attribute>" file-name convention,
 *  3. updates the fabric catalog item's review tags and metadata,
 *  4. deletes stale same-named textures replaced by this import,
 *  5. re-renders the configured RENDER_ASSETS with the new fabric.
 *
 * @param {Object} resultObj - Parsed result.json; expects a non-empty
 *   `texture` array of { name, assetId } entries sharing one fabric code.
 * @param {string} orgId - Organization id (shadows the module-level const,
 *   so the caller must pass it explicitly).
 * @param {string} authToken - API auth token.
 * @returns {Promise<void>} Resolves after updates, deletions and render
 *   requests have completed.
 * @throws {Error} If an imported texture is missing from the asset library.
 */
const updateTextures = async (resultObj, orgId, authToken) => {
  // Fetch the fabric catalog items and the existing textures in parallel.
  const [catalogList, textures] = await Promise.all([
    getAssetList(orgId, authToken, { tags: 'fabric' }),
    getAssetList(orgId, authToken, { type: 'texture' }),
  ]);
  // Index catalog items by product name for direct lookup by fabric code.
  const fabricsCatalog = catalogList.reduce((acc, entry) => {
    acc[entry.product.name] = entry;
    return acc;
  }, {});
  // All textures from one import share the fabric-code prefix.
  const fabricCode = resultObj.texture[0].name.split('_')[0];
  const resultTextures = resultObj.texture;
  // Library textures whose names match the imported ones.
  const matchingTextures = textures.filter((texture) =>
    resultTextures.some((result) => result.name === texture.product.name)
  );
  // Fail loudly if an imported texture has no counterpart in the library.
  resultTextures.forEach((result) => {
    if (
      !matchingTextures.some((texture) => texture.product.id === result.assetId)
    )
      throw new Error(
        `${result.name} assetId: ${result.assetId} not found in asset library.`
      );
  });
  // Attach fabric/attribute metadata to every imported texture.
  const updates = resultTextures.map((texture) => {
    const splitName = texture.name.split('_');
    return {
      query: {
        id: texture.assetId,
      },
      product: {
        metadata: [
          {
            type: 'String',
            name: 'fabric',
            values: [],
            defaultValue: splitName[0],
          },
          {
            type: 'String',
            name: 'attribute',
            values: [],
            defaultValue: splitName[1],
          },
        ],
      },
    };
  });
  const item = fabricsCatalog[fabricCode].product;
  // Fresh textures move a client-rejected fabric back to pending review.
  // (Callback variables renamed from `item` to avoid shadowing the outer
  // `item` — behavior unchanged.)
  const clientRejectedIndex = item.tags.findIndex(
    (tag) => tag === '_client-rejected'
  );
  if (clientRejectedIndex !== -1)
    item.tags[clientRejectedIndex] = '_client-pending';
  // Ensure the workflow tags are present without duplicating them.
  ['_tab-material', '_review', '_artist-ready'].forEach((tag) => {
    if (!item.tags.includes(tag)) item.tags.push(tag);
  });
  if (!item.metadata.some((meta) => meta.name === '*type'))
    item.metadata.push({
      type: 'String',
      name: '*type',
      values: [],
      defaultValue: 'material',
    });
  if (!item.metadata.some((meta) => meta.name === '*attr'))
    item.metadata.push({
      type: 'String',
      name: '*attr',
      values: [],
      defaultValue: 'Fabric',
    });
  // The catalog item itself is updated alongside the textures.
  updates.push(fabricsCatalog[fabricCode]);
  // Same-named library textures whose ids were NOT part of this import are
  // stale duplicates and get deleted.
  const texturesForDeletion = matchingTextures
    .filter(
      (texture) =>
        !resultTextures.some((result) => result.assetId === texture.product.id)
    )
    .map((texture) => texture.product.id);
  // BUG FIX: previously fire-and-forget; await so deletion failures surface.
  await deleteAssets(texturesForDeletion, orgId, authToken);
  const fabricAssetId = fabricsCatalog[fabricCode].product.id;
  await updateAsset(updates, orgId, authToken);
  // BUG FIX: the original used an async callback inside forEach, so render
  // failures were unhandled rejections and the returned promise resolved
  // before the renders were issued. Issue them in parallel and await all.
  await Promise.all(
    Object.entries(RENDER_ASSETS).map(([type, { assetId, stageId, width }]) => {
      console.log(`Rendering ${fabricCode}(${fabricAssetId}) for ${type}`);
      const options = {
        configuration: {
          Fabric: { assetId: fabricAssetId },
        },
        stageId,
        width: width || 1000,
      };
      return renderAsset(assetId, orgId, authToken, options);
    })
  );
};
// Expose the webhook payload handler as this module's entry point.
module.exports = handler;
These are the support functions used in this code: