Salesforce File Migration (ContentVersion) via Node.js

As part of a project I was assigned the task of moving ContentVersion records (files) from one org (legacy) to another. For this purpose I used Node.js as my go-to language. The first task was to download the file from the legacy org and then upload it to the active org, without storing the file in the local file system.

One important thing to note here: to insert files into ContentVersion, there are two different approaches.
– Use the JSON structure to insert a record into ContentVersion for content smaller than 37 MB.
– Use the multipart method to insert the blob if the size of the content is greater than 37 MB.

In the below code you will see that I am fetching the latest content version file and inserting it back in the same org. But you can obviously connect with another org and push the file into that org.

const jsforce = require("jsforce");
const axios = require("axios");
var FormData = require("form-data");
const getStream = require("get-stream");
const mime = require("mime-types");
// jsforce connection to the source org. Swap loginUrl to
// https://test.salesforce.com for a sandbox.
const conn = new jsforce.Connection({
loginUrl: "https://login.salesforce.com",
});
// Org credentials — fill these in before running (password may need the
// security token appended, depending on org settings).
const username = "";
const password = "";
/**
 * Fetches the most recently created ContentVersion from the org and
 * re-inserts it (same org here — connect a second jsforce Connection to
 * push into a different org instead).
 *
 * Files at or below 37 MB are inserted via the JSON/base64 REST create;
 * larger files use the multipart form upload, which streams the body and
 * avoids the base64 size limit.
 *
 * @throws {Error} if login fails, no ContentVersion record exists, or the
 *   download/insert request fails.
 */
const main = async () => {
  await conn.login(username, password);

  const sourceContentVersionFile = await conn.query(
    `SELECT Id, Title, ContentSize, VersionData, PathOnClient
     FROM ContentVersion
     ORDER BY CreatedDate DESC
     LIMIT 1`
  );

  // Guard against an empty org — records[0] would otherwise be undefined
  // and crash below with an unhelpful TypeError.
  if (sourceContentVersionFile.records.length === 0) {
    throw new Error("No ContentVersion records found in the source org.");
  }
  const contentVersionRecord = sourceContentVersionFile.records[0];

  // ~37 MB: above this, the JSON/base64 create is rejected, so switch to
  // the multipart blob insert.
  const MULTIPART_THRESHOLD_BYTES = 37000000;

  if (contentVersionRecord.ContentSize > MULTIPART_THRESHOLD_BYTES) {
    // Large file: stream the download straight into a multipart upload.
    const fileStream = await getFile(contentVersionRecord, false);
    const formData = createFormData(contentVersionRecord, fileStream);
    const URL =
      conn.instanceUrl + "/services/data/v51.0/sobjects/ContentVersion";
    await axios({
      method: "post",
      // Lift axios's default body-size caps; the file exceeds them.
      maxContentLength: Infinity,
      maxBodyLength: Infinity,
      url: URL,
      headers: {
        Authorization: "Bearer " + conn.accessToken,
        // Boundary must match the one fixed in createFormData.
        "Content-Type": `multipart/form-data; boundary=\"boundary_string\"`,
      },
      data: formData,
    });
  } else {
    // Small file: download as a base64 string and insert via the JSON API.
    const base64Body = await getFile(contentVersionRecord, true);
    await conn.sobject("ContentVersion").insert({
      Title: contentVersionRecord.Title,
      PathOnClient: contentVersionRecord.PathOnClient,
      VersionData: base64Body,
      FirstPublishLocationId: "0012w00000rTbXNAA0", //Id to which the content version needs to be linked
      Origin: "H",
    });
  }
};
/**
 * Downloads the binary body of a ContentVersion record from the org.
 *
 * @param {object} data - ContentVersion record; its VersionData field holds
 *   the relative REST path to the file body.
 * @param {boolean} generateBase64String - when true, buffer the entire
 *   response and resolve with a base64 string; when false, resolve with the
 *   raw readable stream (suitable for multipart upload).
 * @returns {Promise<string|object>} base64 string or readable stream.
 */
const getFile = async (data, generateBase64String) => {
  const response = await axios({
    method: "get",
    url: conn.instanceUrl + data.VersionData,
    headers: { Authorization: "Bearer " + conn.accessToken },
    responseType: "stream",
  });
  return generateBase64String
    ? getStream(response.data, { encoding: "base64" })
    : response.data; // hand back the stream untouched
};
/**
 * Builds the multipart form body for a ContentVersion insert: a JSON
 * metadata part ("entity_content") followed by the binary part
 * ("VersionData").
 *
 * @param {object} data - source ContentVersion record (Title, PathOnClient).
 * @param {object} file - readable stream of the file body.
 * @returns {FormData} form ready to be posted to the ContentVersion endpoint.
 */
const createFormData = (data, file) => {
  const entityContent = JSON.stringify({
    FirstPublishLocationId: "0012w00000rTbXNAA0",
    Title: data.Title,
    PathOnClient: data.PathOnClient,
    Origin: "H",
  });
  const form = new FormData();
  // Fixed boundary so the Content-Type header in the caller can match it.
  form.setBoundary("boundary_string");
  form.append("entity_content", entityContent, {
    contentType: "application/json",
  });
  form.append("VersionData", file, {
    filename: data.PathOnClient,
    contentType: mime.lookup(data.PathOnClient),
  });
  return form;
};
// Surface async failures (bad credentials, query/insert errors) instead of
// leaving an unhandled promise rejection, and signal failure to the shell.
main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
view raw main.js hosted with ❤ by GitHub

Couple of things:
– The above code shows the migration of a single file, however the same logic can be used to migrate any number of files.
– I am pulling and pushing the file within the same org. You can however connect to multiple Salesforce orgs and move the file between them.

Hope it helps!

2 thoughts on “Salesforce File Migration (ContentVersion) via Node.js”

  1. Hey, I am trying to do a salesforce file migration for attachment from one org to another
    const sfAuth = require(‘./sfAuth.js’);
    const axios = require(‘axios’);
    const getStream = require(‘get-stream’);
    var fs= require(“fs”);
    var jsforce = require(‘jsforce’);
    var username = ”;
    var password = ”;
    const conn1 = new jsforce.Connection({
    loginUrl: “https://test.salesforce.com”
    });
    doProcessing();
    async function doProcessing(){
    let data = [];
    const conn = await sfAuth();
    await fetchData(conn);
    }
    async function fetchData(conn){
    await conn1.login(username,password);
    var records = [];
    await conn.query(“SELECT Id, Name FROM Attachment”)
    .on(“record”, function(record) {
    records.push(record);
    })
    .on(“end”, function() {
    console.log(“total in database : ” + records.length);
    createAttachment(records,conn);
    })
    .on(“error”, function(err) {
    console.error(err);
    })
    .run({ autoFetch : true, maxFetch : 10 });
    }
    async function createAttachment(records,conn){
    var attachmentId = [];
    var k = 0;
    var attrest;
    var flds1 = {};
    var record = [];
    for(var i=k;i<records.length;i++){
    var recs = [];
    if(attachmentId.length<10){
    attachmentId.push(records[i].Id)
    }
    if(attachmentId.length === 10){
    await conn.sobject("Attachment").select('Id,Name,Body,BodyLength,ParentId,ContentType').where("Id in ('" + attachmentId.join("','") + "')")
    .execute(function(err, records) {
    console.log("total in database loop : " + records.length);
    //Once resolved change 1 to records.length
    for (var i=0; i {
    console.log(‘The data is’+JSON.stringify(data)+’ ‘+conn1.accessToken);
    const file = await axios({
    method: ‘get’,
    url: conn1.instanceUrl + data.Body,
    headers: {
    Authorization: “Bearer ” + conn1.accessToken,
    },
    ContentType: data.ContentType
    });
    return await getStream(file.data, { encoding: “base64” });
    };
    But attachment record file is blank. Could you please check and let me know where I am wrong.

Leave a Reply to techinjungle Cancel reply

Fill in your details below or click an icon to log in:

WordPress.com Logo

You are commenting using your WordPress.com account. Log Out /  Change )

Twitter picture

You are commenting using your Twitter account. Log Out /  Change )

Facebook photo

You are commenting using your Facebook account. Log Out /  Change )

Connecting to %s

%d bloggers like this: