/**
* Copyright (c) 2019 Oracle and/or its affiliates. All rights reserved.
* Licensed under the Universal Permissive License v 1.0 as shown at http://oss.oracle.com/licenses/upl.
*/
/* globals app, module, __dirname */
/* jshint esversion: 6 */
var translationProvider = require('../provider/translationProvider').factory.create(),
filterApi = require('./translationFilter'),
persistenceStore = require('./persistenceStore').factory.create();
/**
* Import all Assets & Site files into the Language Service Provider. <br/>
* The steps involved are: <br/>
* <ul>
* <li>For each site & assets file:
* <ul>
* <li>Load up and parse the file's JSON </li>
* <li>Filter the file to remove all non-translatable strings</li>
* <li>Handle empty files, these aren't sent to the Language Service Provider</li>
* <li>Import the filtered non-empty file into the Language Service Provider and retrieve the documentId for the file.</li>
* <li>Create a metadata file in the persistence store that contains the documentId for the file so the translated strings can be later merged with the original file</li>
* <li>Wait for the file to be imported before continuing with the next file</li>
* </ul>
* </ul>
* @constructor
* @alias SampleFileImporter
*/
// Constructor for the sample file importer; it holds no per-instance state,
// everything lives on the prototype.
function SampleFileImporter() {}

// Sentinel documentId recorded for files that contain no translatable content
// and so are never sent to the Language Service Provider; the download phase
// checks for this value instead of asking the provider for a document.
SampleFileImporter.prototype.EMPTY_DOCUMENT_ID = 'emptyDocument';
// Get include fields.
// Get content type from the given content.
// Lookup include fields of the content type from the contentTypes object.
/**
 * Collect the names of the translatable fields for a piece of content.
 * Looks up the content item's type (its "type" property) in the supplied
 * content type definitions and returns the names of the fields whose
 * definition is flagged with translation.translate === true.
 * @param {object} fileContentJson - Parsed content JSON; its "type" property selects the content type.
 * @param {SampleJobManager.contentType[]} contentTypes - Content type definitions, each with a name and a fields array.
 * @returns {string[]} Field names to include in translation; empty when there is no type, no definitions, or no match.
 */
var getContentIncludeFields = function (fileContentJson, contentTypes) {
  var includes = [];
  // Guard clauses: tolerate missing content, missing "type" and missing
  // definitions (the original crashed on a null fileContentJson).
  var type = fileContentJson && fileContentJson.type;
  if (!type || !contentTypes) {
    return includes;
  }
  contentTypes.forEach(function (contentType) {
    if (contentType.name !== type) {
      return;
    }
    // Guard against a type definition without a fields array.
    (contentType.fields || []).forEach(function (field) {
      if (field.translation && field.translation.translate) {
        includes.push(field.name);
      }
    });
  });
  return includes;
};
/**
* Import a binary file into the Language Service Provider. <br/>
* The steps involved are: <br/>
* <ul>
* <li>For the binary file of each asset:
* <ul>
* <li>Import the binary file into the Language Service Provider and retrieve the documentId for the file.</li>
* <li>Create a metadata file in the persistence store that contains the documentId for the file so the translated binary file can be later added to the translated package.</li>
* <li>Wait for the file to be imported before continuing with the next file</li>
* </ul>
* </ul>
* @param {SampleJobManager.JobConfig} jobConfig - The configuration of the connector job to run. This information is held as metadata in the connector for the job.
* @param {string} projectId - The Language Service Provider project identifier corresponding to this job.
* @param {SampleJobManager.file} file - Details on the site or assets file to import
* @param {string} sourceLanguage - The source language of the content being imported
* @param {('site'|'assets')} fileType - The type of file being imported
* @returns {Promise.<persistenceStore.fileMetadata>} The metadata containing information about the file and the imported document created for the file
*/
SampleFileImporter.prototype.importBinaryFile = function (jobConfig, projectId, file, sourceLanguage, fileType) {
  // NOTE(review): self is not used by the active code; it is referenced only by
  // the commented-out "treat a failed import as an empty document" alternative below.
  var self = this;
  return new Promise(function (resolve, reject) {
    // Describe where the binary file lives in the persistence store so the
    // provider can locate and upload it.
    var binaryFileSpec = persistenceStore.getBinaryFileSpec({
      jobId: jobConfig.properties.id,
      fileType: fileType,
      file: file
    });
    // add the binary file
    translationProvider.addBinaryFile(projectId, jobConfig.authToken, binaryFileSpec, file, sourceLanguage, jobConfig.additionalData).then(function (documentEntry) {
      // Poll the provider's import process until it reports FAILED or 100% progress.
      var checkImportProcess = function (id) {
        translationProvider.getDocumentImportProcess(jobConfig.authToken, id).then(function (process) {
          if (process.properties.status !== 'FAILED' && process.properties.progress < 100) {
            console.log('SampleFileImporter.importBinaryFile(): import process at', process.properties.progress, 'percent for file -', file.name);
            // Retry until import process has completed.
            setTimeout(function () {
              checkImportProcess(id);
            }, 3000); // re-check every 3 seconds
          }
          else {
            var writeMetadata = function() {
              // write the file meta-data so we can get the document for the file once translated
              persistenceStore.createFileMetadata({
                jobId: jobConfig.properties.id,
                jobType: jobConfig.jobType,
                fileType: fileType,
                file: file
              }).then(function (fileMetadata) {
                resolve(fileMetadata);
              }).catch(function (error) {
                console.log('SampleFileImporter.importBinaryFile(): unable to create meta-data file for file - ' + file.name);
                reject(error);
              });
            };
            if (process.properties.status === 'FAILED') {
              var error = 'import process FAILED for file - ' + file.name;
              console.log('SampleFileImporter.importBinaryFile():', error);
              // Mark the job as failed.
              reject(error);
              // Another option is to mark the file an empty document.
              // In the download phase, the source binary file would be copied to the target locale folders.
              // E.g.
              // file.documentId = self.EMPTY_DOCUMENT_ID;
              // writeMetadata();
            }
            else {
              // console.log('SampleFileImporter.importBinaryFile(): import process completed for file -', file.name, 'id', id);
              // Import succeeded: record the provider's document id on the file so the
              // translated binary can be fetched in the download phase, then persist the metadata.
              file.documentId = id;
              writeMetadata();
            }
          }
        }).catch(function(getDocImportProcessError) {
          // Polling itself failed (e.g. auth/network); fail the import rather than retrying.
          console.log('importBinaryFile getDocumentImportProcess error', getDocImportProcessError);
          console.log('SampleFileImporter.importBinaryFile(): unable to get import process for file -', file.name);
          reject(getDocImportProcessError);
        });
      };
      checkImportProcess(documentEntry.properties.id);
    }).catch(function (error) {
      console.log('SampleFileImporter.importBinaryFile(): error importing file - ' + file.name);
      console.log(error);
      // Stringified so a caller logging the rejection sees details, not "[object Object]".
      reject(JSON.stringify(error));
    });
  });
};
/**
* Import a file into the Language Service Provider. <br/>
* The steps involved are: <br/>
* <ul>
* <li>For each site & assets file:
* <ul>
* <li>Load up and parse the file's JSON </li>
* <li>Filter the file to remove all non-translatable strings</li>
* <li>Handle empty files, these aren't sent to the Language Service Provider</li>
* <li>Import the filtered non-empty file into the Language Service Provider and retrieve the documentId for the file.</li>
* <li>Create a metadata file in the persistence store that contains the documentId for the file so the translated strings can be later merged with the original file</li>
* <li>Wait for the file to be imported before continuing with the next file</li>
* </ul>
* </ul>
* @param {SampleJobManager.JobConfig} jobConfig - The configuration of the connector job to run. This information is held as metadata in the connector for the job.
* @param {string} projectId - The Language Service Provider project identifier corresponding to this job.
* @param {SampleJobManager.file} file - Details on the site or assets file to import
* @param {string} sourceLanguage - The source language of the content being imported
* @param {('site'|'assets')} fileType - The type of file being imported
* @param {SampleJobManager.contentType[]} contentTypes - Array of content types.
* @returns {Promise.<persistenceStore.fileMetadata>} The metadata containing information about the file and the imported document created for the file
*/
SampleFileImporter.prototype.importFile = function (jobConfig, projectId, file, sourceLanguage, fileType, contentTypes) {
  var self = this;
  return new Promise(function (resolve, reject) {
    // meta-data for the file is created after the file is imported,
    // so see if the meta-data for this file already exists
    persistenceStore.getFileMetadata({
      jobId: jobConfig.properties.id,
      fileType: fileType,
      file: file
    }).then(function (fileMetadata) {
      // meta-data already exists so the file has already been imported, we're done
      resolve(fileMetadata);
    }).catch(function () {
      // no meta-data yet: the file has not been imported, so read it in
      persistenceStore.getSourceFile({
        jobId: jobConfig.properties.id,
        fileType: fileType,
        filePath: file.path
      }).then(function (fileContents) {
        var originalFile = fileContents && fileContents.length > 0 ? JSON.parse(fileContents) : '';
        // ToDo: Handle filtered empty files
        if (!originalFile || Object.keys(originalFile).length === 0) {
          var message = 'SampleFileImporter.importFile(): file is empty - ' + file.name;
          console.log(message);
          // Empty files are NOT sent to the LSP; use 'emptyDocument' as documentId so that we don't try to retrieve it
          file.documentId = self.EMPTY_DOCUMENT_ID;
          // write the file meta-data so we can get the document for the file once translated
          persistenceStore.createFileMetadata({
            jobId: jobConfig.properties.id,
            jobType: jobConfig.jobType,
            fileType: fileType,
            file: file
          }).then(function (fileMetadata) {
            resolve(fileMetadata);
          }).catch(function (error) {
            console.log('SampleFileImporter.importFile(): unable to create meta-data file for file - ' + file.name);
            reject(error);
          });
        } else {
          // only the fields flagged translatable in the content type are sent
          var includes = getContentIncludeFields(originalFile, contentTypes);
          // extract only the translatable properties from the document
          var content = JSON.stringify(filterApi.getTranslatableProperties(originalFile, fileType, includes));
          // add the document
          translationProvider.addDocument(projectId, jobConfig.authToken, file.name, content, sourceLanguage, jobConfig.additionalData).then(function (documentEntry) {
            // Poll the provider's import process until it reports FAILED or 100% progress.
            var checkImportProcess = function (id) {
              translationProvider.getDocumentImportProcess(jobConfig.authToken, id).then(function (process) {
                if (process.properties.status !== 'FAILED' && process.properties.progress < 100) {
                  console.log('SampleFileImporter.importFile(): import process at', process.properties.progress, 'percent for file -', file.name);
                  // Retry until import process has completed.
                  setTimeout(function () {
                    checkImportProcess(id);
                  }, 3000); // re-check every 3 seconds
                }
                else {
                  var writeMetadata = function() {
                    // write the file meta-data so we can get the document for the file once translated
                    persistenceStore.createFileMetadata({
                      jobId: jobConfig.properties.id,
                      jobType: jobConfig.jobType,
                      fileType: fileType,
                      file: file
                    }).then(function (fileMetadata) {
                      resolve(fileMetadata);
                    }).catch(function (error) {
                      console.log('SampleFileImporter.importFile(): unable to create meta-data file for file - ' + file.name);
                      reject(error);
                    });
                  };
                  if (process.properties.status === 'FAILED') {
                    var error = 'import process FAILED for file - ' + file.name;
                    console.log('SampleFileImporter.importFile():', error);
                    // Mark the job as failed.
                    reject(error);
                  }
                  else {
                    // Import succeeded: record the provider's document id so the
                    // translated strings can be retrieved and merged later.
                    file.documentId = id;
                    writeMetadata();
                  }
                }
              }).catch(function(getDocImportProcessError) {
                console.log('importFile getDocumentImportProcess error', getDocImportProcessError);
                console.log('SampleFileImporter.importFile(): unable to get import process for file -', file.name);
                reject(getDocImportProcessError);
              });
            };
            checkImportProcess(documentEntry.properties.id);
          }).catch(function (httpError) {
            var error = {
              errorCode: httpError && httpError.statusCode || 500,
              errorMessage: httpError && httpError.statusMessage || 'unknown error'
            };
            console.log('SampleFileImporter.importFile(): error importing file - ' + file.path);
            console.log(JSON.stringify(error));
            reject(error);
          });
        }
      }).catch(function (readError) {
        // BUG FIX: previously this chain had no catch, so a failure to read the
        // source file or a JSON.parse error left the outer Promise unsettled
        // forever and the job hung; reject so the job can be marked as failed.
        console.log('SampleFileImporter.importFile(): unable to read source file - ' + file.path);
        console.log(readError);
        reject(readError);
      });
    });
  });
};
/**
* Create a list of functions that returns a promise for each file in the list.<br/>
* The promises are chained in a "return p1.then(return p2.then(return p3.then(...)))" model to avoid overloading the Language Service Provider.
* @param {SampleJobManager.JobConfig} jobConfig - The configuration of the connector job to run. This information is held as metadata in the connector for the job.
* @param {SampleJobManager.JobDetails} jobDetails - The details of the combined job.json files.
* @param {('site'|'assets')} fileType - The type of file being imported
* @param {SampleJobManager.file[]} files - Array of files to import.
* @param {SampleJobManager.contentType[]} contentTypes - Array of content types.
* @returns {function[]} An array of functions, each of which returns a promise that resolves when the file is imported.
*/
SampleFileImporter.prototype.importFileList = function (jobConfig, jobDetails, fileType, files, contentTypes) {
  var self = this;
  // Wrap a single file in a deferred import task: nothing is executed until
  // the returned function is invoked by the serial promise chain.
  var makeImportTask = function (file) {
    return function () {
      return self.importFile(jobConfig, jobConfig.properties.projectId, file, jobDetails.sourceLanguage, fileType, contentTypes);
    };
  };
  // Tolerate a missing files array by producing no tasks.
  var fileList = files || [];
  return fileList.map(makeImportTask);
};
/**
 * Create a list of functions, each of which returns a promise to import one binary file.<br/>
 * The promises are chained serially by the caller to avoid overloading the Language Service Provider.
 * @param {SampleJobManager.JobConfig} jobConfig - The configuration of the connector job to run.
 * @param {SampleJobManager.JobDetails} jobDetails - The details of the combined job.json files.
 * @param {('site'|'assets')} fileType - The type of file being imported
 * @param {SampleJobManager.file[]} binaryFiles - Array of binary files to import.
 * @returns {function[]} An array of functions, each of which returns a promise that resolves when the binary file is imported.
 */
SampleFileImporter.prototype.importFolderList = function (jobConfig, jobDetails, fileType, binaryFiles) {
  var self = this;
  // BUG FIX: guard against a missing binaryFiles array (importFiles passes
  // "jobDetails.assets && jobDetails.assets.binaryFiles", which can be
  // undefined); this now matches importFileList's "(files || [])" behavior.
  return (binaryFiles || []).map(function (dirent) {
    return function () {
      return self.importBinaryFile(jobConfig, jobConfig.properties.projectId, dirent, jobDetails.sourceLanguage, fileType);
    };
  });
};
/**
* Import all the Site & Assets files into the Language Service Provider for this job <br/>
* The promises are chained in a "return p1.then(return p2.then(return p3.then(...)))" model to avoid overloading the Language Service Provider.
* @param {SampleJobManager.JobConfig} jobConfig - The configuration of the connector job to run. This information is held as metadata in the connector for the job.
* @param {SampleJobManager.JobDetails} jobDetails - The details of the combined job.json files.
* @returns {Promise} A Promise that resolves when all files have been imported.
*/
SampleFileImporter.prototype.importFiles = function (jobConfig, jobDetails) {
  var self = this;
  return new Promise(function (resolve, reject) {
    // make sure necessary languages are there and then import all the JSON files into the LSP server
    if (jobDetails.sourceLanguage && jobDetails.targetLanguages) {
      // import the assets & the site into the LSP
      var importPromises = [];
      if (jobDetails.assets) {
        importPromises = importPromises.concat(self.importFileList(jobConfig, jobDetails, 'assets', jobDetails.assets && jobDetails.assets.files, jobDetails.assets && jobDetails.assets.contentTypes));
        importPromises = importPromises.concat(self.importFolderList(jobConfig, jobDetails, 'assets', jobDetails.assets && jobDetails.assets.binaryFiles));
      }
      if (jobDetails.site) {
        // site files have no content types, so none are passed
        importPromises = importPromises.concat(self.importFileList(jobConfig, jobDetails, 'site', jobDetails.site && jobDetails.site.files));
      }
      // now run through and import all the files
      // chain the promises in the array so that they execute serially as:
      // return p1.then(return p2.then(return p3.then(...)));
      var doImport = importPromises.reduce(function (previousPromise, nextPromise) {
        return previousPromise.then(function () {
          // wait for the previous promise to complete and then return a new promise for the next
          return nextPromise();
        });
      },
      // Start with a previousPromise value that is a resolved promise
      Promise.resolve());
      // once all files are imported, can continue
      doImport.then(function () {
        console.log('SampleFileImporter.importFiles(): all files imported');
        resolve();
      }).catch(function (e) {
        console.log('SampleFileImporter.importFiles(): error importing files');
        console.log(e);
        reject('error importing files');
      });
    } else {
      // BUG FIX: the message previously claimed only the source language was
      // missing, but this branch is also reached when targetLanguages is absent.
      reject('SampleFileImporter.importFiles(): no source or target languages specified in job.json files');
    }
  });
};
// Export a shared singleton instance of the sample file importer
// (the importer is stateless, so a single instance is safe to share).
module.exports = new SampleFileImporter();