Source: job-manager/sampleFilePersistenceStore.js

/**
 * Copyright (c) 2019 Oracle and/or its affiliates. All rights reserved.
 * Licensed under the Universal Permissive License v 1.0 as shown at http://oss.oracle.com/licenses/upl.
 */
/* globals app, module, __dirname */
/* jshint esversion: 8 */
var fs = require('fs'),
	path = require('path'),
	persistenceStoreApi = require('./persistenceStore').api,
	extract = require('extract-zip');
var gulp = require('gulp'),
	zip = require('gulp-zip');

// initialize the "out" folder for the persistence data
var persistenceDir = path.join(__dirname, 'out');
if (!fs.existsSync(persistenceDir)) {
	fs.mkdirSync(persistenceDir);
}
var connectorDir = path.join(persistenceDir, 'connector-data');
if (!fs.existsSync(connectorDir)) {
	fs.mkdirSync(connectorDir);
}
var translationJobsDir = path.join(connectorDir, 'translation-jobs');
if (!fs.existsSync(translationJobsDir)) {
	fs.mkdirSync(translationJobsDir);
}

/**
 * Manage persistence of the job during the job's lifecycle. <br/>
 * This module is responsible for saving the state of the job and managing the state during startup/shutdown & failover. <br/>
 * <ul>
 *   <li>Specifically, it needs to: 
 *     <ul>
 *       <li>Keep track of all the jobs.</li>
 *       <li>For each job: 
 *         <ul>
 *           <li>Store metadata about the job, mapping it to the project in the Language Service Provider.</li>
 *           <li>Store/Unpack the zip file provided by the OCE translation job.</li>
 *           <li>Store metadata about each file in the zip mapping the file to the entry in the Language Service Provider.</li>
 *           <li>Store all the translations for each of the files as they become available.</li>
 *           <li>Create a final zip of all the translated files in the format required for ingestion into the OCE translation jobs.</li>
 *           <li>On job delete, remove all artifacts associated with the job.</li>
 *         </ul>
 *       </li>
 *     </ul>
 *   </li>
 * </ul>
 * This is a sample implementation that uses the filesystem as the persistence store.  The structure on the filesystem is: 
 * <ul>
 *   <li>"connector-data" Folder for all the connector data.
 *     <ul>
 *       <li>"translation-jobs" Folder for all the translation jobs created.
 *         <ul>
 *           <li> {job1} Random identifier generated for each job creation
 *             <ul>
 *               <li> "translation-source" Folder containing all the source files for the job</li>
 *               <li> "translation-target" Folder containing all the combined translated files for the job</li>
 *             </ul>
 *           </li>
 *           <li> {job2} Random identifier generated for each job creation</li>
 *           <li> ...</li>
 *         </ul>
 *       </li>
 *     </ul>
 *   </li>
 * </ul>
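 *
 * The usage sketch below is illustrative only and not part of the sample: it shows how a caller might
 * exercise the store. The require path and the argument values are assumptions for the example.
 * @example
 * var store = require('./sampleFilePersistenceStore');
 * store.createJob({
 *     jobName: 'exampleJob', // hypothetical values, for illustration only
 *     workflowId: 'workflow-1',
 *     authToken: 'token',
 *     additionalData: ''
 * }).then(function (job) {
 *     // the generated job id is returned under properties.id
 *     return store.getJob({ jobId: job.properties.id });
 * }).then(function (job) {
 *     console.log(job.status); // 'CREATED'
 * });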
 * @constructor
 * @alias SampleFilePersistenceStore
 * @augments PersistenceStoreInterface
 */
var SampleFilePersistenceStore = function () {};

SampleFilePersistenceStore.prototype = Object.create(persistenceStoreApi.prototype);

/** @inheritdoc */
SampleFilePersistenceStore.prototype.getAllJobs = function () {
	var self = this;

	return new Promise(function (resolve, reject) {
		// get all the jobs
		if (fs.existsSync(translationJobsDir)) {
			var jobDirs = fs.readdirSync(translationJobsDir),
				allJobs = [],
				allConfigs = [];

			// Create an array of promises for all the jobs
			allJobs = jobDirs.filter(function (dirName) {
				return dirName.startsWith('job');
			}).map(function (jobId) {
				// in this persistence API, the directory name is the same as the jobId 
				// create a function to return the promise to get the job config
				return function () {
					return self.getJob({
						jobId: jobId
					}).then(function (jobConfig) {
						// store the config
						allConfigs.push(jobConfig);

						// allow promise chaining
						return Promise.resolve();
					});
				};
			});

			// now run through and get all the job configs
			// chain the promises in the array so that they execute sequentially: p1, then p2, then p3, ...
			var getJobConfigs = allJobs.reduce(function (previousPromise, nextPromise) {
					return previousPromise.then(function () {
						// wait for the previous promise to complete and then return a new promise for the next job config
						return nextPromise();
					});
				},
				// Start with a previousPromise value that is a resolved promise 
				Promise.resolve());

			// wait until we have all available configs
			getJobConfigs.then(function () {
				resolve(allConfigs);
			}).catch(function (error) {
				console.log('SampleFilePersistenceStore.getAllJobs(): failed to get all jobs, returning what we were able to get');
				resolve(allConfigs);
			});
		} else {
			// no jobs, return empty list
			resolve([]);
		}
	});
};

//
// Job CRUD
// 
/** @inheritdoc */
SampleFilePersistenceStore.prototype.createJob = function (args) {
	return new Promise(function (resolve, reject) {
		// create a job based on the name & random ID
		var jobName = args.jobName,
			workflowId = args.workflowId,
			authToken = args.authToken,
			additionalData = args.additionalData;

		// generate a random numeric identifier to use as the job directory name
		var jobId = 'job' + Math.floor(100000 + Math.random() * 900000);

		// create the job directory
		var jobDir = path.join(translationJobsDir, jobId);
		if (!fs.existsSync(jobDir)) {
			fs.mkdirSync(jobDir);
		}

		// write out the initial data
		var jobMetadataFile = path.join(jobDir, jobId + '.json'),
			jobMetadata = {
				name: jobName,
				workflowId: workflowId,
				authToken: authToken, // store the authToken so we can re-start the job
				status: 'CREATED',
				progress: 0,
				properties: {
					id: jobId
				},
				additionalData: additionalData
			};

		fs.writeFile(jobMetadataFile, JSON.stringify(jobMetadata), function (err) {
			if (err) {
				console.log('SampleFilePersistenceStore.createJob(): failed to write job.json file for: ' + jobId);
				reject({
					errorCode: 500,
					errorMessage: JSON.stringify(err)
				});
			} else {
				// return the generated job metadata
				resolve(jobMetadata);
			}
		});
	}).catch(function (err) {
		console.log('SampleFilePersistenceStore.createJob(): failed to create job for: ' + args.jobName);
		// re-reject so callers see the failure; pass through errors already in { errorCode, errorMessage } format
		return Promise.reject(err && err.errorCode ? err : {
			errorCode: 500,
			errorMessage: JSON.stringify(err)
		});
	});
};
/** @inheritdoc */
SampleFilePersistenceStore.prototype.getJob = function (args) {
	return new Promise(function (resolve, reject) {
		// get the job folder
		var jobId = args.jobId,
			jobDir = path.join(translationJobsDir, jobId),
			jobMetadataFile = path.join(jobDir, jobId + '.json');

		// read in the file
		if (fs.existsSync(jobMetadataFile)) {
			fs.readFile(jobMetadataFile, function (err, data) {
				if (err) {
					console.log('SampleFilePersistenceStore.getJob(): failed to read job.json file for: ' + jobId);
					reject({
						errorCode: 500,
						errorMessage: JSON.stringify(err)
					});
				} else {
					try {
						resolve(JSON.parse(data));
					} catch (parseErr) {
						console.log('SampleFilePersistenceStore.getJob(): failed to parse job.json file for: ' + jobId);
						reject({
							errorCode: 500,
							errorMessage: JSON.stringify(parseErr)
						});
					}
				}
			});
		} else {
			// no job file, reject
			var errorMessage = 'SampleFilePersistenceStore.getJob(): no job data available for job: ' + jobId;
			console.log(errorMessage);
			reject({
				errorCode: 500,
				errorMessage: errorMessage
			});
		}
	});
};
/** @inheritdoc */
SampleFilePersistenceStore.prototype.updateJob = function (args) {
	var self = this;
	return new Promise(function (resolve, reject) {
		// update the job
		var jobId = args.properties.id,
			jobDir = path.join(translationJobsDir, jobId),
			jobMetadataFile = path.join(jobDir, jobId + '.json');

		self.getJob({
			jobId: jobId
		}).then(function (jobMetadata) {
			// update status if supplied
			if (args.status) {
				jobMetadata.status = args.status;
				delete jobMetadata.statusMessage;
			}
			if (args.statusMessage) {
				jobMetadata.statusMessage = args.statusMessage;
			}
			if (args.translatedZipFile) {
				jobMetadata.translatedZipFile = args.translatedZipFile;
			}
			if (args.progress) {
				jobMetadata.progress = args.progress;
			}

			// merge the job properties metadata
			Object.keys(args.properties || {}).forEach(function (key) {
				jobMetadata.properties[key] = args.properties[key];
			});

			// write out the data
			fs.writeFile(jobMetadataFile, JSON.stringify(jobMetadata), function (err) {
				if (err) {
					console.log('SampleFilePersistenceStore.updateJob(): failed to write job.json file for: ' + jobId);
					reject({
						errorCode: 500,
						errorMessage: JSON.stringify(err)
					});
				} else {
					// return the merged job object
					resolve(jobMetadata);
				}
			});
		}).catch(function (err) {
			console.log('SampleFilePersistenceStore.updateJob(): failed to get job.json file for: ' + jobId);
			reject({
				errorCode: 500,
				errorMessage: JSON.stringify(err)
			});
		});
	});
};
/** @inheritdoc */
SampleFilePersistenceStore.prototype.deleteJob = function (args) {
	return new Promise(function (resolve, reject) {
		// delete the job
		var jobId = args.jobId,
			jobFilePath = path.join(translationJobsDir, jobId);

		// delete the directory
		try {
			if (fs.existsSync(jobFilePath)) {
				var deleteFolderRecursive = function (dirPath) {
					if (fs.existsSync(dirPath)) {
						fs.readdirSync(dirPath).forEach(function (file) {
							var curPath = path.join(dirPath, file);
							if (fs.lstatSync(curPath).isDirectory()) {
								deleteFolderRecursive(curPath);
							} else {
								fs.unlinkSync(curPath);
							}
						});
						fs.rmdirSync(dirPath);
					}
				};
				deleteFolderRecursive(jobFilePath);
			}
			resolve();
		} catch (err) {
			console.log('SampleFilePersistenceStore.deleteJob(): failed to delete job folder for: ' + jobId);
			reject({
				errorCode: 500,
				errorMessage: JSON.stringify(err)
			});
		}
	});
};

var extractZip = function (filePath, targetPath) {

	async function _extract(filePath, targetPath) {
		var err;
		try {
			// console.log(' - extract ' + filePath + ' to ' + targetPath);
			await extract(filePath, {
				dir: targetPath
			});
			// console.log(' - extraction complete');
			return err;
		} catch (e) {
			console.log('ERROR: failed to extract ' + filePath);
			console.log(e);
			err = 'err';
			return err;
		}
	}

	return _extract(filePath, targetPath);

};

//
// Source file CRUD operations
// 
/** @inheritdoc */
SampleFilePersistenceStore.prototype.importSourceZip = function (args) {
	return new Promise(function (resolve, reject) {
		// save and unzip the zipfile if provided
		var jobId = args.jobId,
			zipFile = args.zipFile,
			jobDir = path.join(translationJobsDir, jobId),
			jobTranslationFile = path.join(jobDir, jobId + '.zip'),
			zipFileDir = path.join(jobDir, 'translation-source');

		// create the translation source folder
		// this will contain the expanded zip file
		if (!fs.existsSync(zipFileDir)) {
			fs.mkdirSync(zipFileDir);
		}

		// save the zipfile 
		fs.writeFile(jobTranslationFile, zipFile, function (err) {
			if (err) {
				console.log('SampleFilePersistenceStore.importSourceZip(): failed to save zip file');
				reject({
					errorCode: 500,
					errorMessage: JSON.stringify(err)
				});
			} else {
				console.log('SampleFilePersistenceStore.importSourceZip(): Saved file to translate: ' + jobTranslationFile);

				// now we need to unzip the file locally
				extractZip(jobTranslationFile, zipFileDir)
					.then(function (err) {
						// if an error occurred, report it
						if (err) {
							console.log('SampleFilePersistenceStore.importSourceZip(): failed to extract zipfile: ' + jobTranslationFile);
							reject({
								errorCode: 500,
								errorMessage: JSON.stringify(err)
							});
							return;
						}

						// we're done
						resolve();
					});
			}
		});
	});
};
/** @inheritdoc */
SampleFilePersistenceStore.prototype.getSourceJobJSON = function (args) {
	// Get the job.json files 
	return new Promise(function (resolve, reject) {
		// The folder structure differs between "site" and "assets" job types so combine them as required
		// get the translation source files directory
		var jobId = args.jobId,
			jobDir = path.join(translationJobsDir, jobId),
			translationSrcDir = path.join(jobDir, 'translation-source'),
			assetsJSON,
			siteJSON;

		// check for "assets only"
		var assetsOnlyFile = path.join(translationSrcDir, 'job.json');
		if (fs.existsSync(assetsOnlyFile)) {
			// found top-level asset file - get the asset job.json
			assetsJSON = fs.readFileSync(assetsOnlyFile);

			resolve({
				type: 'ASSETS',
				assetsJSON: assetsJSON
			});
		} else {
			// check for "sites" folder
			var sitesFile = path.join(translationSrcDir, 'site', 'job.json');
			if (fs.existsSync(sitesFile)) {
				siteJSON = fs.readFileSync(sitesFile);
			}

			// check for "assets" folder
			var assetsFile = path.join(translationSrcDir, 'assets', 'job.json');
			if (fs.existsSync(assetsFile)) {
				assetsJSON = fs.readFileSync(assetsFile);
			}

			if (siteJSON || assetsJSON) {
				// this is a site translation file, return both assets & site job.json data
				resolve({
					type: 'SITE',
					siteJSON: siteJSON,
					assetsJSON: assetsJSON
				});
			} else {
				// didn't find any job.json files - invalid zip
				var errorMessage = 'SampleFilePersistenceStore.getSourceJobJSON() - unable to locate any job.json files in imported zip';
				console.log(errorMessage);
				reject({
					errorCode: 404,
					errorMessage: errorMessage
				});
			}
		}
	});
};
/** @inheritdoc */
SampleFilePersistenceStore.prototype.getSourceFile = function (args) {
	return new Promise(function (resolve, reject) {
		var jobId = args.jobId,
			jobDir = path.join(translationJobsDir, jobId),
			translationSrcDir = path.join(jobDir, 'translation-source'),
			assetJobJSON = path.join(translationSrcDir, 'job.json'),
			fileSrcDir = fs.existsSync(assetJobJSON) ? path.join(translationSrcDir, 'root') : path.join(translationSrcDir, args.fileType, 'root'),
			filePath = path.join(fileSrcDir, args.filePath);

		// read in the requested file
		if (fs.existsSync(filePath)) {
			resolve(fs.readFileSync(filePath));
		} else {
			console.log('SampleFilePersistenceStore.getSourceFile(): unable to locate file: ' + filePath);
			reject({
				errorCode: 404,
				errorMessage: 'unable to locate file: ' + args.filePath
			});
		}
	});
};
/** @inheritdoc */
SampleFilePersistenceStore.prototype.getBinaryFilePath = function (args) {
	return path.join('files', args.contentItemId);
};
/** @inheritdoc */
SampleFilePersistenceStore.prototype.getFolderFiles = function (args) {
	var jobId = args.jobId,
		jobDir = path.join(translationJobsDir, jobId),
		translationSrcDir = path.join(jobDir, 'translation-source'),
		assetJobJSON = path.join(translationSrcDir, 'job.json'),
		fileSrcDir = fs.existsSync(assetJobJSON) ? path.join(translationSrcDir, 'root') : path.join(translationSrcDir, args.fileType, 'root'),
		folderPath = path.join(fileSrcDir, args.folderPath);

	// read in the requested folder
	if (fs.existsSync(folderPath)) {
		var files = fs.readdirSync(folderPath);
		var entries = files.map(function (file) {
			return {
				id: args.id,
				name: file,
				folderPath: args.folderPath
			};
		});
		return entries;
	} else {
		return [];
	}
};
/** @inheritdoc */
SampleFilePersistenceStore.prototype.getBinaryFileSpec = function (args) {
	var jobId = args.jobId,
		jobDir = path.join(translationJobsDir, jobId),
		sourceDir = path.join(jobDir, 'translation-source'),
		assetJobJSON = path.join(sourceDir, 'job.json'),
		fileSrcDir = fs.existsSync(assetJobJSON) ? path.join(sourceDir, 'root') : path.join(sourceDir, args.fileType, 'root'),
		folder = path.join(fileSrcDir, args.file.folderPath);

	return path.join(folder, args.file.name);
};
/** @inheritdoc */
SampleFilePersistenceStore.prototype.getBinaryFile = function (args) {
	var self = this;

	return new Promise(function (resolve, reject) {
		var binaryFileSpec = self.getBinaryFileSpec(args);

		// read in the requested file
		if (fs.existsSync(binaryFileSpec)) {
			resolve(fs.readFileSync(binaryFileSpec));
		} else {
			console.log('SampleFilePersistenceStore.getBinaryFile(): unable to locate file: ' + binaryFileSpec);
			reject({
				errorCode: 404,
				errorMessage: 'unable to locate file: ' + binaryFileSpec
			});
		}
	});
};
//
// Translated files CRUD operations
// 
/** @inheritdoc */
SampleFilePersistenceStore.prototype.createTranslationRepository = function (args) {
	return new Promise(function (resolve, reject) {
		// initialize zip folder and copy across the job.json files
		var jobId = args.jobId,
			sourceDir = path.join(translationJobsDir, jobId, 'translation-source'),
			targetDir = path.join(translationJobsDir, jobId, 'translation-target');

		// if the zip folder doesn't exist
		if (!fs.existsSync(targetDir)) {
			fs.mkdirSync(targetDir);

			// copy across the job.json files
			var assetJobJSON = path.join(sourceDir, 'job.json');
			if (fs.existsSync(assetJobJSON)) {
				// asset zip
				// copy across the single job.json file
				fs.copyFileSync(assetJobJSON, path.join(targetDir, 'job.json'));
			} else {
				// get the available folders from the source
				var sourceFolders = ['site', 'assets'].filter(function (fileType) {
					return fs.existsSync(path.join(sourceDir, fileType));
				});

				// copy across each of the site/assets job.json files
				sourceFolders.forEach(function (fileType) {
					// create the type folder 
					var typeDir = path.join(targetDir, fileType);
					if (!fs.existsSync(typeDir)) {
						fs.mkdirSync(typeDir);
					}

					// copy across the .json file into the type folder
					fs.copyFileSync(path.join(sourceDir, fileType, 'job.json'), path.join(typeDir, 'job.json'));
				});
			}
		}

		resolve();
	});
};
/** @inheritdoc */
SampleFilePersistenceStore.prototype.addTranslationFile = function (args) {
	return new Promise(function (resolve, reject) {
		// initialize zip folder and copy across the job.json files
		var jobId = args.jobId,
			targetDir = path.join(translationJobsDir, jobId, 'translation-target'),
			assetJobJSON = path.join(targetDir, 'job.json'),
			localeDir = fs.existsSync(assetJobJSON) ? path.join(targetDir, args.locale) : path.join(targetDir, args.fileType, args.locale),
			filePath = path.join(localeDir, args.fileName);

		// make the locale directory
		if (!fs.existsSync(localeDir)) {
			fs.mkdirSync(localeDir);
		}

		// write the file to the locale directory
		fs.writeFile(filePath, args.fileContent, function (err) {
			if (err) {
				console.log('SampleFilePersistenceStore.addTranslationFile(): failed to write file content for - ' + args.fileName);
				reject({
					errorCode: 500,
					errorMessage: JSON.stringify(err)
				});
			} else {
				// done
				resolve();
			}
		});
	});
};
/** @inheritdoc */
SampleFilePersistenceStore.prototype.addBinaryTranslationFile = function (args) {
	return new Promise(function (resolve, reject) {
		// initialize zip folder and copy across the job.json files
		var jobId = args.jobId,
			targetDir = path.join(translationJobsDir, jobId, 'translation-target'),
			assetJobJSON = path.join(targetDir, 'job.json'),
			localeDir = fs.existsSync(assetJobJSON) ? path.join(targetDir, args.locale) : path.join(targetDir, args.fileType, args.locale),
			folderPath = path.join(localeDir, args.folderPath),
			filePath = path.join(folderPath, args.fileName);

		// make the locale directory
		if (!fs.existsSync(folderPath)) {
			fs.mkdirSync(folderPath, { recursive: true });
		}

		// write the file to the locale directory
		fs.writeFile(filePath, args.fileContent, { encoding: 'binary' }, function (err) {
			if (err) {
				console.log('SampleFilePersistenceStore.addBinaryTranslationFile(): failed to write file content for - ' + args.fileName);
				reject({
					errorCode: 500,
					errorMessage: JSON.stringify(err)
				});
			} else {
				// done
				resolve();
			}
		});
	});
};

/** @inheritdoc */
SampleFilePersistenceStore.prototype.createTranslationZip = function (args) {
	var self = this;
	return new Promise(function (resolve, reject) {
		var jobId = args.jobId,
			jobDir = path.join(translationJobsDir, jobId),
			targetDir = path.join(jobDir, 'translation-target'),
			zipFile = jobId + '_Translations' + Math.floor(100000 + Math.random() * 900000) + '.zip',
			zipFilePath = path.join(jobDir, zipFile);

		// create the zip
		gulp.src([targetDir + '/**'])
			.pipe(zip(zipFile))
			.pipe(gulp.dest(jobDir))
			.on('end', function () {

				// update the job metadata
				self.getJob({
					jobId: jobId
				}).then(function (jobMetadata) {
					// add the zipfile to the metadata
					jobMetadata.translatedZipFile = zipFile;

					self.updateJob(jobMetadata).then(function () {
						resolve(jobMetadata);
					}).catch(function (error) {
						console.log('SampleFilePersistenceStore.createTranslationZip(): failed to update job metadata for: ' + jobId);
						reject(error);
					});
				}).catch(function (error) {
					console.log('SampleFilePersistenceStore.createTranslationZip(): failed to get job metadata for: ' + jobId);
					reject(error);
				});
			});
	});
};

/** @inheritdoc */
SampleFilePersistenceStore.prototype.exportTranslationZip = function (args) {
	return new Promise(function (resolve, reject) {
		var jobId = args.jobId,
			jobDir = path.join(translationJobsDir, jobId),
			jobMetadataFile = path.join(jobDir, jobId + '.json');

		// get the zip file path
		fs.readFile(jobMetadataFile, function (err, data) {
			if (err) {
				console.log('SampleFilePersistenceStore.exportTranslationZip(): failed to read job.json file for: ' + jobId);
				reject({
					errorCode: 500,
					errorMessage: JSON.stringify(err)
				});
			} else {
				try {
					var jobMetadata = JSON.parse(data);
					var zipFilePath = path.join(jobDir, jobMetadata.translatedZipFile);

					if (fs.existsSync(zipFilePath)) {
						// return the zip file stream
						resolve({
							size: fs.statSync(zipFilePath).size,
							stream: fs.createReadStream(zipFilePath)
						});
					} else {
						console.log('SampleFilePersistenceStore.exportTranslationZip(): failed to find translation zip file for: ' + jobId);
						reject({
							errorCode: 500,
							errorMessage: 'failed to find translation zip file for: ' + jobId
						});
					}
				} catch (parseErr) {
					console.log('SampleFilePersistenceStore.exportTranslationZip(): failed to parse job.json file for: ' + jobId);
					reject({
						errorCode: 500,
						errorMessage: JSON.stringify(parseErr)
					});
				}
			}
		});

	});
};


//
// File metadata CRUD
// 
/** @inheritdoc */
SampleFilePersistenceStore.prototype.createFileMetadata = function (args) {
	return new Promise(function (resolve, reject) {
		var jobId = args.jobId,
			jobDir = path.join(translationJobsDir, jobId),
			translationSrcDir = path.join(jobDir, 'translation-source'),
			assetJobJSON = path.join(translationSrcDir, 'job.json'),
			fileSrcDir = fs.existsSync(assetJobJSON) ? path.join(translationSrcDir, 'root') : path.join(translationSrcDir, args.fileType, 'root'),
			folderPathFileSrcDir = args.file.folderPath ? path.join(fileSrcDir, args.file.folderPath) : fileSrcDir,
			fileMetadataFile = path.join(folderPathFileSrcDir, args.file.id + '_metadata.json');

		// write out the data
		fs.writeFile(fileMetadataFile, JSON.stringify(args.file), function (err) {
			if (err) {
				console.log('SampleFilePersistenceStore.createFileMetadata(): failed to write metadata file for - ' + args.file.name);
				reject({
					errorCode: 500,
					errorMessage: JSON.stringify(err)
				});
			} else {
				// return the file metadata
				resolve(args.file);
			}
		});
	});
};
/** @inheritdoc */
SampleFilePersistenceStore.prototype.getFileMetadata = function (args) {
	return new Promise(function (resolve, reject) {
		var jobId = args.jobId,
			jobDir = path.join(translationJobsDir, jobId),
			translationSrcDir = path.join(jobDir, 'translation-source'),
			assetJobJSON = path.join(translationSrcDir, 'job.json'),
			fileSrcDir = fs.existsSync(assetJobJSON) ? path.join(translationSrcDir, 'root') : path.join(translationSrcDir, args.fileType, 'root'),
			folderPathFileSrcDir = args.file.folderPath ? path.join(fileSrcDir, args.file.folderPath) : fileSrcDir,
			fileMetadataFile = path.join(folderPathFileSrcDir, args.file.id + '_metadata.json');

		// read in the file
		if (fs.existsSync(fileMetadataFile)) {
			fs.readFile(fileMetadataFile, function (err, data) {
				if (err) {
					console.log('SampleFilePersistenceStore.getFileMetadata(): failed to read metadata file for: ' + args.file.name);
					reject({
						errorCode: 500,
						errorMessage: JSON.stringify(err)
					});
				} else {
					try {
						resolve(JSON.parse(data));
					} catch (parseErr) {
						console.log('SampleFilePersistenceStore.getFileMetadata(): failed to parse metadata file for: ' + args.file.name);
						reject({
							errorCode: 500,
							errorMessage: JSON.stringify(parseErr)
						});
					}
				}
			});
		} else {
			// no job file, reject
			var errorMessage = 'SampleFilePersistenceStore.getFileMetadata(): no metadata file available for file: ' + args.file.id + ' in ' + folderPathFileSrcDir;
			// Could be benign.
			// console.log(errorMessage);
			reject({
				errorCode: 500,
				errorMessage: errorMessage
			});
		}
	});
};

/** @inheritdoc */
SampleFilePersistenceStore.prototype.updateFileMetadata = function (args) {
	// just overwrite the metadata file
	return this.createFileMetadata(args);
};

// Export the persistence store 
module.exports = new SampleFilePersistenceStore();
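
// Illustrative only, not part of the sample: a sketch of how a caller might drive the
// translated-files flow with the methods above. The jobId, fileType, locale, and file
// names below are assumptions for the example, not values defined by this module.
//
//   var store = require('./sampleFilePersistenceStore');
//   store.createTranslationRepository({ jobId: 'job123456' })
//       .then(function () {
//           return store.addTranslationFile({
//               jobId: 'job123456',
//               fileType: 'assets',
//               locale: 'fr-FR',
//               fileName: 'item1.json',
//               fileContent: JSON.stringify({ translated: true })
//           });
//       })
//       .then(function () {
//           // zips everything under translation-target and records the zip in the job metadata
//           return store.createTranslationZip({ jobId: 'job123456' });
//       })
//       .then(function () {
//           // resolves with { size, stream } for the generated zip
//           return store.exportTranslationZip({ jobId: 'job123456' });
//       });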