Better build process (#1248)

* Remove gulp, replace with custom scripts
* Symlink entire dirs where possible (fixes #1232)
* Significantly speed up subsequent builds (fixes #1238)
* Watch process now observes new/removed files, not only changed
* Add ignoreMask, exclude all files with names starting with a #
* Better logging during builds
* Update .travis.yml to use new, non-gulp-based build
This commit is contained in:
Tom Najdek 2017-06-21 00:18:46 +01:00 committed by Dan Stillman
parent 3259b63081
commit b53fabbb58
19 changed files with 1149 additions and 1607 deletions

View File

@ -12,9 +12,7 @@
"resource/react-dom.js",
"resource/bluebird.js",
"resource/bluebird/*.js",
"test/resource/httpd.js",
"test/resource/mocha.js",
"test/resource/co-mocha.js"
"test/resource/*.js"
],
"plugins": [
"syntax-flow",

1
.gitignore vendored
View File

@ -1,3 +1,4 @@
.DS_Store
node_modules
build
.signatures.json

View File

@ -29,7 +29,7 @@ before_script:
- export DISPLAY=:99.0
- sh -e /etc/init.d/xvfb start
- npm i
- node_modules/.bin/gulp build
- npm run build
- if [ $FX_VERSION = "54.0b" ] &&
[ $TRAVIS_REPO_SLUG = "zotero/zotero" ] &&
[ $TRAVIS_BRANCH = "master" ] &&

View File

@ -1,55 +0,0 @@
/* global onmessage: true, postMessage: false */
'use strict';
const fs = require('fs');
const path = require('path');
const babel = require('babel-core');
const minimatch = require('minimatch')
const mkdirp = require('mkdirp');
// Babel options are read straight from the project's .babelrc
const options = JSON.parse(fs.readFileSync('.babelrc'));
/* exported onmessage */
// tiny-worker entry point: receives a source file path (ev.data), transpiles
// it with Babel — or patches react-dom.js, or passes through files matching
// .babelrc "ignore" — writes the result under build/, then posts a status
// message (including any accumulated error) back to the master process.
onmessage = (ev) => {
const t1 = Date.now();
const sourcefile = path.normalize(ev.data);
// errors from read/mkdirp/write/transform are accumulated into `error`
// (first one wins) and reported in the final postMessage
let error = null;
let isSkipped = false;
fs.readFile(sourcefile, 'utf8', (err, data) => {
var transformed;
if(sourcefile === 'resource/react-dom.js') {
// patch react-dom to create HTML elements with an explicit namespace
transformed = data.replace(/ownerDocument\.createElement\((.*?)\)/gi, 'ownerDocument.createElementNS(DOMNamespaces.html, $1)');
} else if('ignore' in options && options.ignore.some(ignoreGlob => minimatch(sourcefile, ignoreGlob))) {
// listed in .babelrc "ignore": copy through untransformed
transformed = data;
isSkipped = true;
} else {
try {
transformed = babel.transform(data, options).code;
} catch(c) {
// on a Babel failure, fall back to the untransformed source
transformed = data;
isSkipped = true;
error = c.message;
}
}
const outfile = path.join('build', sourcefile);
error = error || err;
mkdirp(path.dirname(outfile), err => {
error = error || err;
fs.writeFile(outfile, transformed, err => {
error = error || err;
const t2 = Date.now();
// report outcome and timing to the master process
postMessage({
isSkipped,
sourcefile,
outfile,
error,
processingTime: t2 - t1
});
});
});
});
};

View File

@ -1,34 +0,0 @@
const path = require('path');
const gutil = require('gulp-util');
const through = require('through2');
const PluginError = gutil.PluginError;
const PLUGIN_NAME = 'gulp-react-patcher';
module.exports = function() {
return through.obj(function(file, enc, callback) {
if (file.isNull()) {
this.push(file);
return callback();
}
if(file.isStream()) {
this.emit('error', new PluginError(PLUGIN_NAME, 'Streams are not supported!'));
return callback();
}
try {
let filename = path.basename(file.path);
if(filename === 'react-dom.js') {
file.contents = Buffer.from(file.contents.toString().replace(/ownerDocument\.createElement\((.*?)\)/gi, 'ownerDocument.createElementNS(DOMNamespaces.html, $1)'), enc);
}
} catch(e) {
this.emit('error', new PluginError(PLUGIN_NAME, e));
}
this.push(file);
callback();
});
};

View File

@ -1,263 +0,0 @@
'use strict';
const gulp = require('gulp');
const del = require('del');
const vfs = require('vinyl-fs');
const gutil = require('gulp-util');
const babel = require('gulp-babel');
const sass = require('gulp-sass');
const os = require('os');
const glob = require('glob');
const Worker = require('tiny-worker');
const merge = require('merge-stream');
const tap = require('gulp-tap');
const rename = require('gulp-rename');
const browserify = require('browserify');
const reactPatcher = require('./gulp/gulp-react-patcher');
const isWindows = /^win/.test(process.platform);
const NODE_ENV = process.env.NODE_ENV;
const workers = [];
var isExiting = false;

// Format a dir list for a glob matcher: many dirs -> "{a,b}", one dir -> "a".
const formatDirsforMatcher = dirs => {
	if (dirs.length > 1) {
		return `{${dirs.join(',')}}`;
	}
	return dirs[0];
};

// Terminate every worker still registered in `workers`.
const killAllWorkers = () => {
	workers.forEach(worker => worker.terminate());
};
// list of folders from where .js files are compiled and non-js files are symlinked
const dirs = [
'chrome',
'components',
'defaults',
'resource',
'resource/web-library',
'test',
'test/resource/chai',
'test/resource/chai-as-promised',
'test/resource/mocha'
];
// list of folders from which all files are symlinked
const symlinkDirs = [
'styles',
'translators',
];
// list of folders which are copied to the build folder
const copyDirs = [
'test/tests/data' // browser follows symlinks when loading test data
// triggering false-positive test results with mismatched URIs
];
// list of files from root folder to symlink
const symlinkFiles = [
'chrome.manifest', 'install.rdf', 'update.rdf'
];
// these files will be browserified during the build
const browserifyConfigs = [
{
src: 'node_modules/sinon/lib/sinon.js',
dest: 'test/resource/sinon.js',
config: {
standalone: 'sinon'
}
},
{
src: 'node_modules/chai-as-promised/lib/chai-as-promised.js',
dest: 'test/resource/chai-as-promised.js',
config: {
standalone: 'chaiAsPromised'
}
}
];
const jsGlob = `./\{${dirs.join(',')}\}/**/*.js`;
const jsGlobIgnore = `./\{${symlinkDirs.concat(copyDirs).join(',')}\}/**/*.js`;
// Report a build error: either abort the whole build (killing the worker
// pool) or log the error and keep the current stream alive.
function onError(shouldExit, err) {
	if (!shouldExit) {
		gutil.log(gutil.colors.red('Error:'), err);
		this.emit('end');
		return;
	}
	isExiting = true;
	killAllWorkers();
	throw new Error(err);
}

// Log a success message unless the build is already shutting down.
function onSuccess(msg) {
	if (isExiting) {
		return;
	}
	gutil.log(gutil.colors.green('Build:'), msg);
}
// Bundle every browserifyConfigs entry and write the results into build/.
// Returns a merged stream over all bundles.
function getBrowserify(exitOnError = true) {
	const streams = browserifyConfigs.map(config => gulp
		.src(config.src)
		.pipe(tap(file => {
			file.contents = browserify(file.path, config.config).bundle();
		}))
		.pipe(rename(config.dest))
		.on('error', function(err) { onError.bind(this)(exitOnError, err); })
		.on('data', file => onSuccess(`[browserify] ${file.path}`))
		.pipe(gulp.dest('build')));
	return merge.apply(merge, streams);
}
// Transpile JS sources with Babel (honouring an optional ignore glob),
// patch react-dom where needed and write the output to build/.
function getJS(source, sourceIgnore, exitOnError = true) {
	const src = sourceIgnore ? [source, '!' + sourceIgnore] : source;
	return gulp.src(src, { base: '.' })
		.pipe(babel())
		.on('error', function(err) { onError.bind(this)(exitOnError, err); })
		.on('data', file => onSuccess(`[js] ${file.path}`))
		.pipe(reactPatcher())
		.pipe(gulp.dest('./build'));
}
// Transpile all files matched by `source` (minus `sourceIgnore`) in parallel,
// using up to one tiny-worker (gulp/babel-worker.js) per CPU core. Workers pull
// files from the shared jsFiles queue until it is drained. Resolves when every
// worker has terminated; rejects on the first reported error.
function getJSParallel(source, sourceIgnore, exitOnError = true) {
const jsFiles = glob.sync(source, { ignore: sourceIgnore });
const cpuCount = os.cpus().length;
const threadCount = Math.min(cpuCount, jsFiles.length);
let threadsActive = threadCount;
let isError = false;
return new Promise((resolve, reject) => {
for(let i = 0; i < threadCount; i++) {
let worker = new Worker('gulp/babel-worker.js');
workers[i] = worker;
worker.onmessage = ev => {
if(ev.data.error) {
isError = true;
let errorMsg = `Failed while processing ${ev.data.sourcefile}: ${ev.data.error}`;
NODE_ENV == 'debug' && console.log(`process ${i}: ${errorMsg}`);
// may throw (when exitOnError) after terminating the worker pool
onError(exitOnError, errorMsg);
reject(errorMsg);
}
NODE_ENV == 'debug' && console.log(`process ${i} took ${ev.data.processingTime} ms to process ${ev.data.sourcefile}`);
NODE_ENV != 'debug' && onSuccess(`[js] ${ev.data.sourcefile}`);
if(ev.data.isSkipped) {
NODE_ENV == 'debug' && console.log(`process ${i} SKIPPED ${ev.data.sourcefile}`);
}
// hand this worker its next file, or terminate it when the queue is empty
let nextFile = jsFiles.pop();
if(!isError && nextFile) {
NODE_ENV == 'debug' && console.log(`process ${i} scheduled to process ${nextFile}`);
worker.postMessage(nextFile);
} else {
NODE_ENV == 'debug' && console.log(`process ${i} has terminated`);
worker.terminate();
// NOTE(review): splicing by loop index after earlier removals shifts the
// array, so the removed slot may not be this worker — confirm intended.
workers.splice(i, 1);
if(!--threadsActive) {
resolve();
}
}
};
// seed each worker with its first file
worker.postMessage(jsFiles.pop());
}
NODE_ENV == 'debug' && console.log(`Started ${threadCount} processes for processing JS`);
});
}
// Symlink (or, on Windows, copy) every non-compiled file into build/.
function getSymlinks(exitOnError = true) {
	const match = [
		...symlinkFiles,
		...dirs.map(d => `${d}/**`),
		...symlinkDirs.map(d => `${d}/**`),
		`!./${formatDirsforMatcher(dirs)}/**/*.js`,
		`!./${formatDirsforMatcher(copyDirs)}/**`
	];
	return gulp
		.src(match, { nodir: true, base: '.', read: isWindows })
		.on('error', function(err) { onError.bind(this)(exitOnError, err); })
		.on('data', file => onSuccess(`[ln] ${file.path.substr(__dirname.length + 1)}`))
		.pipe(isWindows ? gulp.dest('build/') : vfs.symlink('build/'));
}
// Copy the copyDirs trees into build/ (used where symlinks would break tests).
function getCopy(exitOnError = true) {
	const globs = copyDirs.map(d => `${d}/**`);
	return gulp
		.src(globs, { base: '.' })
		.on('data', file => onSuccess(`[cp] ${file.path.substr(__dirname.length + 1)}`))
		.on('error', function(err) { onError.bind(this)(exitOnError, err); })
		.pipe(gulp.dest('build/'));
}
// Compile top-level .scss files into the components skin folder in build/.
function getSass(exitOnError = true) {
	const stream = gulp.src('scss/*.scss');
	stream.on('error', function(err) { onError.bind(this)(exitOnError, err); });
	return stream
		.pipe(sass())
		.pipe(gulp.dest('./build/chrome/skin/default/zotero/components/'));
}
gulp.task('clean', () => {
	return del('build');
});

gulp.task('symlink', () => {
	return getSymlinks();
});

gulp.task('js', done => {
	getJSParallel(jsGlob, jsGlobIgnore)
		.then(done)
		.catch(errorMsg => {
			// BUG FIX: onError takes (shouldExit, err); the previous
			// single-argument call passed the message as the shouldExit flag
			// and dropped the error text entirely.
			onError(true, errorMsg);
		});
});

// BUG FIX: return the streams so gulp can track task completion; previously
// 'browserify' and 'copy' signalled completion immediately, before the work
// was done.
gulp.task('browserify', () => {
	return getBrowserify();
});

gulp.task('copy', () => {
	return getCopy();
});

gulp.task('sass', () => {
	return getSass();
});

gulp.task('build', ['js', 'sass', 'symlink', 'browserify', 'copy']);

gulp.task('build-clean', ['clean'], () => {
	gulp.start('build');
});

gulp.task('dev', ['clean'], () => {
	var interval = 750;
	// Rebuild individual files on change; exitOnError = false keeps the
	// watcher alive after a failed compile.
	gulp.watch(jsGlob, { interval }).on('change', event => {
		getJS(event.path, jsGlobIgnore, false);
	});
	gulp.watch('src/styles/*.scss', { interval }).on('change', () => {
		getSass(false);
	});
	gulp.start('build');
});

gulp.task('default', ['dev']);

1507
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -5,10 +5,13 @@
"description": "Zotero",
"main": "",
"scripts": {
"start": "./node_modules/.bin/gulp",
"build": "./node_modules/.bin/gulp build-clean",
"sass": "./node_modules/.bin/gulp sass",
"clean": "./node_modules/.bin/gulp clean"
"start": "node ./scripts/build.js && node ./scripts/watch.js",
"build": "node ./scripts/build.js",
"sass": "node ./scripts/sass.js",
"js": "node ./scripts/js.js",
"clean": "node ./scripts/clean.js",
"clean-build": "node ./scripts/clean.js && node ./scripts/build.js",
"clean-start": "node ./scripts/clean.js && node ./scripts/build.js && node ./scripts/watch.js"
},
"license": "",
"dependencies": {
@ -33,22 +36,16 @@
"browserify": "^14.3.0",
"chai": "^3.5.0",
"chai-as-promised": "^6.0.0",
"chokidar": "^1.7.0",
"co-mocha": "^1.2.0",
"del": "^2.2.2",
"glob": "^7.1.2",
"gulp": "^3.9.1",
"gulp-babel": "^6.1.2",
"gulp-rename": "^1.2.2",
"gulp-sass": "^3.1.0",
"gulp-tap": "^1.0.1",
"gulp-util": "^3.0.7",
"merge-stream": "^1.0.1",
"minimatch": "^3.0.4",
"colors": "^1.1.2",
"eslint-plugin-react": "^7.1.0",
"fs-extra": "^3.0.1",
"globby": "^6.1.0",
"mocha": "^3.4.2",
"multimatch": "^2.1.0",
"node-sass": "^4.5.3",
"sinon": "^2.3.1",
"through2": "^2.0.1",
"tiny-worker": "^2.1.1",
"vinyl-fs": "^2.4.4",
"watchify": "^3.7.0"
"universalify": "^0.1.0"
}
}

56
scripts/babel-worker.js Normal file
View File

@ -0,0 +1,56 @@
/* global onmessage: true, postMessage: false */
'use strict';
const fs = require('fs-extra');
const path = require('path');
const babel = require('babel-core');
const multimatch = require('multimatch');
const options = JSON.parse(fs.readFileSync('.babelrc'));
const cluster = require('cluster');
// Transpile (or patch / pass through) a single JS file and report the result
// to the master process via process.send. `ev` is the { file } message sent
// by scripts/js.js. Also exported for direct require().
async function babelWorker(ev) {
const t1 = Date.now();
const sourcefile = ev.file;
const outfile = path.join('build', sourcefile);
// report a failure for this file back to the master process
const postError = (error) => {
process.send({
sourcefile,
outfile,
error
});
};
var isSkipped = false;
var transformed;
try {
let contents = await fs.readFile(sourcefile, 'utf8');
if (sourcefile === 'resource/react-dom.js') {
// patch react
transformed = contents.replace(/ownerDocument\.createElement\((.*?)\)/gi, 'ownerDocument.createElementNS(DOMNamespaces.html, $1)');
} else if ('ignore' in options && options.ignore.some(ignoreGlob => multimatch(sourcefile, ignoreGlob).length)) {
// listed in .babelrc "ignore": copy through untransformed
transformed = contents;
isSkipped = true;
} else {
try {
transformed = babel.transform(contents, options).code;
} catch (error) { return postError(`Babel error: ${error}`);}
}
await fs.outputFile(outfile, transformed);
const t2 = Date.now();
// report success (and timing) to the master process
process.send({
isSkipped,
sourcefile,
outfile,
processingTime: t2 - t1
});
} catch (error) { return postError(`I/O error: ${error}`); }
}
module.exports = babelWorker;
// when forked via cluster (from scripts/js.js), process files on demand
if (cluster.isWorker) {
process.on('message', babelWorker);
}

78
scripts/browserify.js Normal file
View File

@ -0,0 +1,78 @@
'use strict';
const browserify = require('browserify');
const globby = require('globby');
const path = require('path');
const fs = require('fs-extra');
const { getSignatures, writeSignatures, cleanUp, compareSignatures, getFileSignature, onSuccess, onError, onProgress } = require('./utils');
const { browserifyConfigs } = require('./config');
const ROOT = path.resolve(__dirname, '..');
/**
 * Browserify each configured bundle (see browserifyConfigs in ./config) into
 * build/, skipping sources whose signature is unchanged and whose output
 * already exists in build/.
 *
 * @param {Object} signatures - mutable map of file path -> signature,
 *        updated in place for every bundle that is (re)built
 * @returns {Promise<Object>} summary { action, count, totalCount, processingTime }
 */
async function getBrowserify(signatures) {
	const t1 = Date.now();
	var count = 0;
	// BUG FIX: totalCount was previously declared without an initial value,
	// so `totalCount += files.length` produced NaN in the summary.
	var totalCount = 0;
	var config, f;
	while ((config = browserifyConfigs.pop()) != null) {
		let files = await globby(config.src, { cwd: ROOT });
		totalCount += files.length;
		while ((f = files.pop()) != null) {
			let newFileSignature = await getFileSignature(f);
			const dest = path.join('build', config.dest);
			if (f in signatures) {
				if (compareSignatures(newFileSignature, signatures[f])) {
					try {
						await fs.access(dest, fs.constants.F_OK);
						// unchanged and already present in build/: skip
						continue;
					} catch (_) {
						// output missing from build/, fall through and re-bundle
					}
				}
			}
			try {
				await fs.mkdirp(path.dirname(dest));
				const bundleFs = fs.createWriteStream(dest);
				await new Promise((resolve, reject) => {
					bundleFs
						.on('error', reject)
						.on('finish', resolve);
					browserify(f, config.config).bundle().pipe(bundleFs);
				});
				onProgress(f, dest, 'browserify');
				signatures[f] = newFileSignature;
				count++;
			} catch (err) {
				throw new Error(`Failed on ${f}: ${err}`);
			}
		}
	}
	const t2 = Date.now();
	return {
		action: 'browserify',
		count,
		totalCount,
		processingTime: t2 - t1
	};
}

module.exports = getBrowserify;

// CLI entry: browserify everything, prune stale outputs, persist signatures.
if (require.main === module) {
	(async () => {
		try {
			const signatures = await getSignatures();
			onSuccess(await getBrowserify(signatures));
			onSuccess(await cleanUp(signatures));
			await writeSignatures(signatures);
		} catch (err) {
			process.exitCode = 1;
			global.isError = true;
			onError(err);
		}
	})();
}

44
scripts/build.js Normal file
View File

@ -0,0 +1,44 @@
const colors = require('colors/safe');
const getBrowserify = require('./browserify');
const getCopy = require('./copy');
const getJS = require('./js');
const getSass = require('./sass');
const getSymlinks = require('./symlinks');
const { formatDirsForMatcher, getSignatures, writeSignatures, cleanUp, onSuccess, onError} = require('./utils');
const { dirs, symlinkDirs, copyDirs, symlinkFiles, jsFiles, ignoreMask } = require('./config');
// CLI entry point: run the full build (browserify, copy, js, sass, symlinks)
// in parallel, then prune stale outputs and persist the signature cache.
if (require.main === module) {
	(async () => {
		try {
			const t1 = Date.now();
			global.isError = false; // used to prevent further output to avoid concealing errors
			const symlinks = [
				...symlinkFiles,
				...dirs.map(d => `${d}/**`),
				`!${formatDirsForMatcher(dirs)}/**/*.js`,
				`!${formatDirsForMatcher(copyDirs)}/**`
			];
			const signatures = await getSignatures();
			const results = await Promise.all([
				getBrowserify(signatures),
				getCopy(copyDirs.map(d => `${d}/**`), { ignore: ignoreMask }, signatures),
				getJS(jsFiles, { ignore: ignoreMask }, signatures),
				getSass('scss/*.scss', { root: 'scss', ignore: ignoreMask }, signatures),
				getSymlinks(symlinks, { nodir: true, ignore: ignoreMask }, signatures),
				getSymlinks(symlinkDirs, { ignore: ignoreMask }, signatures),
				cleanUp(signatures)
			]);
			await writeSignatures(signatures);
			for (const result of results) {
				onSuccess(result);
			}
			const t2 = Date.now();
			console.log(colors.yellow(`Total build time ${t2 - t1}ms`));
		} catch (err) {
			process.exitCode = 1;
			global.isError = true;
			onError(err);
		}
	})();
}

26
scripts/clean.js Normal file
View File

@ -0,0 +1,26 @@
'use strict';
const path = require('path');
const fs = require('fs-extra');
const { onError } = require('./utils');
const ROOT = path.resolve(__dirname, '..');
// Remove a file or directory tree (no error if it doesn't exist).
async function getClean(source) {
	await fs.remove(source);
}

module.exports = getClean;

// CLI entry: delete the build output and the signature cache.
if (require.main === module) {
	(async () => {
		try {
			await getClean(path.join(ROOT, 'build'));
			await getClean(path.join(ROOT, '.signatures.json'));
		} catch (err) {
			process.exitCode = 1;
			global.isError = true;
			onError(err);
		}
	})();
}

60
scripts/config.js Normal file
View File

@ -0,0 +1,60 @@
// list of folders from where .js files are compiled and non-js files are symlinked
const dirs = [
'chrome',
'components',
'defaults',
'resource',
'resource/web-library',
'test',
'test/resource/chai',
'test/resource/chai-as-promised',
'test/resource/mocha'
];
// list of folders from which all files are symlinked
const symlinkDirs = [
'styles',
'translators'
];
// list of folders which are copied to the build folder
const copyDirs = [
'test/tests/data' // browser follows symlinks when loading test data
// triggering false-positive test results with mismatched URIs
];
// list of files from root folder to symlink
const symlinkFiles = [
'chrome.manifest', 'install.rdf', 'update.rdf'
];
// these files will be browserified during the build
const browserifyConfigs = [
{
src: 'node_modules/sinon/lib/sinon.js',
dest: 'test/resource/sinon.js',
config: {
standalone: 'sinon'
}
},
{
src: 'node_modules/chai-as-promised/lib/chai-as-promised.js',
dest: 'test/resource/chai-as-promised.js',
config: {
standalone: 'chaiAsPromised'
}
}
];
// exclude mask used for js, copy, symlink and sass tasks
const ignoreMask = ['**/#*.*'];
const jsFiles = [
`{${dirs.join(',')}}/**/*.js`,
`!{${symlinkDirs.concat(copyDirs).join(',')}}/**/*.js`
];
module.exports = {
dirs, symlinkDirs, copyDirs, symlinkFiles, browserifyConfigs, jsFiles, ignoreMask
};

67
scripts/copy.js Normal file
View File

@ -0,0 +1,67 @@
'use strict';
const globby = require('globby');
const path = require('path');
const fs = require('fs-extra');
const { getSignatures, writeSignatures, cleanUp, compareSignatures, getFileSignature, onSuccess, onError, onProgress } = require('./utils');
const { copyDirs, ignoreMask } = require('./config');
const ROOT = path.resolve(__dirname, '..');
// Copy matched files into build/, skipping files whose signature is unchanged
// and whose copy already exists in build/.
async function getCopy(source, options, signatures) {
	const t1 = Date.now();
	const files = await globby(source, Object.assign({ cwd: ROOT }, options ));
	const totalCount = files.length;
	var count = 0;
	var f;
	while ((f = files.pop()) != null) {
		const newFileSignature = await getFileSignature(f);
		const dest = path.join('build', f);
		const unchanged = f in signatures && compareSignatures(newFileSignature, signatures[f]);
		if (unchanged) {
			try {
				await fs.access(dest, fs.constants.F_OK);
				// same signature and output present: nothing to do
				continue;
			} catch (_) {
				// output missing from build/, fall through and copy again
			}
		}
		try {
			await fs.mkdirp(path.dirname(dest));
			await fs.copy(f, dest);
			onProgress(f, dest, 'cp');
			signatures[f] = newFileSignature;
			count++;
		} catch (err) {
			throw new Error(`Failed on ${f}: ${err}`);
		}
	}
	const t2 = Date.now();
	return {
		action: 'copy',
		count,
		totalCount,
		processingTime: t2 - t1
	};
}

module.exports = getCopy;

// CLI entry: copy the copyDirs trees, prune stale outputs, persist signatures.
if (require.main === module) {
	(async () => {
		try {
			const signatures = await getSignatures();
			onSuccess(await getCopy(copyDirs.map(d => `${d}/**`), { ignore: ignoreMask }, signatures));
			onSuccess(await cleanUp(signatures));
			await writeSignatures(signatures);
		} catch (err) {
			process.exitCode = 1;
			global.isError = true;
			onError(err);
		}
	})();
}

130
scripts/js.js Normal file
View File

@ -0,0 +1,130 @@
const globby = require('globby');
const path = require('path');
const os = require('os');
const fs = require('fs-extra');
const cluster = require('cluster');
const { getSignatures, compareSignatures, getFileSignature, writeSignatures, cleanUp, onSuccess, onError, onProgress } = require('./utils');
const { jsFiles, ignoreMask } = require('./config');
const NODE_ENV = process.env.NODE_ENV;
const ROOT = path.resolve(__dirname, '..');
/**
 * Transpile matched JS files with Babel, distributing the work across a pool
 * of cluster workers (scripts/babel-worker.js). Files whose signature is
 * unchanged and whose output already exists in build/ are skipped.
 *
 * @param {string|string[]} source - globby pattern(s) relative to the repo root
 * @param {Object} options - extra globby options (e.g. { ignore })
 * @param {Object} signatures - mutable map of file path -> signature, updated in place
 * @returns {Promise<Object>} summary { action, count, totalCount, processingTime }
 */
async function getJS(source, options, signatures) {
	const t1 = Date.now();
	const matchingJSFiles = await globby(source, Object.assign({ cwd: ROOT }, options));
	const cpuCount = os.cpus().length;
	const totalCount = matchingJSFiles.length;
	var count = 0;
	var isError = false;
	cluster.setupMaster({
		exec: path.join(__dirname, 'babel-worker.js')
	});
	// Check signatures and collect the files that actually need processing,
	// together with their new signatures (recorded only after a successful build).
	const newFilesSignatures = {};
	const filesForProcessing = [];
	var f;
	while ((f = matchingJSFiles.pop()) != null) {
		const newFileSignature = await getFileSignature(f);
		const dest = path.join('build', f);
		f = path.normalize(f);
		if (f in signatures) {
			if (compareSignatures(newFileSignature, signatures[f])) {
				try {
					await fs.access(dest, fs.constants.F_OK);
					continue;
				} catch (_) {
					// output missing from build/, fall through and re-transpile
				}
			}
		}
		filesForProcessing.push(f);
		newFilesSignatures[f] = newFileSignature;
	}
	// Shortcut if no files need rebuilding. (filesForProcessing is an array:
	// use .length directly instead of the previous Object.keys() detour.)
	if (filesForProcessing.length === 0) {
		const t2 = Date.now();
		return {
			action: 'js',
			count,
			totalCount,
			processingTime: t2 - t1
		};
	}
	// Distribute processing among at most one worker per CPU core.
	const workerCount = Math.min(cpuCount, filesForProcessing.length);
	var workersActive = workerCount;
	NODE_ENV == 'debug' && console.log(`Will process ${filesForProcessing.length} files using ${workerCount} processes`);
	return new Promise((resolve, reject) => {
		for (let i = 0; i < workerCount; i++) {
			const worker = cluster.fork();
			worker.on('message', function(ev) {
				if (ev.error) {
					isError = true;
					let errorMsg = `Failed while processing ${ev.sourcefile}: ${ev.error}`;
					reject(errorMsg);
				} else {
					// record the new signature only once the file has been built
					signatures[ev.sourcefile] = newFilesSignatures[ev.sourcefile];
					if (ev.isSkipped) {
						NODE_ENV == 'debug' && console.log(`process ${this.id} SKIPPED ${ev.sourcefile}`);
					} else {
						NODE_ENV == 'debug' && console.log(`process ${this.id} took ${ev.processingTime} ms to process ${ev.sourcefile} into ${ev.outfile}`);
						NODE_ENV != 'debug' && onProgress(ev.sourcefile, ev.outfile, 'js');
						count++;
					}
				}
				// hand this worker its next file, or retire it when the queue
				// is drained (or an error occurred)
				let nextFile = filesForProcessing.pop();
				if (!isError && nextFile) {
					NODE_ENV == 'debug' && console.log(`process ${this.id} scheduled to process ${nextFile}`);
					this.send({
						file: nextFile
					});
				} else {
					if (this.isConnected()) {
						this.kill();
					}
					NODE_ENV == 'debug' && console.log(`process ${this.id} has terminated`);
					if (!--workersActive) {
						const t2 = Date.now();
						resolve({
							action: 'js',
							count,
							totalCount,
							processingTime: t2 - t1
						});
					}
				}
			});
			// seed each worker with its first file
			let nextFile = filesForProcessing.pop();
			NODE_ENV == 'debug' && console.log(`process ${worker.id} scheduled to process ${nextFile}`);
			worker.send({
				file: nextFile
			});
		}
	});
}
module.exports = getJS;

// CLI entry: transpile everything, prune stale outputs, persist signatures.
if (require.main === module) {
	(async () => {
		try {
			const signatures = await getSignatures();
			const result = await getJS(jsFiles, { ignore: ignoreMask }, signatures);
			onSuccess(result);
			onSuccess(await cleanUp(signatures));
			await writeSignatures(signatures);
		} catch (err) {
			process.exitCode = 1;
			global.isError = true;
			onError(err);
		}
	})();
}

73
scripts/sass.js Normal file
View File

@ -0,0 +1,73 @@
'use strict';
const universalify = require('universalify');
const sass = require('node-sass');
const globby = require('globby');
const path = require('path');
const fs = require('fs-extra');
const { getSignatures, writeSignatures, cleanUp, compareSignatures, getFileSignature, onSuccess, onError, onProgress, getPathRelativeTo } = require('./utils');
const { ignoreMask } = require('./config');
const sassRender = universalify.fromCallback(sass.render);
const ROOT = path.resolve(__dirname, '..');
/**
 * Compile .scss sources into build/chrome/skin/default/zotero/components/,
 * skipping files whose signature is unchanged and whose output already exists.
 *
 * @param {string|string[]} source - globby pattern(s) relative to the repo root
 * @param {Object} options - extra globby options (e.g. { root, ignore })
 * @param {Object} signatures - mutable map of file path -> signature, updated in place
 * @returns {Promise<Object>} summary { action, count, totalCount, processingTime }
 */
async function getSass(source, options, signatures) {
	const t1 = Date.now();
	const files = await globby(source, Object.assign({ cwd: ROOT }, options ));
	const totalCount = files.length;
	var count = 0;
	var f;
	while ((f = files.pop()) != null) {
		let newFileSignature = await getFileSignature(f);
		// NOTE(review): the destination keeps the source's .scss filename —
		// confirm whether a .css extension is expected by consumers.
		const dest = path.join('build', 'chrome', 'skin', 'default', 'zotero', 'components', getPathRelativeTo(f, 'scss'));
		if (f in signatures) {
			if (compareSignatures(newFileSignature, signatures[f])) {
				try {
					await fs.access(dest, fs.constants.F_OK);
					continue;
				} catch (_) {
					// output missing from build/, fall through and recompile
				}
			}
		}
		try {
			// renamed from `sass` to avoid shadowing the node-sass module
			const rendered = await sassRender({
				file: f
			});
			// BUG FIX: previously the whole render-result object was written
			// to disk; the compiled stylesheet is its `css` property.
			await fs.outputFile(dest, rendered.css);
			onProgress(f, dest, 'sass');
			signatures[f] = newFileSignature;
			count++;
		} catch (err) {
			throw new Error(`Failed on ${f}: ${err}`);
		}
	}
	const t2 = Date.now();
	return {
		action: 'sass',
		count,
		totalCount,
		processingTime: t2 - t1
	};
}
module.exports = getSass;

// CLI entry: compile all scss, prune stale outputs, persist signatures.
if (require.main === module) {
	(async () => {
		try {
			const signatures = await getSignatures();
			const result = await getSass('scss/*.scss', { root: 'scss', ignore: ignoreMask }, signatures);
			onSuccess(result);
			onSuccess(await cleanUp(signatures));
			await writeSignatures(signatures);
		} catch (err) {
			process.exitCode = 1;
			global.isError = true;
			onError(err);
		}
	})();
}

86
scripts/symlinks.js Normal file
View File

@ -0,0 +1,86 @@
'use strict';
const path = require('path');
const fs = require('fs-extra');
const globby = require('globby');
const { isWindows, formatDirsForMatcher, getSignatures, writeSignatures, cleanUp, onSuccess, onError, onProgress } = require('./utils');
const { dirs, symlinkDirs, copyDirs, symlinkFiles, ignoreMask } = require('./config');
const ROOT = path.resolve(__dirname, '..');
//@TODO: change signature to getSymlinks(source, options, signatures)
// here and elsewhere
//
// run symlinks twice, once for files (with nodir: true)
// once for dirs
// Symlink (copy on Windows) matched files/dirs into build/. Entries already
// recorded as symlinked in `signatures` and still present in build/ are skipped.
async function getSymlinks(source, options, signatures) {
	const t1 = Date.now();
	const files = await globby(source, Object.assign({ cwd: ROOT }, options ));
	// collect entries symlinked on a previous run that still exist in build/
	const filesDonePreviously = new Set();
	for (const [f, signature] of Object.entries(signatures)) {
		if ('isSymlinked' in signature && signature.isSymlinked) {
			try {
				await fs.access(path.join('build', f), fs.constants.F_OK);
				// found in signatures and in build/: can be skipped
				filesDonePreviously.add(f);
			} catch (_) {
				// not found in build/: needs symlinking again
			}
		}
	}
	const filesToProcess = files.filter(f => !filesDonePreviously.has(f));
	const filesProcessedCount = filesToProcess.length;
	var f;
	while ((f = filesToProcess.pop()) != null) {
		const dest = path.join('build', f);
		try {
			if (isWindows) {
				// symlinks need special privileges on Windows: copy instead
				await fs.copy(f, dest);
			} else {
				await fs.ensureSymlink(f, dest);
			}
			signatures[f] = {
				isSymlinked: true
			};
			onProgress(f, dest, 'ln');
		} catch (err) {
			throw new Error(`Failed on ${f}: ${err}`);
		}
	}
	const t2 = Date.now();
	return {
		action: 'symlink',
		count: filesProcessedCount,
		totalCount: files.length,
		processingTime: t2 - t1
	};
}
module.exports = getSymlinks;

// CLI entry: symlink loose files, then whole directories, then clean up.
if (require.main === module) {
	(async () => {
		try {
			const source = [
				...symlinkFiles,
				...dirs.map(d => `${d}/**`),
				`!${formatDirsForMatcher(dirs)}/**/*.js`,
				`!${formatDirsForMatcher(copyDirs)}/**`
			];
			const signatures = await getSignatures();
			onSuccess(await getSymlinks(source, { nodir: true, ignore: ignoreMask }, signatures));
			onSuccess(await getSymlinks(symlinkDirs, {}, signatures));
			onSuccess(await cleanUp(signatures));
			await writeSignatures(signatures);
		} catch (err) {
			process.exitCode = 1;
			global.isError = true;
			onError(err);
		}
	})();
}

126
scripts/utils.js Normal file
View File

@ -0,0 +1,126 @@
const path = require('path');
const fs = require('fs-extra');
const colors = require('colors/safe');
const green = colors.green;
const blue = colors.blue;
const yellow = colors.yellow;
const isWindows = /^win/.test(process.platform);
const ROOT = path.resolve(__dirname, '..');
const NODE_ENV = process.env.NODE_ENV;
// Log an error (never suppressed, even when global.isError is set).
function onError(err) {
	console.log(colors.red('Error:'), err);
}

// Log a one-line summary for a completed build step.
function onSuccess(result) {
	const parts = [
		`${green('Success:')} ${blue(`[${result.action}]`)} ${result.count} files processed`
	];
	if (result.totalCount) {
		parts.push(`(out of total ${result.totalCount} matched)`);
	}
	parts.push(`[${yellow(`${result.processingTime}ms`)}]`);
	console.log(parts.join(' '));
}

// Per-file progress line; silenced once a build error has been flagged globally.
function onProgress(sourcefile, outfile, operation) {
	if ('isError' in global && global.isError) {
		return;
	}
	const label = colors.blue(`[${operation}]`);
	if (NODE_ENV == 'debug') {
		console.log(`${label} ${sourcefile} -> ${outfile}`);
	} else {
		console.log(`${label} ${sourcefile}`);
	}
}
// Read the signature cache from .signatures.json; returns an empty object
// instead of propagating the error when the file doesn't exist yet.
async function getSignatures() {
	const signaturesFile = path.resolve(ROOT, '.signatures.json');
	try {
		return await fs.readJson(signaturesFile);
	} catch (_) {
		// no signature file yet: start with an empty cache
		return {};
	}
}

// Persist the signature cache to .signatures.json.
async function writeSignatures(signatures) {
	const signaturesFile = path.resolve(ROOT, '.signatures.json');
	NODE_ENV == 'debug' && console.log('writing signatures to .signatures.json');
	await fs.outputJson(signaturesFile, signatures);
}
// Remove build/ outputs for files that disappeared from the source tree,
// and drop their entries from the signature cache.
async function cleanUp(signatures) {
	const t1 = Date.now();
	var removedCount = 0;
	var invalidCount = 0;
	for (const f of Object.keys(signatures)) {
		try {
			await fs.access(f, fs.constants.F_OK);
			// the source file still exists: keep its entry
			continue;
		} catch (_) {
			// fall through: the source file is gone
		}
		invalidCount++;
		NODE_ENV == 'debug' && console.log(`File ${f} found in signatures but not in src, deleting from build`);
		try {
			await fs.remove(path.join('build', f));
			removedCount++;
		} catch (_) {
			// file wasn't in the build either
		}
		delete signatures[f];
	}
	const t2 = Date.now();
	return {
		action: 'cleanup',
		count: removedCount,
		totalCount: invalidCount,
		processingTime: t2 - t1
	};
}
// Build a comparable signature (mode, mtime, type flags) for a file.
async function getFileSignature(file) {
	const stats = await fs.stat(file);
	// stats.mtimeMs is not available everywhere; fall back to mtime.getTime()
	const mtime = stats.mtimeMs || stats.mtime.getTime();
	return {
		mode: stats.mode,
		mtime,
		isDirectory: stats.isDirectory(),
		isFile: stats.isFile()
	};
}
/**
 * Compare two file signatures (as produced by getFileSignature).
 *
 * @param {?Object} a
 * @param {?Object} b
 * @returns {boolean} true when both are non-null objects that agree on
 *          mode, mtime, isDirectory and isFile
 */
function compareSignatures(a, b) {
	if (typeof a !== 'object' || typeof b !== 'object' || a == null || b == null) {
		return false;
	}
	// .every() replaces the previous short-circuiting reduce(): same result,
	// clearer intent, and stops at the first mismatch.
	return ['mode', 'mtime', 'isDirectory', 'isFile'].every(
		k => k in a && k in b && a[k] == b[k]
	);
}
// Resolve `f` against the repo root and return its path relative to ROOT/dirName.
function getPathRelativeTo(f, dirName) {
	return path.relative(path.join(ROOT, dirName), path.join(ROOT, f));
}

// Format a dir list for a glob matcher: many dirs -> "{a,b}", one dir -> "a".
const formatDirsForMatcher = dirs => {
	if (dirs.length > 1) {
		return `{${dirs.join(',')}}`;
	}
	return dirs[0];
};
module.exports = {
isWindows,
onError,
onProgress,
onSuccess,
cleanUp,
getSignatures,
getFileSignature,
compareSignatures,
writeSignatures,
getPathRelativeTo,
formatDirsForMatcher
};

81
scripts/watch.js Normal file
View File

@ -0,0 +1,81 @@
const path = require('path');
const chokidar = require('chokidar');
const multimatch = require('multimatch');
// BUG FIX: ./config exports `jsFiles` (patterns including the ignore
// negation) and `ignoreMask` — not `jsGlob`/`jsGlobIgnore`. Destructuring the
// old names yielded undefined and made every change event throw inside
// multimatch.
const { dirs, jsFiles, ignoreMask, copyDirs, symlinkFiles } = require('./config');
const { onSuccess, onError, getSignatures, writeSignatures, cleanUp, formatDirsForMatcher } = require('./utils');
const getJS = require('./js');
const getSass = require('./sass');
const getCopy = require('./copy');
const getSymlinks = require('./symlinks');

const ROOT = path.resolve(__dirname, '..');

// Watch both the directories themselves (to observe new/removed files) and
// their contents ('scss' was previously listed twice; the duplicate is gone).
const source = [
	'chrome',
	'components',
	'defaults',
	'resource',
	'scss',
	'test',
	'styles',
	'translators',
	'chrome/**',
	'components/**',
	'defaults/**',
	'resource/**',
	'scss/**',
	'test/**',
	'styles/**',
	'translators/**'
];

// patterns for files handled by the symlink task
const symlinks = symlinkFiles
	.concat(dirs.map(d => `${d}/**`))
	.concat([`!${formatDirsForMatcher(dirs)}/**/*.js`])
	.concat([`!${formatDirsForMatcher(copyDirs)}/**`]);

var signatures;

// Flush the signature cache before exiting on Ctrl-C.
process.on('SIGINT', () => {
	writeSignatures(signatures);
	process.exit();
});

// Watch the source tree and re-run the matching build step for each change.
function getWatch() {
	let watcher = chokidar.watch(source, { cwd: ROOT })
		.on('change', async (path) => {
			try {
				if (multimatch(path, jsFiles).length && !multimatch(path, ignoreMask).length) {
					onSuccess(await getJS(path, { ignore: ignoreMask }, signatures));
				} else if (multimatch(path, 'scss/*.scss').length) {
					onSuccess(await getSass(path, {}, signatures));
				} else if (multimatch(path, copyDirs.map(d => `${d}/**`)).length) {
					onSuccess(await getCopy(path, {}, signatures));
				} else if (multimatch(path, symlinks).length) {
					onSuccess(await getSymlinks(path, { nodir: true }, signatures));
				}
				onSuccess(await cleanUp(signatures));
			} catch (err) {
				onError(err);
			}
		})
		.on('unlink', async () => {
			// a file disappeared: prune its build output via the signature cache
			const signatures = await getSignatures();
			onSuccess(await cleanUp(signatures));
		});
	watcher.add(source);
	console.log('Watching files for changes...');
}

module.exports = getWatch;

if (require.main === module) {
	(async () => {
		signatures = await getSignatures();
		getWatch();
	})();
}