port to typescript
This commit is contained in:
parent
23d6447f15
commit
3afa3b5ef9
@ -1,7 +0,0 @@
|
||||
env:
|
||||
es6: true
|
||||
node: true
|
||||
extends: 'eslint:recommended'
|
||||
parserOptions:
|
||||
ecmaVersion: 2017
|
||||
sourceType: module
|
1
.gitignore
vendored
1
.gitignore
vendored
@ -2,3 +2,4 @@
|
||||
/node_modules/
|
||||
/package-lock.json
|
||||
/shrinkwrap.yaml
|
||||
/yarn.lock
|
||||
|
@ -27,7 +27,9 @@
|
||||
},
|
||||
"homepage": "https://code.chor.date/defiler",
|
||||
"devDependencies": {
|
||||
"rollup": "*"
|
||||
"rollup": "*",
|
||||
"typescript": "*",
|
||||
"@types/node": "*"
|
||||
},
|
||||
"scripts": {
|
||||
"dev": "rollup -c -w",
|
||||
|
@ -1,6 +1,25 @@
|
||||
import * as path from 'path';
|
||||
import * as typescript from 'typescript';
|
||||
|
||||
const { compilerOptions } = require('./tsconfig.json');
|
||||
|
||||
export default {
|
||||
input: './src/index.js',
|
||||
input: './src/index.ts',
|
||||
external: name => /^[-_a-z]+$/.test(name),
|
||||
plugins: {
|
||||
resolveId: (importee, importer) => {
|
||||
if (importer && /^[./].*\/[^./]+$/.test(importee)) {
|
||||
return path.resolve(importer, '..', importee + '.ts');
|
||||
}
|
||||
},
|
||||
transform: (code, id) => {
|
||||
const result = typescript.transpileModule(code, { compilerOptions });
|
||||
return {
|
||||
code: result.outputText,
|
||||
map: result.sourceMapText && JSON.parse(result.sourceMapText),
|
||||
};
|
||||
},
|
||||
},
|
||||
output: [
|
||||
{
|
||||
file: './dist/index.cjs.js',
|
||||
|
365
src/Defiler.js
365
src/Defiler.js
@ -1,365 +0,0 @@
|
||||
import { readFile } from './fs.js';
|
||||
import { resolve } from 'path';
|
||||
|
||||
import File from './File.js';
|
||||
import Watcher from './Watcher.js';
|
||||
import * as context from './context.js';
|
||||
|
||||
const _origData = Symbol();
|
||||
const _status = Symbol();
|
||||
const _before = Symbol();
|
||||
const _during = Symbol();
|
||||
const _after = Symbol();
|
||||
const _watchers = Symbol();
|
||||
const _transform = Symbol();
|
||||
const _generators = Symbol();
|
||||
const _resolver = Symbol();
|
||||
const _onerror = Symbol();
|
||||
const _active = Symbol();
|
||||
const _waitingFor = Symbol();
|
||||
const _whenFound = Symbol();
|
||||
const _deps = Symbol();
|
||||
const _queue = Symbol();
|
||||
const _isProcessing = Symbol();
|
||||
const _startWave = Symbol();
|
||||
const _endWave = Symbol();
|
||||
const _enqueue = Symbol();
|
||||
const _processPhysicalFile = Symbol();
|
||||
const _processFile = Symbol();
|
||||
const _callTransform = Symbol();
|
||||
const _processGenerator = Symbol();
|
||||
const _checkWave = Symbol();
|
||||
const _processDependents = Symbol();
|
||||
const _markFound = Symbol();
|
||||
|
||||
export default class Defiler {
|
||||
constructor(...args) {
|
||||
const { transform, generators = [], resolver, onerror } = args.pop();
|
||||
if (typeof transform !== 'function') {
|
||||
throw new TypeError('defiler: transform must be a function');
|
||||
}
|
||||
if (
|
||||
!Array.isArray(generators) ||
|
||||
generators.some(generator => typeof generator !== 'function')
|
||||
) {
|
||||
throw new TypeError('defiler: generators must be an array of functions');
|
||||
}
|
||||
if (resolver && typeof resolver !== 'function') {
|
||||
throw new TypeError('defiler: resolver must be a function');
|
||||
}
|
||||
if (onerror && typeof onerror !== 'function') {
|
||||
throw new TypeError('defiler: onerror must be a function');
|
||||
}
|
||||
// set of original paths for all physical files
|
||||
this.paths = new Set();
|
||||
// original paths -> original file data for all physical files ({ path, stats, bytes, enc })
|
||||
this[_origData] = new Map();
|
||||
// original paths -> transformed files for all physical and virtual files
|
||||
this.files = new Map();
|
||||
// _before, _during, or _after exec has been called
|
||||
this[_status] = _before;
|
||||
// Watcher instances
|
||||
this[_watchers] = args.map(
|
||||
({
|
||||
dir,
|
||||
filter,
|
||||
read = true,
|
||||
enc = 'utf8',
|
||||
pre,
|
||||
watch = true,
|
||||
debounce = 10,
|
||||
}) => {
|
||||
if (typeof dir !== 'string') {
|
||||
throw new TypeError('defiler: dir must be a string');
|
||||
}
|
||||
if (filter && typeof filter !== 'function') {
|
||||
throw new TypeError('defiler: filter must be a function');
|
||||
}
|
||||
if (typeof read !== 'boolean' && typeof read !== 'function') {
|
||||
throw new TypeError('defiler: read must be a boolean or a function');
|
||||
}
|
||||
if (!Buffer.isEncoding(enc) && typeof enc !== 'function') {
|
||||
throw new TypeError(
|
||||
'defiler: enc must be a supported encoding or a function',
|
||||
);
|
||||
}
|
||||
if (pre && typeof pre !== 'function') {
|
||||
throw new TypeError('defiler: pre must be a function');
|
||||
}
|
||||
if (typeof watch !== 'boolean') {
|
||||
throw new TypeError('defiler: watch must be a boolean');
|
||||
}
|
||||
if (typeof debounce !== 'number') {
|
||||
throw new TypeError('defiler: debounce must be a number');
|
||||
}
|
||||
dir = resolve(dir);
|
||||
return new Watcher({ dir, filter, read, enc, pre, watch, debounce });
|
||||
},
|
||||
);
|
||||
// the transform to run on all files
|
||||
this[_transform] = transform;
|
||||
// unique symbols -> registered generators
|
||||
this[_generators] = new Map(
|
||||
generators.map(generator => [Symbol(), generator]),
|
||||
);
|
||||
// (base, path) => path resolver function, used in defiler.get and defiler.add from transform
|
||||
this[_resolver] = resolver;
|
||||
// handler to call when errors occur
|
||||
this[_onerror] = onerror;
|
||||
// original paths of all files currently undergoing transformation and symbols of all generators currently running
|
||||
this[_active] = new Set();
|
||||
// original paths -> number of other files they're currently waiting on to exist
|
||||
this[_waitingFor] = new Map();
|
||||
// original paths -> { promise, resolve, paths } objects for when awaited files become available
|
||||
this[_whenFound] = new Map();
|
||||
// array of [dependent, dependency] pairs, specifying changes to which files should trigger re-processing which other files
|
||||
this[_deps] = [];
|
||||
// queue of pending Watcher events to handle
|
||||
this[_queue] = [];
|
||||
// whether some Watcher event is currently already in the process of being handled
|
||||
this[_isProcessing] = false;
|
||||
}
|
||||
|
||||
// execute everything, and return a promise that resolves when the first wave of processing is complete
|
||||
async exec() {
|
||||
if (this[_status] !== _before) {
|
||||
throw new Error('defiler.exec: cannot call more than once');
|
||||
}
|
||||
this[_status] = _during;
|
||||
this[_isProcessing] = true;
|
||||
const done = this[_startWave]();
|
||||
// init the Watcher instances
|
||||
const files = [];
|
||||
await Promise.all(
|
||||
this[_watchers].map(async watcher => {
|
||||
watcher.on('', event => this[_enqueue](watcher, event));
|
||||
// note that all files are pending transformation
|
||||
await Promise.all(
|
||||
(await watcher.init()).map(async file => {
|
||||
const { path } = file;
|
||||
if (watcher.pre) {
|
||||
await watcher.pre(file);
|
||||
}
|
||||
this.paths.add(file.path);
|
||||
this[_active].add(file.path);
|
||||
files.push([watcher, path, file]);
|
||||
}),
|
||||
);
|
||||
}),
|
||||
);
|
||||
for (const symbol of this[_generators].keys()) {
|
||||
this[_active].add(symbol);
|
||||
}
|
||||
// process each physical file
|
||||
for (const [watcher, path, file] of files) {
|
||||
this[_processPhysicalFile](watcher, path, file);
|
||||
}
|
||||
// process each generator
|
||||
for (const symbol of this[_generators].keys()) {
|
||||
this[_processGenerator](symbol);
|
||||
}
|
||||
// wait and finish up
|
||||
await done;
|
||||
this[_status] = _after;
|
||||
this[_isProcessing] = false;
|
||||
this[_enqueue]();
|
||||
}
|
||||
|
||||
// wait for a file to be available and retrieve it, marking dependencies as appropriate
|
||||
async get(path) {
|
||||
if (Array.isArray(path)) {
|
||||
return Promise.all(path.map(path => this.get(path)));
|
||||
}
|
||||
const current = context.current();
|
||||
path = this.resolve(path);
|
||||
if (typeof path !== 'string') {
|
||||
throw new TypeError('defiler.get: path must be a string');
|
||||
}
|
||||
if (current) {
|
||||
this[_deps].push([current, path]);
|
||||
}
|
||||
if (this[_status] === _during && !this.files.has(path) && current) {
|
||||
this[_waitingFor].set(current, (this[_waitingFor].get(current) || 0) + 1);
|
||||
if (this[_whenFound].has(path)) {
|
||||
const { promise, paths } = this[_whenFound].get(path);
|
||||
paths.push(current);
|
||||
await promise;
|
||||
} else {
|
||||
let resolve;
|
||||
const promise = new Promise(res => (resolve = res));
|
||||
this[_whenFound].set(path, { promise, resolve, paths: [current] });
|
||||
await promise;
|
||||
}
|
||||
}
|
||||
return this.files.get(path);
|
||||
}
|
||||
|
||||
// add a new virtual file
|
||||
add(file) {
|
||||
if (this[_status] === _before) {
|
||||
throw new Error('defiler.add: cannot call before calling exec');
|
||||
}
|
||||
if (typeof file !== 'object') {
|
||||
throw new TypeError('defiler.add: file must be an object');
|
||||
}
|
||||
file.path = this.resolve(file.path);
|
||||
this[_origData].set(file.path, file);
|
||||
this[_processFile](file, 'add');
|
||||
}
|
||||
|
||||
// resolve a given path from the file currently being transformed
|
||||
resolve(path) {
|
||||
return this[_resolver] && typeof context.current() === 'string'
|
||||
? this[_resolver](context.current(), path)
|
||||
: path;
|
||||
}
|
||||
|
||||
// private methods
|
||||
|
||||
// return a Promise that we will resolve at the end of this wave, and save its resolver
|
||||
[_startWave]() {
|
||||
return new Promise(res => (this[_endWave] = res));
|
||||
}
|
||||
|
||||
// add a Watcher event to the queue, and handle queued events
|
||||
async [_enqueue](watcher, event) {
|
||||
if (event) {
|
||||
this[_queue].push([watcher, event]);
|
||||
}
|
||||
if (this[_isProcessing]) {
|
||||
return;
|
||||
}
|
||||
this[_isProcessing] = true;
|
||||
while (this[_queue].length) {
|
||||
const done = this[_startWave]();
|
||||
const [watcher, { event, path, stats }] = this[_queue].shift();
|
||||
const file = { path, stats };
|
||||
if (watcher.pre) {
|
||||
await watcher.pre(file);
|
||||
}
|
||||
if (event === '+') {
|
||||
this[_processPhysicalFile](watcher, path, file);
|
||||
} else if (event === '-') {
|
||||
const { path } = file;
|
||||
const oldFile = this.files.get(path);
|
||||
this.paths.delete(path);
|
||||
this[_origData].delete(path);
|
||||
this.files.delete(path);
|
||||
await this[_callTransform](oldFile, 'delete');
|
||||
this[_processDependents](path);
|
||||
}
|
||||
await done;
|
||||
}
|
||||
this[_isProcessing] = false;
|
||||
}
|
||||
|
||||
// create a file object for a physical file and process it
|
||||
async [_processPhysicalFile]({ dir, read, enc }, path, file) {
|
||||
if (typeof read === 'function') {
|
||||
read = await read({ path, stats: file.stats });
|
||||
}
|
||||
if (read) {
|
||||
file.bytes = await readFile(dir + '/' + path);
|
||||
}
|
||||
if (typeof enc === 'function') {
|
||||
enc = await enc({ path, stats: file.stats, bytes: file.bytes });
|
||||
}
|
||||
file.enc = enc;
|
||||
this.paths.add(file.path);
|
||||
this[_origData].set(file.path, file);
|
||||
await this[_processFile](file, 'read');
|
||||
}
|
||||
|
||||
// transform a file, store it, and process dependents
|
||||
async [_processFile](data, event) {
|
||||
const file = Object.assign(new File(), data);
|
||||
const { path } = file;
|
||||
this[_active].add(path);
|
||||
await this[_callTransform](file, event);
|
||||
this.files.set(path, file);
|
||||
this[this[_status] === _during ? _markFound : _processDependents](path);
|
||||
this[_active].delete(path);
|
||||
this[_checkWave]();
|
||||
}
|
||||
|
||||
// call the transform on a file with the given event string, and handle errors
|
||||
async [_callTransform](file, event) {
|
||||
await null;
|
||||
context.create(file.path);
|
||||
try {
|
||||
await this[_transform]({ file, event });
|
||||
} catch (error) {
|
||||
if (this[_onerror]) {
|
||||
this[_onerror]({ file, event, error });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// run the generator given by the symbol
|
||||
async [_processGenerator](symbol) {
|
||||
this[_active].add(symbol);
|
||||
const generator = this[_generators].get(symbol);
|
||||
await null;
|
||||
context.create(symbol);
|
||||
try {
|
||||
await generator();
|
||||
} catch (error) {
|
||||
if (this[_onerror]) {
|
||||
this[_onerror]({ generator, error });
|
||||
}
|
||||
}
|
||||
this[_active].delete(symbol);
|
||||
this[_checkWave]();
|
||||
}
|
||||
|
||||
// re-process all files that depend on a particular path
|
||||
[_processDependents](path) {
|
||||
const dependents = new Set();
|
||||
for (const [dependent, dependency] of this[_deps]) {
|
||||
if (dependency === path) {
|
||||
dependents.add(dependent);
|
||||
}
|
||||
}
|
||||
this[_deps] = this[_deps].filter(
|
||||
([dependent]) => !dependents.has(dependent),
|
||||
);
|
||||
for (const dependent of dependents) {
|
||||
if (this[_origData].has(dependent)) {
|
||||
this[_processFile](this[_origData].get(dependent), 'retransform');
|
||||
} else if (this[_generators].has(dependent)) {
|
||||
this[_processGenerator](dependent);
|
||||
}
|
||||
}
|
||||
this[_checkWave]();
|
||||
}
|
||||
|
||||
// check whether this wave is complete, and, if not, whether we need to break a deadlock
|
||||
[_checkWave]() {
|
||||
if (!this[_active].size) {
|
||||
this[_endWave]();
|
||||
} else if (
|
||||
this[_status] === _during &&
|
||||
[...this[_active]].every(path => this[_waitingFor].get(path))
|
||||
) {
|
||||
// all pending files are currently waiting for one or more other files to exist
|
||||
// break deadlock: assume all files that have not appeared yet will never do so
|
||||
for (const path of this[_whenFound].keys()) {
|
||||
if (!this[_active].has(path)) {
|
||||
this[_markFound](path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// mark a given awaited file as being found
|
||||
[_markFound](path) {
|
||||
if (this[_whenFound].has(path)) {
|
||||
const { resolve, paths } = this[_whenFound].get(path);
|
||||
for (const path of paths) {
|
||||
this[_waitingFor].set(path, this[_waitingFor].get(path) - 1);
|
||||
}
|
||||
resolve();
|
||||
this[_whenFound].delete(path);
|
||||
}
|
||||
}
|
||||
}
|
426
src/Defiler.ts
Normal file
426
src/Defiler.ts
Normal file
@ -0,0 +1,426 @@
|
||||
import { readFile } from './fs';
|
||||
import { Stats } from 'fs';
|
||||
import { resolve } from 'path';
|
||||
|
||||
import File from './File';
|
||||
import Watcher, { WatcherEvent } from './Watcher';
|
||||
import * as context from './context';
|
||||
|
||||
export default class Defiler {
|
||||
// set of original paths for all physical files
|
||||
paths = new Set<string>();
|
||||
// original paths -> original file data for all physical files ({ path, stats, bytes, enc })
|
||||
private _origData = new Map<string, FileData>();
|
||||
// original paths -> transformed files for all physical and virtual files
|
||||
files = new Map<string, File>();
|
||||
// Before, During, or After exec has been called
|
||||
private _status = Status.Before;
|
||||
// Watcher instances
|
||||
private _watchers: Array<WatcherData>;
|
||||
// the transform to run on all files
|
||||
private _transform: Transform;
|
||||
// unique symbols -> registered generators
|
||||
private _generators: Map<Symbol, Generator>;
|
||||
// (base, path) => path resolver function, used in defiler.get and defiler.add from transform
|
||||
private _resolver: Resolver;
|
||||
// handler to call when errors occur
|
||||
private _onerror: OnError;
|
||||
// original paths of all files currently undergoing transformation and symbols of all generators currently running
|
||||
private _active = new Set<Name>();
|
||||
// original paths -> number of other files they're currently waiting on to exist
|
||||
private _waitingFor = new Map<Name, number>();
|
||||
// original paths -> { promise, resolve, paths } objects for when awaited files become available
|
||||
private _whenFound = new Map<string, WhenFound>();
|
||||
// array of [dependent, dependency] pairs, specifying changes to which files should trigger re-processing which other files
|
||||
private _deps = new Array<[Name, string]>();
|
||||
// queue of pending Watcher events to handle
|
||||
private _queue = new Array<[WatcherData, WatcherEvent]>();
|
||||
// whether some Watcher event is currently already in the process of being handled
|
||||
private _isProcessing = false;
|
||||
// end the current wave
|
||||
private _endWave: () => void = null;
|
||||
|
||||
constructor(...args: any[]) {
|
||||
const { transform, generators = [], resolver, onerror } = <DefilerData>(
|
||||
args.pop()
|
||||
);
|
||||
if (typeof transform !== 'function') {
|
||||
throw new TypeError('defiler: transform must be a function');
|
||||
}
|
||||
if (
|
||||
!Array.isArray(generators) ||
|
||||
generators.some(generator => typeof generator !== 'function')
|
||||
) {
|
||||
throw new TypeError('defiler: generators must be an array of functions');
|
||||
}
|
||||
if (resolver && typeof resolver !== 'function') {
|
||||
throw new TypeError('defiler: resolver must be a function');
|
||||
}
|
||||
if (onerror && typeof onerror !== 'function') {
|
||||
throw new TypeError('defiler: onerror must be a function');
|
||||
}
|
||||
this._watchers = args.map(
|
||||
({
|
||||
dir,
|
||||
filter,
|
||||
read = true,
|
||||
enc = 'utf8',
|
||||
pre,
|
||||
watch = true,
|
||||
debounce = 10,
|
||||
}) => {
|
||||
if (typeof dir !== 'string') {
|
||||
throw new TypeError('defiler: dir must be a string');
|
||||
}
|
||||
if (filter && typeof filter !== 'function') {
|
||||
throw new TypeError('defiler: filter must be a function');
|
||||
}
|
||||
if (typeof read !== 'boolean' && typeof read !== 'function') {
|
||||
throw new TypeError('defiler: read must be a boolean or a function');
|
||||
}
|
||||
if (!Buffer.isEncoding(enc) && typeof enc !== 'function') {
|
||||
throw new TypeError(
|
||||
'defiler: enc must be a supported encoding or a function',
|
||||
);
|
||||
}
|
||||
if (pre && typeof pre !== 'function') {
|
||||
throw new TypeError('defiler: pre must be a function');
|
||||
}
|
||||
if (typeof watch !== 'boolean') {
|
||||
throw new TypeError('defiler: watch must be a boolean');
|
||||
}
|
||||
if (typeof debounce !== 'number') {
|
||||
throw new TypeError('defiler: debounce must be a number');
|
||||
}
|
||||
dir = resolve(dir);
|
||||
return <WatcherData>(
|
||||
new Watcher({ dir, filter, read, enc, pre, watch, debounce })
|
||||
);
|
||||
},
|
||||
);
|
||||
this._transform = transform;
|
||||
this._generators = new Map(
|
||||
generators.map(generator => <[Symbol, Generator]>[Symbol(), generator]),
|
||||
);
|
||||
this._resolver = resolver;
|
||||
this._onerror = onerror;
|
||||
}
|
||||
|
||||
// execute everything, and return a promise that resolves when the first wave of processing is complete
|
||||
async exec(): Promise<void> {
|
||||
if (this._status !== Status.Before) {
|
||||
throw new Error('defiler.exec: cannot call more than once');
|
||||
}
|
||||
this._status = Status.During;
|
||||
this._isProcessing = true;
|
||||
const done = this._startWave();
|
||||
// init the Watcher instances
|
||||
const files = new Array<
|
||||
[WatcherData, string, { path: string; stats: Stats }]
|
||||
>();
|
||||
await Promise.all(
|
||||
this._watchers.map(async watcher => {
|
||||
watcher.on('', event => this._enqueue(watcher, event));
|
||||
// note that all files are pending transformation
|
||||
await Promise.all(
|
||||
(await watcher.init()).map(async file => {
|
||||
const { path } = file;
|
||||
if (watcher.pre) {
|
||||
await watcher.pre(file);
|
||||
}
|
||||
this.paths.add(file.path);
|
||||
this._active.add(file.path);
|
||||
files.push([watcher, path, file]);
|
||||
}),
|
||||
);
|
||||
}),
|
||||
);
|
||||
for (const symbol of this._generators.keys()) {
|
||||
this._active.add(symbol);
|
||||
}
|
||||
// process each physical file
|
||||
for (const [watcher, path, file] of files) {
|
||||
this._processPhysicalFile(watcher, path, file);
|
||||
}
|
||||
// process each generator
|
||||
for (const symbol of this._generators.keys()) {
|
||||
this._processGenerator(symbol);
|
||||
}
|
||||
// wait and finish up
|
||||
await done;
|
||||
this._status = Status.After;
|
||||
this._isProcessing = false;
|
||||
this._enqueue();
|
||||
}
|
||||
|
||||
// wait for a file to be available and retrieve it, marking dependencies as appropriate
|
||||
async get(path: string): Promise<File>;
|
||||
async get(paths: string[]): Promise<File[]>;
|
||||
async get(path: any): Promise<any> {
|
||||
if (Array.isArray(path)) {
|
||||
return Promise.all(path.map(path => this.get(path)));
|
||||
}
|
||||
const current = <Name>context.current();
|
||||
path = this.resolve(path);
|
||||
if (typeof path !== 'string') {
|
||||
throw new TypeError('defiler.get: path must be a string');
|
||||
}
|
||||
if (current) {
|
||||
this._deps.push([current, path]);
|
||||
}
|
||||
if (this._status === Status.During && !this.files.has(path) && current) {
|
||||
this._waitingFor.set(current, (this._waitingFor.get(current) || 0) + 1);
|
||||
if (this._whenFound.has(path)) {
|
||||
const { promise, paths } = this._whenFound.get(path);
|
||||
paths.push(current);
|
||||
await promise;
|
||||
} else {
|
||||
let resolve;
|
||||
const promise = new Promise<void>(res => (resolve = res));
|
||||
this._whenFound.set(path, { promise, resolve, paths: [current] });
|
||||
await promise;
|
||||
}
|
||||
}
|
||||
return this.files.get(path);
|
||||
}
|
||||
|
||||
// add a new virtual file
|
||||
add(file: FileData): void {
|
||||
if (this._status === Status.Before) {
|
||||
throw new Error('defiler.add: cannot call before calling exec');
|
||||
}
|
||||
if (typeof file !== 'object') {
|
||||
throw new TypeError('defiler.add: file must be an object');
|
||||
}
|
||||
file.path = this.resolve(file.path);
|
||||
this._origData.set(file.path, file);
|
||||
this._processFile(file, 'add');
|
||||
}
|
||||
|
||||
// resolve a given path from the file currently being transformed
|
||||
resolve(path: string): string {
|
||||
return this._resolver && typeof context.current() === 'string'
|
||||
? this._resolver(context.current(), path)
|
||||
: path;
|
||||
}
|
||||
|
||||
// private methods
|
||||
|
||||
// return a Promise that we will resolve at the end of this wave, and save its resolver
|
||||
private _startWave(): Promise<void> {
|
||||
return new Promise(res => (this._endWave = res));
|
||||
}
|
||||
|
||||
// add a Watcher event to the queue, and handle queued events
|
||||
private async _enqueue(
|
||||
watcher?: WatcherData,
|
||||
event?: WatcherEvent,
|
||||
): Promise<void> {
|
||||
if (event) {
|
||||
this._queue.push([watcher, event]);
|
||||
}
|
||||
if (this._isProcessing) {
|
||||
return;
|
||||
}
|
||||
this._isProcessing = true;
|
||||
while (this._queue.length) {
|
||||
const done = this._startWave();
|
||||
const [watcher, { event, path, stats }] = this._queue.shift();
|
||||
const file = { path, stats };
|
||||
if (watcher.pre) {
|
||||
await watcher.pre(file);
|
||||
}
|
||||
if (event === '+') {
|
||||
this._processPhysicalFile(watcher, path, file);
|
||||
} else if (event === '-') {
|
||||
const { path } = file;
|
||||
const oldFile = this.files.get(path);
|
||||
this.paths.delete(path);
|
||||
this._origData.delete(path);
|
||||
this.files.delete(path);
|
||||
await this._callTransform(oldFile, 'delete');
|
||||
this._processDependents(path);
|
||||
}
|
||||
await done;
|
||||
}
|
||||
this._isProcessing = false;
|
||||
}
|
||||
|
||||
// create a file object for a physical file and process it
|
||||
private async _processPhysicalFile(
|
||||
{ dir, read, enc }: WatcherData,
|
||||
path: string,
|
||||
file: FileData,
|
||||
): Promise<void> {
|
||||
if (typeof read === 'function') {
|
||||
read = await read({ path, stats: file.stats });
|
||||
}
|
||||
if (read) {
|
||||
file.bytes = await readFile(dir + '/' + path);
|
||||
}
|
||||
if (typeof enc === 'function') {
|
||||
enc = await enc({ path, stats: file.stats, bytes: file.bytes });
|
||||
}
|
||||
file.enc = enc;
|
||||
this.paths.add(file.path);
|
||||
this._origData.set(file.path, file);
|
||||
await this._processFile(file, 'read');
|
||||
}
|
||||
|
||||
// transform a file, store it, and process dependents
|
||||
private async _processFile(data: FileData, event: string): Promise<void> {
|
||||
const file: File = Object.assign(new File(), data);
|
||||
const { path } = file;
|
||||
this._active.add(path);
|
||||
await this._callTransform(file, event);
|
||||
this.files.set(path, file);
|
||||
if (this._status === Status.During) {
|
||||
this._markFound(path);
|
||||
} else {
|
||||
this._processDependents(path);
|
||||
}
|
||||
this._active.delete(path);
|
||||
this._checkWave();
|
||||
}
|
||||
|
||||
// call the transform on a file with the given event string, and handle errors
|
||||
private async _callTransform(file: File, event: string): Promise<void> {
|
||||
await null;
|
||||
context.create(file.path);
|
||||
try {
|
||||
await this._transform({ file, event });
|
||||
} catch (error) {
|
||||
if (this._onerror) {
|
||||
this._onerror({ file, event, error });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// run the generator given by the symbol
|
||||
private async _processGenerator(symbol: Symbol): Promise<void> {
|
||||
this._active.add(symbol);
|
||||
const generator = this._generators.get(symbol);
|
||||
await null;
|
||||
context.create(symbol);
|
||||
try {
|
||||
await generator();
|
||||
} catch (error) {
|
||||
if (this._onerror) {
|
||||
this._onerror({ generator, error });
|
||||
}
|
||||
}
|
||||
this._active.delete(symbol);
|
||||
this._checkWave();
|
||||
}
|
||||
|
||||
// re-process all files that depend on a particular path
|
||||
private _processDependents(path: Name): void {
|
||||
const dependents = new Set<Name>();
|
||||
for (const [dependent, dependency] of this._deps) {
|
||||
if (dependency === path) {
|
||||
dependents.add(dependent);
|
||||
}
|
||||
}
|
||||
this._deps = this._deps.filter(([dependent]) => !dependents.has(dependent));
|
||||
for (const dependent of dependents) {
|
||||
if (this._origData.has(<string>dependent)) {
|
||||
this._processFile(this._origData.get(<string>dependent), 'retransform');
|
||||
} else if (this._generators.has(<Symbol>dependent)) {
|
||||
this._processGenerator(<Symbol>dependent);
|
||||
}
|
||||
}
|
||||
this._checkWave();
|
||||
}
|
||||
|
||||
// check whether this wave is complete, and, if not, whether we need to break a deadlock
|
||||
private _checkWave(): void {
|
||||
if (!this._active.size) {
|
||||
this._endWave();
|
||||
} else if (
|
||||
this._status === Status.During &&
|
||||
[...this._active].every(path => !!this._waitingFor.get(path))
|
||||
) {
|
||||
// all pending files are currently waiting for one or more other files to exist
|
||||
// break deadlock: assume all files that have not appeared yet will never do so
|
||||
for (const path of this._whenFound.keys()) {
|
||||
if (!this._active.has(path)) {
|
||||
this._markFound(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// mark a given awaited file as being found
|
||||
private _markFound(path: string): void {
|
||||
if (this._whenFound.has(path)) {
|
||||
const { resolve, paths } = this._whenFound.get(path);
|
||||
for (const path of paths) {
|
||||
this._waitingFor.set(path, this._waitingFor.get(path) - 1);
|
||||
}
|
||||
resolve();
|
||||
this._whenFound.delete(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
interface DefilerData {
|
||||
transform: Transform;
|
||||
generators?: Generator[];
|
||||
resolver?: Resolver;
|
||||
onerror?: OnError;
|
||||
}
|
||||
|
||||
interface FileData {
|
||||
path: string;
|
||||
[propName: string]: any;
|
||||
}
|
||||
|
||||
interface Generator {
|
||||
(): Promise<void>;
|
||||
}
|
||||
|
||||
type Name = string | Symbol;
|
||||
|
||||
interface OnError {
|
||||
(
|
||||
arg: {
|
||||
file?: any;
|
||||
event?: string;
|
||||
generator?: Generator;
|
||||
error: Error;
|
||||
},
|
||||
): void;
|
||||
}
|
||||
|
||||
interface Resolver {
|
||||
(base: string, path: string): string;
|
||||
}
|
||||
|
||||
const enum Status {
|
||||
Before,
|
||||
During,
|
||||
After,
|
||||
}
|
||||
|
||||
interface Transform {
|
||||
(
|
||||
arg: {
|
||||
file: File;
|
||||
event: string;
|
||||
},
|
||||
): Promise<void>;
|
||||
}
|
||||
|
||||
interface WatcherData extends Watcher {
|
||||
read: boolean | ((arg: { path: string; stats: Stats }) => Promise<boolean>);
|
||||
enc:
|
||||
| string
|
||||
| ((arg: { path: string; stats: Stats; bytes: Buffer }) => Promise<string>);
|
||||
pre: (data: FileData) => Promise<void>;
|
||||
}
|
||||
|
||||
interface WhenFound {
|
||||
promise: Promise<void>;
|
||||
resolve: () => void;
|
||||
paths: Name[];
|
||||
}
|
130
src/File.js
130
src/File.js
@ -1,130 +0,0 @@
|
||||
// Symbol keys for the private backing fields, so consumers only interact
// with File through its getter/setter API.
const PATH = Symbol();
const DIR = Symbol();
const FILENAME = Symbol();
const EXT = Symbol();
const ENC = Symbol();
const BYTES = Symbol();
const TEXT = Symbol();

// A single file being processed: tracks its path (with lazily-computed
// dir/filename/ext views) and its contents as bytes and/or text, converting
// between the two on demand using the configured encoding.
export default class File {
	constructor() {
		// path of file
		this[PATH] = null;
		// cached dir
		this[DIR] = null;
		// cached filename
		this[FILENAME] = null;
		// cached ext
		this[EXT] = null;
		// stats of file
		this.stats = null;
		// encoding
		this[ENC] = 'utf8';
		// Buffer of file contents
		this[BYTES] = null;
		// string of file contents
		this[TEXT] = null;
	}

	get path() {
		return this[PATH];
	}

	set path(path) {
		if (typeof path !== 'string') {
			throw new TypeError('file.path must be a string');
		}
		if (path !== this[PATH]) {
			this[PATH] = path;
			// invalidate the cached views of the path
			this[DIR] = null;
			this[FILENAME] = null;
			this[EXT] = null;
		}
	}

	// everything before the last slash, or '' if there is no slash
	get dir() {
		if (this[DIR] == null) {
			const slash = this[PATH].lastIndexOf('/');
			this[DIR] = slash === -1 ? '' : this[PATH].slice(0, slash);
		}
		return this[DIR];
	}

	set dir(dir) {
		if (typeof dir !== 'string') {
			throw new TypeError('file.dir must be a string');
		}
		this.path = dir ? dir + '/' + this.filename : this.filename;
	}

	// everything after the last slash, or the whole path if there is no slash
	get filename() {
		if (this[FILENAME] == null) {
			const slash = this[PATH].lastIndexOf('/');
			this[FILENAME] = slash === -1 ? this[PATH] : this[PATH].slice(slash + 1);
		}
		return this[FILENAME];
	}

	set filename(filename) {
		if (typeof filename !== 'string') {
			throw new TypeError('file.filename must be a string');
		}
		const prior = this.filename;
		const prefix = prior ? this[PATH].slice(0, -prior.length) : this[PATH];
		this.path = prefix + filename;
	}

	// the trailing '.xyz' of the filename, or '' if the filename has no dot
	get ext() {
		if (this[EXT] == null) {
			const dot = this[PATH].lastIndexOf('.');
			const slash = this[PATH].lastIndexOf('/');
			this[EXT] = dot > -1 && dot > slash ? this[PATH].slice(dot) : '';
		}
		return this[EXT];
	}

	set ext(ext) {
		if (typeof ext !== 'string') {
			throw new TypeError('file.ext must be a string');
		}
		const prior = this.ext;
		const prefix = prior ? this[PATH].slice(0, -prior.length) : this[PATH];
		this.path = prefix + ext;
	}

	get enc() {
		return this[ENC];
	}

	set enc(enc) {
		if (!Buffer.isEncoding(enc)) {
			throw new TypeError('file.enc must be a supported encoding');
		}
		this[ENC] = enc;
	}

	// bytes view of the contents; lazily encoded from text if needed
	get bytes() {
		if (this[BYTES] == null && this[TEXT] != null) {
			this[BYTES] = Buffer.from(this[TEXT], this[ENC]);
		}
		return this[BYTES];
	}

	set bytes(bytes) {
		if (bytes != null && !Buffer.isBuffer(bytes)) {
			throw new TypeError('file.bytes must be a Buffer or null');
		}
		this[BYTES] = bytes;
		// the old text no longer matches the new bytes
		this[TEXT] = null;
	}

	// text view of the contents; lazily decoded from bytes if needed
	get text() {
		if (this[TEXT] == null && this[BYTES] != null) {
			this[TEXT] = this[BYTES].toString(this[ENC]);
		}
		return this[TEXT];
	}

	set text(text) {
		if (text != null && typeof text !== 'string') {
			throw new TypeError('file.text must be a string or null');
		}
		this[TEXT] = text;
		// the old bytes no longer match the new text
		this[BYTES] = null;
	}
}
|
122
src/File.ts
Normal file
122
src/File.ts
Normal file
@ -0,0 +1,122 @@
|
||||
import { Stats } from 'fs';
|
||||
|
||||
export default class File {
|
||||
// path of file
|
||||
private _path: string = null;
|
||||
// cached dir
|
||||
private _dir: string = null;
|
||||
// cached filename
|
||||
private _filename: string = null;
|
||||
// cached ext
|
||||
private _ext: string = null;
|
||||
// stats of file
|
||||
stats: Stats = null;
|
||||
// encoding
|
||||
private _enc: string = 'utf8';
|
||||
// Buffer of file contents
|
||||
private _bytes: Buffer = null;
|
||||
// string of file contents
|
||||
private _text: string = null;
|
||||
|
||||
get path(): string {
|
||||
return this._path;
|
||||
}
|
||||
|
||||
set path(path: string) {
|
||||
if (typeof path !== 'string') {
|
||||
throw new TypeError('file.path must be a string');
|
||||
}
|
||||
if (this._path !== path) {
|
||||
this._path = path;
|
||||
this._dir = this._filename = this._ext = null;
|
||||
}
|
||||
}
|
||||
|
||||
get dir(): string {
|
||||
if (this._dir == null) {
|
||||
const p = this._path.lastIndexOf('/');
|
||||
this._dir = p > -1 ? this._path.slice(0, p) : '';
|
||||
}
|
||||
return this._dir;
|
||||
}
|
||||
|
||||
set dir(dir: string) {
|
||||
if (typeof dir !== 'string') {
|
||||
throw new TypeError('file.dir must be a string');
|
||||
}
|
||||
this.path = (dir ? dir + '/' : '') + this.filename;
|
||||
}
|
||||
|
||||
get filename(): string {
|
||||
if (this._filename == null) {
|
||||
const p = this._path.lastIndexOf('/');
|
||||
this._filename = p > -1 ? this._path.slice(p + 1) : this._path;
|
||||
}
|
||||
return this._filename;
|
||||
}
|
||||
|
||||
set filename(filename) {
|
||||
if (typeof filename !== 'string') {
|
||||
throw new TypeError('file.filename must be a string');
|
||||
}
|
||||
const old = this.filename;
|
||||
this.path =
|
||||
(old ? this._path.slice(0, -old.length) : this._path) + filename;
|
||||
}
|
||||
|
||||
get ext(): string {
|
||||
if (this._ext == null) {
|
||||
const p1 = this._path.lastIndexOf('.');
|
||||
const p2 = this._path.lastIndexOf('/');
|
||||
this._ext = p1 > -1 && p1 > p2 ? this._path.slice(p1) : '';
|
||||
}
|
||||
return this._ext;
|
||||
}
|
||||
|
||||
set ext(ext: string) {
|
||||
if (typeof ext !== 'string') {
|
||||
throw new TypeError('file.ext must be a string');
|
||||
}
|
||||
const old = this.ext;
|
||||
this.path = (old ? this._path.slice(0, -old.length) : this._path) + ext;
|
||||
}
|
||||
|
||||
get enc(): string {
|
||||
return this._enc;
|
||||
}
|
||||
|
||||
set enc(enc: string) {
|
||||
if (!Buffer.isEncoding(enc)) {
|
||||
throw new TypeError('file.enc must be a supported encoding');
|
||||
}
|
||||
this._enc = enc;
|
||||
}
|
||||
|
||||
get bytes(): Buffer {
|
||||
return this._bytes == null && this._text != null
|
||||
? (this._bytes = Buffer.from(this._text, this._enc))
|
||||
: this._bytes;
|
||||
}
|
||||
|
||||
set bytes(bytes: Buffer) {
|
||||
if (bytes != null && !Buffer.isBuffer(bytes)) {
|
||||
throw new TypeError('file.bytes must be a Buffer or null');
|
||||
}
|
||||
this._bytes = bytes;
|
||||
this._text = null;
|
||||
}
|
||||
|
||||
get text(): string {
|
||||
return this._text == null && this._bytes != null
|
||||
? (this._text = this._bytes.toString(this._enc))
|
||||
: this._text;
|
||||
}
|
||||
|
||||
set text(text: string) {
|
||||
if (text != null && typeof text !== 'string') {
|
||||
throw new TypeError('file.text must be a string or null');
|
||||
}
|
||||
this._text = text;
|
||||
this._bytes = null;
|
||||
}
|
||||
}
|
125
src/Watcher.js
125
src/Watcher.js
@ -1,125 +0,0 @@
|
||||
import EventEmitter from 'events';
|
||||
import { stat, readdir } from './fs.js';
|
||||
import { watch } from 'fs';
|
||||
|
||||
const _watchers = Symbol();
|
||||
const _stats = Symbol();
|
||||
const _timeouts = Symbol();
|
||||
const _queue = Symbol();
|
||||
const _isProcessing = Symbol();
|
||||
const _recurse = Symbol();
|
||||
const _handle = Symbol();
|
||||
const _enqueue = Symbol();
|
||||
|
||||
export default class Watcher extends EventEmitter {
|
||||
constructor(data /* = { dir, filter, watch, debounce } */) {
|
||||
super();
|
||||
Object.assign(this, data);
|
||||
// paths of all directories -> FSWatcher instances
|
||||
this[_watchers] = new Map();
|
||||
// paths of all files -> file stats
|
||||
this[_stats] = new Map();
|
||||
// paths of files with pending debounced events -> setTimeout timer ids
|
||||
this[_timeouts] = new Map();
|
||||
// queue of pending FSWatcher events to handle
|
||||
this[_queue] = [];
|
||||
// whether some FSWatcher event is currently already in the process of being handled
|
||||
this[_isProcessing] = false;
|
||||
}
|
||||
|
||||
// recurse directory, get stats, set up FSWatcher instances
|
||||
// returns array of { path, stats }
|
||||
async init() {
|
||||
await this[_recurse](this.dir);
|
||||
return [...this[_stats].entries()].map(([path, stats]) => ({
|
||||
path,
|
||||
stats,
|
||||
}));
|
||||
}
|
||||
|
||||
// recurse a given directory
|
||||
async [_recurse](full) {
|
||||
const path = full.slice(this.dir.length + 1);
|
||||
const stats = await stat(full);
|
||||
if (this.filter && !await this.filter({ path, stats })) {
|
||||
return;
|
||||
}
|
||||
if (stats.isFile()) {
|
||||
this[_stats].set(path, stats);
|
||||
} else if (stats.isDirectory()) {
|
||||
if (this.watch) {
|
||||
this[_watchers].set(path, watch(full, this[_handle].bind(this, full)));
|
||||
}
|
||||
await Promise.all(
|
||||
(await readdir(full)).map(sub => this[_recurse](full + '/' + sub)),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// handle FSWatcher event for given directory
|
||||
[_handle](dir, event, file) {
|
||||
const full = dir + '/' + file;
|
||||
if (this[_timeouts].has(full)) clearTimeout(this[_timeouts].get(full));
|
||||
this[_timeouts].set(
|
||||
full,
|
||||
setTimeout(() => {
|
||||
this[_timeouts].delete(full);
|
||||
this[_enqueue](full);
|
||||
}, this.debounce),
|
||||
);
|
||||
}
|
||||
|
||||
// add an FSWatcher event to the queue, and handle queued events
|
||||
async [_enqueue](full) {
|
||||
this[_queue].push(full);
|
||||
if (this[_isProcessing]) {
|
||||
return;
|
||||
}
|
||||
this[_isProcessing] = true;
|
||||
while (this[_queue].length) {
|
||||
const full = this[_queue].shift();
|
||||
const path = full.slice(this.dir.length + 1);
|
||||
try {
|
||||
const stats = await stat(full);
|
||||
if (this.filter && !await this.filter({ path, stats })) {
|
||||
continue;
|
||||
}
|
||||
if (stats.isFile()) {
|
||||
// note the new/changed file
|
||||
this[_stats].set(path, stats);
|
||||
this.emit('', { event: '+', path, stats });
|
||||
} else if (stats.isDirectory() && !this[_watchers].has(path)) {
|
||||
// note the new directory: start watching it, and report any files in it
|
||||
await this[_recurse](full);
|
||||
for (const [newPath, stats] of this[_stats].entries()) {
|
||||
if (newPath.startsWith(path + '/')) {
|
||||
this.emit('', { event: '+', path: newPath, stats });
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
// probably this was a deleted file/directory
|
||||
if (this[_stats].has(path)) {
|
||||
// note the deleted file
|
||||
this[_stats].delete(path);
|
||||
this.emit('', { event: '-', path });
|
||||
} else if (this[_watchers].has(path)) {
|
||||
// note the deleted directory: stop watching it, and report any files that were in it
|
||||
for (const old of this[_watchers].keys()) {
|
||||
if (old === path || old.startsWith(path + '/')) {
|
||||
this[_watchers].get(old).close();
|
||||
this[_watchers].delete(old);
|
||||
}
|
||||
}
|
||||
for (const old of this[_stats].keys()) {
|
||||
if (old.startsWith(path + '/')) {
|
||||
this[_stats].delete(old);
|
||||
this.emit('', { event: '-', path: old });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
this[_isProcessing] = false;
|
||||
}
|
||||
}
|
129
src/Watcher.ts
Normal file
129
src/Watcher.ts
Normal file
@ -0,0 +1,129 @@
|
||||
import * as EventEmitter from 'events';
|
||||
import { stat, readdir } from './fs';
|
||||
import { watch, FSWatcher, Stats } from 'fs';
|
||||
|
||||
export default class Watcher extends EventEmitter {
|
||||
dir: string;
|
||||
filter: (file: { path: string; stats: Stats }) => boolean;
|
||||
watch: boolean;
|
||||
debounce: Number;
|
||||
// paths of all directories -> FSWatcher instances
|
||||
private _watchers = new Map<string, FSWatcher>();
|
||||
// paths of all files -> file stats
|
||||
private _stats = new Map<string, Stats>();
|
||||
// paths of files with pending debounced events -> setTimeout timer ids
|
||||
private _timeouts = new Map<string, number>();
|
||||
// queue of pending FSWatcher events to handle
|
||||
private _queue = new Array<string>();
|
||||
// whether some FSWatcher event is currently already in the process of being handled
|
||||
private _isProcessing: boolean = false;
|
||||
|
||||
constructor(data: object /* = { dir, filter, watch, debounce } */) {
|
||||
super();
|
||||
Object.assign(this, data);
|
||||
}
|
||||
|
||||
// recurse directory, get stats, set up FSWatcher instances
|
||||
// returns array of { path, stats }
|
||||
async init(): Promise<{ path: string; stats: Stats }[]> {
|
||||
await this._recurse(this.dir);
|
||||
return [...this._stats.entries()].map(([path, stats]) => ({
|
||||
path,
|
||||
stats,
|
||||
}));
|
||||
}
|
||||
|
||||
// recurse a given directory
|
||||
private async _recurse(full: string): Promise<void> {
|
||||
const path = full.slice(this.dir.length + 1);
|
||||
const stats = await stat(full);
|
||||
if (this.filter && !(await this.filter({ path, stats }))) {
|
||||
return;
|
||||
}
|
||||
if (stats.isFile()) {
|
||||
this._stats.set(path, stats);
|
||||
} else if (stats.isDirectory()) {
|
||||
if (this.watch) {
|
||||
this._watchers.set(path, watch(full, this._handle.bind(this, full)));
|
||||
}
|
||||
await Promise.all(
|
||||
(await readdir(full)).map(sub => this._recurse(full + '/' + sub)),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// handle FSWatcher event for given directory
|
||||
private _handle(dir: string, event: string, file: string): void {
|
||||
const full = dir + '/' + file;
|
||||
if (this._timeouts.has(full)) {
|
||||
clearTimeout(this._timeouts.get(full));
|
||||
}
|
||||
this._timeouts.set(
|
||||
full,
|
||||
setTimeout(() => {
|
||||
this._timeouts.delete(full);
|
||||
this._enqueue(full);
|
||||
}, this.debounce),
|
||||
);
|
||||
}
|
||||
|
||||
// add an FSWatcher event to the queue, and handle queued events
|
||||
private async _enqueue(full: string): Promise<void> {
|
||||
this._queue.push(full);
|
||||
if (this._isProcessing) {
|
||||
return;
|
||||
}
|
||||
this._isProcessing = true;
|
||||
while (this._queue.length) {
|
||||
const full = this._queue.shift();
|
||||
const path = full.slice(this.dir.length + 1);
|
||||
try {
|
||||
const stats = await stat(full);
|
||||
if (this.filter && !(await this.filter({ path, stats }))) {
|
||||
continue;
|
||||
}
|
||||
if (stats.isFile()) {
|
||||
// note the new/changed file
|
||||
this._stats.set(path, stats);
|
||||
this.emit('', { event: '+', path, stats });
|
||||
} else if (stats.isDirectory() && !this._watchers.has(path)) {
|
||||
// note the new directory: start watching it, and report any files in it
|
||||
await this._recurse(full);
|
||||
for (const [newPath, stats] of this._stats.entries()) {
|
||||
if (newPath.startsWith(path + '/')) {
|
||||
this.emit('', { event: '+', path: newPath, stats });
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
// probably this was a deleted file/directory
|
||||
if (this._stats.has(path)) {
|
||||
// note the deleted file
|
||||
this._stats.delete(path);
|
||||
this.emit('', { event: '-', path });
|
||||
} else if (this._watchers.has(path)) {
|
||||
// note the deleted directory: stop watching it, and report any files that were in it
|
||||
for (const old of this._watchers.keys()) {
|
||||
if (old === path || old.startsWith(path + '/')) {
|
||||
this._watchers.get(old).close();
|
||||
this._watchers.delete(old);
|
||||
}
|
||||
}
|
||||
for (const old of this._stats.keys()) {
|
||||
if (old.startsWith(path + '/')) {
|
||||
this._stats.delete(old);
|
||||
this.emit('', { event: '-', path: old });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
this._isProcessing = false;
|
||||
}
|
||||
}
|
||||
|
||||
export interface WatcherEvent {
|
||||
event: string;
|
||||
path: string;
|
||||
stats?: Stats;
|
||||
}
|
@ -1,12 +0,0 @@
|
||||
import { createHook, executionAsyncId } from 'async_hooks';
|
||||
|
||||
const contexts = new Map();
|
||||
|
||||
createHook({
|
||||
init: (id, _, trigger) => contexts.set(id, contexts.get(trigger)),
|
||||
destroy: id => contexts.delete(id),
|
||||
}).enable();
|
||||
|
||||
export const create = data => contexts.set(executionAsyncId(), data);
|
||||
|
||||
export const current = () => contexts.get(executionAsyncId());
|
14
src/context.ts
Normal file
14
src/context.ts
Normal file
@ -0,0 +1,14 @@
|
||||
import { createHook, executionAsyncId } from 'async_hooks';
|
||||
|
||||
const contexts = new Map<Number, any>();
|
||||
|
||||
createHook({
|
||||
init: (id, _, trigger) => contexts.set(id, contexts.get(trigger)),
|
||||
destroy: id => contexts.delete(id),
|
||||
}).enable();
|
||||
|
||||
export const create = (data: any): void => {
|
||||
contexts.set(executionAsyncId(), data);
|
||||
};
|
||||
|
||||
export const current = (): any => contexts.get(executionAsyncId());
|
@ -1,2 +0,0 @@
|
||||
export { default as File } from './File.js';
|
||||
export { default as Defiler } from './Defiler.js';
|
2
src/index.ts
Normal file
2
src/index.ts
Normal file
@ -0,0 +1,2 @@
|
||||
export { default as File } from './File';
|
||||
export { default as Defiler } from './Defiler';
|
11
tsconfig.json
Normal file
11
tsconfig.json
Normal file
@ -0,0 +1,11 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"noImplicitAny": true,
|
||||
"noImplicitThis": true,
|
||||
"noUnusedLocals": true,
|
||||
"removeComments": true,
|
||||
"sourceMap": true,
|
||||
"target": "esnext"
|
||||
},
|
||||
"include": ["src"]
|
||||
}
|
Loading…
Reference in New Issue