This commit is contained in:
walkpan
2024-08-18 23:38:55 +08:00
parent e8dbb9bab3
commit 8f57f57c1d
1004 changed files with 234067 additions and 16087 deletions

View File

@@ -0,0 +1,30 @@
/* CONSTS */

// Encoding applied when writing string data and no encoding is provided
const DEFAULT_ENCODING = 'utf8';
// Default permission bits for files created by the writers
const DEFAULT_FILE_MODE = 0o666;
// Default permission bits for folders created via mkdir (recursive)
const DEFAULT_FOLDER_MODE = 0o777;
// Shared empty option objects used as parameter defaults
const DEFAULT_READ_OPTIONS = {};
const DEFAULT_WRITE_OPTIONS = {};
// Retry budgets in milliseconds: async ops get a generous window, sync ops a short one
const DEFAULT_TIMEOUT_ASYNC = 5000;
const DEFAULT_TIMEOUT_SYNC = 100;
// process.getuid only exists on POSIX platforms
const IS_POSIX = !!process.getuid;
// uid 0 means root, hence the negation; false where getuid is unavailable (Windows)
const IS_USER_ROOT = process.getuid ? !process.getuid () : false;
const LIMIT_BASENAME_LENGTH = 128; //TODO: fetch the real limit from the filesystem //TODO: fetch the whole-path length limit too
const LIMIT_FILES_DESCRIPTORS = 10000; //TODO: fetch the real limit from the filesystem
// Shared no-op, used as a default error handler/callback
const NOOP = () => {};

/* EXPORT */

export {DEFAULT_ENCODING, DEFAULT_FILE_MODE, DEFAULT_FOLDER_MODE, DEFAULT_READ_OPTIONS, DEFAULT_WRITE_OPTIONS, DEFAULT_TIMEOUT_ASYNC, DEFAULT_TIMEOUT_SYNC, IS_POSIX, IS_USER_ROOT, LIMIT_BASENAME_LENGTH, LIMIT_FILES_DESCRIPTORS, NOOP};

View File

@@ -0,0 +1,270 @@
/* IMPORT */
import * as path from 'path';
import {DEFAULT_ENCODING, DEFAULT_FILE_MODE, DEFAULT_FOLDER_MODE, DEFAULT_READ_OPTIONS, DEFAULT_WRITE_OPTIONS, DEFAULT_TIMEOUT_ASYNC, DEFAULT_TIMEOUT_SYNC, IS_POSIX} from './consts';
import FS from './utils/fs';
import Lang from './utils/lang';
import Scheduler from './utils/scheduler';
import Temp from './utils/temp';
import {Callback, Data, Disposer, Path, ReadOptions, WriteOptions} from './types';
/* ATOMICALLY */
// Reads a file, retrying transient fs errors until the timeout budget is exhausted.
// A string `options` argument is shorthand for `{ encoding: options }`.
function readFile ( filePath: Path, options: string | ReadOptions & { encoding: string } ): Promise<string>;
function readFile ( filePath: Path, options?: ReadOptions ): Promise<Buffer>;
function readFile ( filePath: Path, options: string | ReadOptions = DEFAULT_READ_OPTIONS ): Promise<Buffer | string> {
  if ( Lang.isString ( options ) ) {
    return readFile ( filePath, { encoding: options } );
  }
  const budget = options.timeout ?? DEFAULT_TIMEOUT_ASYNC;
  const deadline = Date.now () + budget;
  const read = FS.readFileRetry ( deadline );
  return read ( filePath, options );
}
// Synchronous variant of readFile: same retry-until-deadline behavior, shorter default budget.
function readFileSync ( filePath: Path, options: string | ReadOptions & { encoding: string } ): string;
function readFileSync ( filePath: Path, options?: ReadOptions ): Buffer;
function readFileSync ( filePath: Path, options: string | ReadOptions = DEFAULT_READ_OPTIONS ): Buffer | string {
  if ( Lang.isString ( options ) ) {
    return readFileSync ( filePath, { encoding: options } );
  }
  const budget = options.timeout ?? DEFAULT_TIMEOUT_SYNC;
  const deadline = Date.now () + budget;
  const read = FS.readFileSyncRetry ( deadline );
  return read ( filePath, options );
}
// Writes a file atomically. Accepts either (path, data, options?, callback?) or the
// Node-style (path, data, callback) shape; the returned promise settles when the write does.
const writeFile = ( filePath: Path, data: Data, options?: string | WriteOptions | Callback, callback?: Callback ): Promise<void> => {
  if ( Lang.isFunction ( options ) ) {
    // (path, data, callback) shape: shift the callback into place
    return writeFile ( filePath, data, DEFAULT_WRITE_OPTIONS, options );
  }
  const result = writeFileAsync ( filePath, data, options );
  if ( callback ) result.then ( callback, callback ); // callback receives the error, if any
  return result;
};
// Writes a file atomically: data goes to a unique temp file next to the target,
// is flushed, then renamed over the target, so concurrent readers never observe
// a partial write. Writes to the same path are serialized via Scheduler.
const writeFileAsync = async ( filePath: Path, data: Data, options: string | WriteOptions = DEFAULT_WRITE_OPTIONS ): Promise<void> => {
  // A bare string option is shorthand for { encoding }
  if ( Lang.isString ( options ) ) return writeFileAsync ( filePath, data, { encoding: options } );
  // Absolute deadline shared by every retriable fs call below
  const timeout = Date.now () + ( options.timeout ?? DEFAULT_TIMEOUT_ASYNC );
  let schedulerCustomDisposer: Disposer | null = null,
      schedulerDisposer: Disposer | null = null,
      tempDisposer: Disposer | null = null,
      tempPath: string | null = null,
      fd: number | null = null;
  try {
    // Acquire the user-provided lock first (if any), then the internal per-path lock
    if ( options.schedule ) schedulerCustomDisposer = await options.schedule ( filePath );
    schedulerDisposer = await Scheduler.schedule ( filePath );
    // Resolve symlinks (best-effort) so the rename targets the real file
    filePath = await FS.realpathAttempt ( filePath ) || filePath;
    [tempPath, tempDisposer] = Temp.get ( filePath, options.tmpCreate || Temp.create, !( options.tmpPurge === false ) );
    // Unless explicitly provided, mirror the target's existing ownership/mode
    const useStatChown = IS_POSIX && Lang.isUndefined ( options.chown ),
          useStatMode = Lang.isUndefined ( options.mode );
    if ( useStatChown || useStatMode ) {
      const stat = await FS.statAttempt ( filePath );
      if ( stat ) {
        options = { ...options }; // copy before mutating: the caller's object must stay untouched
        if ( useStatChown ) options.chown = { uid: stat.uid, gid: stat.gid };
        if ( useStatMode ) options.mode = stat.mode;
      }
    }
    // Ensure the parent folder exists (best-effort; open below surfaces real failures)
    const parentPath = path.dirname ( filePath );
    await FS.mkdirAttempt ( parentPath, {
      mode: DEFAULT_FOLDER_MODE,
      recursive: true
    });
    fd = await FS.openRetry ( timeout )( tempPath, 'w', options.mode || DEFAULT_FILE_MODE );
    if ( options.tmpCreated ) options.tmpCreated ( tempPath );
    if ( Lang.isString ( data ) ) {
      await FS.writeRetry ( timeout )( fd, data, 0, options.encoding || DEFAULT_ENCODING );
    } else if ( !Lang.isUndefined ( data ) ) {
      await FS.writeRetry ( timeout )( fd, data, 0, data.length, 0 );
    }
    // Flush to disk before the rename; can be disabled, or fired without awaiting
    if ( options.fsync !== false ) {
      if ( options.fsyncWait !== false ) {
        await FS.fsyncRetry ( timeout )( fd );
      } else {
        FS.fsyncAttempt ( fd );
      }
    }
    await FS.closeRetry ( timeout )( fd );
    fd = null; // flag as closed so the finally block won't close it again
    if ( options.chown ) await FS.chownAttempt ( tempPath, options.chown.uid, options.chown.gid );
    if ( options.mode ) await FS.chmodAttempt ( tempPath, options.mode );
    try {
      // The atomic step: rename the fully-written temp file over the target
      await FS.renameRetry ( timeout )( tempPath, filePath );
    } catch ( error ) {
      if ( error.code !== 'ENAMETOOLONG' ) throw error;
      // Path too long for the filesystem: retry against a truncated destination
      await FS.renameRetry ( timeout )( tempPath, Temp.truncate ( filePath ) );
    }
    tempDisposer ();
    tempPath = null; // flag as consumed so the finally block won't purge it
  } finally {
    // Best-effort cleanup: close a leaked fd, purge a leftover temp file, release locks
    if ( fd ) await FS.closeAttempt ( fd );
    if ( tempPath ) Temp.purge ( tempPath );
    if ( schedulerCustomDisposer ) schedulerCustomDisposer ();
    if ( schedulerDisposer ) schedulerDisposer ();
  }
};
// Synchronous variant of writeFileAsync: same temp-file + rename strategy, but
// without the per-path scheduler (synchronous execution cannot interleave anyway)
const writeFileSync = ( filePath: Path, data: Data, options: string | WriteOptions = DEFAULT_WRITE_OPTIONS ): void => {
  // A bare string option is shorthand for { encoding }
  if ( Lang.isString ( options ) ) return writeFileSync ( filePath, data, { encoding: options } );
  // Absolute deadline shared by every retriable fs call below
  const timeout = Date.now () + ( options.timeout ?? DEFAULT_TIMEOUT_SYNC );
  let tempDisposer: Disposer | null = null,
      tempPath: string | null = null,
      fd: number | null = null;
  try {
    // Resolve symlinks (best-effort) so the rename targets the real file
    filePath = FS.realpathSyncAttempt ( filePath ) || filePath;
    [tempPath, tempDisposer] = Temp.get ( filePath, options.tmpCreate || Temp.create, !( options.tmpPurge === false ) );
    // Unless explicitly provided, mirror the target's existing ownership/mode
    const useStatChown = IS_POSIX && Lang.isUndefined ( options.chown ),
          useStatMode = Lang.isUndefined ( options.mode );
    if ( useStatChown || useStatMode ) {
      const stat = FS.statSyncAttempt ( filePath );
      if ( stat ) {
        options = { ...options }; // copy before mutating: the caller's object must stay untouched
        if ( useStatChown ) options.chown = { uid: stat.uid, gid: stat.gid };
        if ( useStatMode ) options.mode = stat.mode;
      }
    }
    // Ensure the parent folder exists (best-effort; open below surfaces real failures)
    const parentPath = path.dirname ( filePath );
    FS.mkdirSyncAttempt ( parentPath, {
      mode: DEFAULT_FOLDER_MODE,
      recursive: true
    });
    fd = FS.openSyncRetry ( timeout )( tempPath, 'w', options.mode || DEFAULT_FILE_MODE );
    if ( options.tmpCreated ) options.tmpCreated ( tempPath );
    if ( Lang.isString ( data ) ) {
      FS.writeSyncRetry ( timeout )( fd, data, 0, options.encoding || DEFAULT_ENCODING );
    } else if ( !Lang.isUndefined ( data ) ) {
      FS.writeSyncRetry ( timeout )( fd, data, 0, data.length, 0 );
    }
    // Flush to disk before the rename; can be disabled, or fired without waiting
    if ( options.fsync !== false ) {
      if ( options.fsyncWait !== false ) {
        FS.fsyncSyncRetry ( timeout )( fd );
      } else {
        FS.fsyncAttempt ( fd ); // fire-and-forget async attempt, intentionally not awaited
      }
    }
    FS.closeSyncRetry ( timeout )( fd );
    fd = null; // flag as closed so the finally block won't close it again
    if ( options.chown ) FS.chownSyncAttempt ( tempPath, options.chown.uid, options.chown.gid );
    if ( options.mode ) FS.chmodSyncAttempt ( tempPath, options.mode );
    try {
      // The atomic step: rename the fully-written temp file over the target
      FS.renameSyncRetry ( timeout )( tempPath, filePath );
    } catch ( error ) {
      if ( error.code !== 'ENAMETOOLONG' ) throw error;
      // Path too long for the filesystem: retry against a truncated destination
      FS.renameSyncRetry ( timeout )( tempPath, Temp.truncate ( filePath ) );
    }
    tempDisposer ();
    tempPath = null; // flag as consumed so the finally block won't purge it
  } finally {
    // Best-effort cleanup: close a leaked fd, purge a leftover temp file
    if ( fd ) FS.closeSyncAttempt ( fd );
    if ( tempPath ) Temp.purge ( tempPath );
  }
};

/* EXPORT */

export {readFile, readFileSync, writeFile, writeFileSync};

View File

@@ -0,0 +1,37 @@
/* TYPES */

// Node-style completion callback: receives an error or nothing
type Callback = ( error: Exception | void ) => any;
// Payload accepted by the write functions; undefined writes an empty file
type Data = Buffer | string | undefined;
// Function that releases a previously acquired resource/reservation
type Disposer = () => void;
type Exception = NodeJS.ErrnoException;
// Generic function shape, used by the wrapping/currying helpers
type FN<Arguments extends any[] = any[], Return = any> = ( ...args: Arguments ) => Return;
type Path = string;
type ReadOptions = {
  encoding?: string | null,
  mode?: string | number | false,
  timeout?: number // retry budget in milliseconds
};
type WriteOptions = {
  chown?: { gid: number, uid: number } | false, // ownership for the temp file; defaults to the target's stat on POSIX
  encoding?: string | null,
  fsync?: boolean, // whether to fsync before renaming (default true)
  fsyncWait?: boolean, // whether to wait for the fsync (default true)
  mode?: string | number | false,
  schedule?: ( filePath: string ) => Promise<Disposer>, // custom serialization hook, acquired before the internal lock
  timeout?: number, // retry budget in milliseconds
  tmpCreate?: ( filePath: string ) => string, // custom temp-path factory
  tmpCreated?: ( filePath: string ) => any, // notified once the temp file has been opened
  tmpPurge?: boolean // whether leftover temp files get deleted (default true)
};

/* EXPORT */

export {Callback, Data, Disposer, Exception, FN, Path, ReadOptions, WriteOptions};

View File

@@ -0,0 +1,42 @@
/* IMPORT */
import {NOOP} from '../consts';
import {Exception, FN} from '../types';
/* ATTEMPTIFY */
//TODO: Maybe publish this as a standalone package
//FIXME: The type castings here aren't exactly correct
// Wraps an async function so that rejections are routed to `onError` instead of
// propagating; with the default NOOP handler failures resolve to undefined.
const attemptifyAsync = <T extends FN> ( fn: T, onError: FN<[Exception]> = NOOP ): T => {
  const attempt = ( ...args: any[] ) => fn ( ...args ).catch ( onError );
  return attempt as T;
};
// Wraps a sync function so that thrown errors are routed to `onError` instead of
// propagating; with the default NOOP handler failures return undefined.
const attemptifySync = <T extends FN> ( fn: T, onError: FN<[Exception]> = NOOP ): T => {
  const attempt = ( ...args: any[] ) => {
    try {
      return fn ( ...args );
    } catch ( error ) {
      return onError ( error );
    }
  };
  return attempt as T;
};
/* EXPORT */
export {attemptifyAsync, attemptifySync};

View File

@@ -0,0 +1,51 @@
/* IMPORT */
import * as fs from 'fs';
import {promisify} from 'util';
import {attemptifyAsync, attemptifySync} from './attemptify';
import Handlers from './fs_handlers';
import {retryifyAsync, retryifySync} from './retryify';
/* FS */
// Wrapped fs primitives used throughout the library:
// - `*Attempt` variants never throw/reject on their own: errors go to the supplied
//   handler (or are swallowed by the default NOOP)
// - `*Retry` variants are curried on a deadline timestamp and retry errors that
//   Handlers.isRetriableError deems transient, until that deadline passes
const FS = {
  // Async, error-tolerant
  chmodAttempt: attemptifyAsync ( promisify ( fs.chmod ), Handlers.onChangeError ),
  chownAttempt: attemptifyAsync ( promisify ( fs.chown ), Handlers.onChangeError ),
  closeAttempt: attemptifyAsync ( promisify ( fs.close ) ),
  fsyncAttempt: attemptifyAsync ( promisify ( fs.fsync ) ),
  mkdirAttempt: attemptifyAsync ( promisify ( fs.mkdir ) ),
  realpathAttempt: attemptifyAsync ( promisify ( fs.realpath ) ),
  statAttempt: attemptifyAsync ( promisify ( fs.stat ) ),
  unlinkAttempt: attemptifyAsync ( promisify ( fs.unlink ) ),
  // Async, retried until a deadline
  closeRetry: retryifyAsync ( promisify ( fs.close ), Handlers.isRetriableError ),
  fsyncRetry: retryifyAsync ( promisify ( fs.fsync ), Handlers.isRetriableError ),
  openRetry: retryifyAsync ( promisify ( fs.open ), Handlers.isRetriableError ),
  readFileRetry: retryifyAsync ( promisify ( fs.readFile ), Handlers.isRetriableError ),
  renameRetry: retryifyAsync ( promisify ( fs.rename ), Handlers.isRetriableError ),
  statRetry: retryifyAsync ( promisify ( fs.stat ), Handlers.isRetriableError ),
  writeRetry: retryifyAsync ( promisify ( fs.write ), Handlers.isRetriableError ),
  // Sync, error-tolerant
  chmodSyncAttempt: attemptifySync ( fs.chmodSync, Handlers.onChangeError ),
  chownSyncAttempt: attemptifySync ( fs.chownSync, Handlers.onChangeError ),
  closeSyncAttempt: attemptifySync ( fs.closeSync ),
  mkdirSyncAttempt: attemptifySync ( fs.mkdirSync ),
  realpathSyncAttempt: attemptifySync ( fs.realpathSync ),
  statSyncAttempt: attemptifySync ( fs.statSync ),
  unlinkSyncAttempt: attemptifySync ( fs.unlinkSync ),
  // Sync, retried until a deadline
  closeSyncRetry: retryifySync ( fs.closeSync, Handlers.isRetriableError ),
  fsyncSyncRetry: retryifySync ( fs.fsyncSync, Handlers.isRetriableError ),
  openSyncRetry: retryifySync ( fs.openSync, Handlers.isRetriableError ),
  readFileSyncRetry: retryifySync ( fs.readFileSync, Handlers.isRetriableError ),
  renameSyncRetry: retryifySync ( fs.renameSync, Handlers.isRetriableError ),
  statSyncRetry: retryifySync ( fs.statSync, Handlers.isRetriableError ),
  writeSyncRetry: retryifySync ( fs.writeSync, Handlers.isRetriableError )
};

/* EXPORT */

export default FS;

View File

@@ -0,0 +1,45 @@
/* IMPORT */
import {IS_USER_ROOT} from '../consts';
import {Exception} from '../types';
/* FS HANDLERS */
// Error-classification policies shared by the fs wrappers.
const Handlers = {
  // Whether a failed chmod/chown may be safely ignored: the syscall may be
  // unimplemented (ENOSYS), or a non-root user may simply lack the rights
  isChangeErrorOk: ( error: Exception ): boolean => { //URL: https://github.com/isaacs/node-graceful-fs/blob/master/polyfills.js#L315-L342
    const {code} = error;
    if ( code === 'ENOSYS' ) return true;
    if ( !IS_USER_ROOT && ( code === 'EINVAL' || code === 'EPERM' ) ) return true;
    return false;
  },
  // Whether an error is transient and worth retrying: descriptor exhaustion,
  // busy resources, permission races.
  // FIX: 'EACCES' is the actual POSIX/Node code and was missing from the list;
  // the misspelled 'EACCESS'/'EACCS' are kept for backward compatibility
  isRetriableError: ( error: Exception ): boolean => {
    const {code} = error;
    if ( code === 'EMFILE' || code === 'ENFILE' || code === 'EAGAIN' || code === 'EBUSY' || code === 'EACCES' || code === 'EACCESS' || code === 'EACCS' || code === 'EPERM' ) return true;
    return false;
  },
  // Swallows benign chmod/chown failures, rethrows everything else
  onChangeError: ( error: Exception ): void => {
    if ( Handlers.isChangeErrorOk ( error ) ) return;
    throw error;
  }
};

/* EXPORT */

export default Handlers;

View File

@@ -0,0 +1,28 @@
/* LANG */
// Minimal runtime type guards shared across the library
const Lang = {
  isFunction: ( value: any ): value is Function => typeof value === 'function',
  isString: ( value: any ): value is string => typeof value === 'string',
  isUndefined: ( value: any ): value is undefined => typeof value === 'undefined'
};

/* EXPORT */

export default Lang;

View File

@@ -0,0 +1,78 @@
/* IMPORT */
import {Exception, FN} from '../types';
import RetryfyQueue from './retryify_queue';
/* RETRYIFY */
// Wraps an async fn into a deadline-curried version: callers first bind an absolute
// timestamp, then invoke. Transient errors (per isRetriableError) are retried with a
// random 100-500ms backoff until the deadline passes. Every attempt reserves a slot
// in RetryfyQueue so concurrent retries cannot exhaust file descriptors.
const retryifyAsync = <T extends FN> ( fn: T, isRetriableError: FN<[Exception], boolean | void> ): FN<[number], T> => {
  return function ( timestamp: number ) {
    return function attempt () {
      // NOTE: the arrow callbacks below deliberately close over `attempt`'s
      // `arguments` object (arrows don't bind their own `arguments`)
      return RetryfyQueue.schedule ().then ( cleanup => {
        return fn.apply ( undefined, arguments ).then ( result => {
          cleanup (); // release the queue slot on success
          return result;
        }, error => {
          cleanup (); // release the queue slot before deciding whether to retry
          if ( Date.now () >= timestamp ) throw error; // budget exhausted: give up
          if ( isRetriableError ( error ) ) {
            // Randomized backoff, then recurse with the original arguments
            const delay = Math.round ( 100 + ( 400 * Math.random () ) ),
                  delayPromise = new Promise ( resolve => setTimeout ( resolve, delay ) );
            return delayPromise.then ( () => attempt.apply ( undefined, arguments ) );
          }
          throw error; // not retriable: propagate
        });
      });
    } as T;
  };
};
// Wraps a sync fn into a deadline-curried version: callers first bind an absolute
// timestamp, then invoke. Errors deemed transient by `isRetriableError` are retried
// immediately until the deadline passes; everything else is rethrown at once.
const retryifySync = <T extends FN> ( fn: T, isRetriableError: FN<[Exception], boolean | void> ): FN<[number], T> => {
  return function ( timestamp: number ) {
    return function attempt () {
      while ( true ) {
        try {
          return fn.apply ( undefined, arguments );
        } catch ( error ) {
          const expired = Date.now () > timestamp;
          if ( expired || !isRetriableError ( error ) ) throw error;
          // retriable and within budget: loop and try again
        }
      }
    } as T;
  };
};
/* EXPORT */
export {retryifyAsync, retryifySync};

View File

@@ -0,0 +1,95 @@
/* IMPORT */
import {LIMIT_FILES_DESCRIPTORS} from '../consts';
/* RETRYIFY QUEUE */
// Global throttle for retried fs operations: caps how many callers are "active"
// at once, parking the overflow in a waiting queue that is drained on an interval.
const RetryfyQueue = {
  interval: 25, // ms between drain ticks
  intervalId: <NodeJS.Timeout | undefined> undefined, // set only while the drain timer runs
  limit: LIMIT_FILES_DESCRIPTORS, // max size of the active set
  queueActive: new Set<Function> (),
  queueWaiting: new Set<Function> (),
  // Starts the periodic drain, if not already running
  init: (): void => {
    if ( RetryfyQueue.intervalId ) return;
    RetryfyQueue.intervalId = setInterval ( RetryfyQueue.tick, RetryfyQueue.interval );
  },
  // Stops the periodic drain
  reset: (): void => {
    if ( !RetryfyQueue.intervalId ) return;
    clearInterval ( RetryfyQueue.intervalId );
    delete RetryfyQueue.intervalId;
  },
  add: ( fn: Function ): void => {
    RetryfyQueue.queueWaiting.add ( fn );
    if ( RetryfyQueue.queueActive.size < ( RetryfyQueue.limit / 2 ) ) { // Active queue not under pressure, executing immediately
      RetryfyQueue.tick ();
    } else {
      RetryfyQueue.init ();
    }
  },
  // Drops a callback from both queues, whichever it is currently in
  remove: ( fn: Function ): void => {
    RetryfyQueue.queueWaiting.delete ( fn );
    RetryfyQueue.queueActive.delete ( fn );
  },
  // Resolves once capacity is available; the resolved value is a cleanup function
  // the caller must invoke when its operation finishes
  schedule: (): Promise<Function> => {
    return new Promise ( resolve => {
      const cleanup = () => RetryfyQueue.remove ( resolver );
      const resolver = () => resolve ( cleanup );
      RetryfyQueue.add ( resolver );
    });
  },
  // Promotes waiting callbacks into the active set until the limit is reached;
  // stops the timer once nothing is waiting
  tick: (): void => {
    if ( RetryfyQueue.queueActive.size >= RetryfyQueue.limit ) return;
    if ( !RetryfyQueue.queueWaiting.size ) return RetryfyQueue.reset ();
    for ( const fn of RetryfyQueue.queueWaiting ) {
      if ( RetryfyQueue.queueActive.size >= RetryfyQueue.limit ) break;
      RetryfyQueue.queueWaiting.delete ( fn );
      RetryfyQueue.queueActive.add ( fn );
      fn ();
    }
  }
};

/* EXPORT */

export default RetryfyQueue;

View File

@@ -0,0 +1,60 @@
/* IMPORT */
import {Disposer} from '../types';
/* VARIABLES */
// Per-path FIFO queues of pending resolvers; a path has an entry only while busy
const Queues: Record<string, Function[] | undefined> = {};

/* SCHEDULER */

//TODO: Maybe publish this as a standalone package

// Serializes jobs per id: `schedule` resolves with a disposer once the id is free,
// and invoking that disposer hands the id over to the next waiter (if any)
const Scheduler = {
  // Pops the finished job off the queue and starts the next one, if present
  next: ( id: string ): void => {
    const pending = Queues[id];
    if ( !pending ) return;
    pending.shift ();
    if ( !pending.length ) {
      delete Queues[id];
      return;
    }
    pending[0] ( () => Scheduler.next ( id ) );
  },
  // Queues a job for `id`; resolves immediately when nothing else is queued
  schedule: ( id: string ): Promise<Disposer> => {
    return new Promise ( resolve => {
      const pending = Queues[id] || ( Queues[id] = [] );
      pending.push ( resolve );
      if ( pending.length === 1 ) resolve ( () => Scheduler.next ( id ) );
    });
  }
};

/* EXPORT */

export default Scheduler;

View File

@@ -0,0 +1,97 @@
/* IMPORT */
import * as path from 'path';
import {LIMIT_BASENAME_LENGTH} from '../consts';
import {Disposer} from '../types';
import FS from './fs';
/* TEMP */
//TODO: Maybe publish this as a standalone package
// Registry of temp files created by this process, so colliding names are avoided
// and leftover files can be purged on failure or at exit
const Temp = {
  store: <Record<string, boolean>> {}, // filePath => purge
  // Default temp-path factory: sibling of the target with a ".tmp-<timestamp><hex>" suffix
  create: ( filePath: string ): string => {
    const randomness = `000000${Math.floor ( Math.random () * 16777215 ).toString ( 16 )}`.slice ( -6 ), // 6 random-enough hex characters
          timestamp = Date.now ().toString ().slice ( -10 ), // 10 precise timestamp digits
          prefix = 'tmp-',
          suffix = `.${prefix}${timestamp}${randomness}`,
          tempPath = `${filePath}${suffix}`;
    return tempPath;
  },
  // Reserves a unique (truncated) temp path; returns it with a disposer that releases
  // the reservation without deleting the file
  get: ( filePath: string, creator: ( filePath: string ) => string, purge: boolean = true ): [string, Disposer] => {
    const tempPath = Temp.truncate ( creator ( filePath ) );
    if ( tempPath in Temp.store ) return Temp.get ( filePath, creator, purge ); // Collision found, try again
    Temp.store[tempPath] = purge;
    const disposer = () => delete Temp.store[tempPath];
    return [tempPath, disposer];
  },
  // Unregisters and unlinks (best-effort, async) a temp file flagged as purgeable
  purge: ( filePath: string ): void => {
    if ( !Temp.store[filePath] ) return;
    delete Temp.store[filePath];
    FS.unlinkAttempt ( filePath );
  },
  // Synchronous variant of purge, used during process exit
  purgeSync: ( filePath: string ): void => {
    if ( !Temp.store[filePath] ) return;
    delete Temp.store[filePath];
    FS.unlinkSyncAttempt ( filePath );
  },
  // Purges every still-registered temp file
  purgeSyncAll: (): void => {
    for ( const filePath in Temp.store ) {
      Temp.purgeSync ( filePath );
    }
  },
  truncate: ( filePath: string ): string => { // Truncating paths to avoid getting an "ENAMETOOLONG" error //FIXME: This doesn't really always work, the actual filesystem limits must be detected for this to be implemented correctly
    const basename = path.basename ( filePath );
    if ( basename.length <= LIMIT_BASENAME_LENGTH ) return filePath; //FIXME: Rough and quick attempt at detecting ok lengths
    // Split the basename into (leading dot)(shortenable stem)(extension + temp suffix)
    // so only the stem is cut and the recognizable suffix survives
    const truncable = /^(\.?)(.*?)((?:\.[^.]+)?(?:\.tmp-\d{10}[a-f0-9]{6})?)$/.exec ( basename );
    if ( !truncable ) return filePath; //FIXME: No truncable part detected, can't really do much without also changing the parent path, which is unsafe, hoping for the best here
    const truncationLength = basename.length - LIMIT_BASENAME_LENGTH;
    return `${filePath.slice ( 0, - basename.length )}${truncable[1]}${truncable[2].slice ( 0, - truncationLength )}${truncable[3]}`; //FIXME: The truncable part might be shorter than needed here
  }
};

/* INIT */

process.on ( 'exit', Temp.purgeSyncAll ); // Ensuring purgeable temp files are purged on exit

/* EXPORT */

export default Temp;