1
Fork 0

convert to ESM

This commit is contained in:
Conduitry 2021-04-21 08:09:10 -04:00
parent 219ca6c083
commit fd086972a3
3 changed files with 96 additions and 80 deletions

157
crypt.js
View File

@@ -1,4 +1,4 @@
const CACHE_PATH = __dirname + '/cache';
const CACHE_PATH = dirname(fileURLToPath(import.meta.url)) + '/cache';
const DEFAULT_CIPHER_ALGORITHM = 'aes-256-cbc';
const DEFAULT_HASH_ALGORITHM = 'sha512';
const DEFAULT_SPLIT_SIZE = 33554432;
@@ -6,20 +6,43 @@ const HMAC_KEY_LENGTH = 32;
const RSA_KEY_BITS = 2048;
const STREAM_CONCURRENCY = 8;
const crypto = require('crypto');
const fs = require('fs');
const path_ = require('path');
const v8 = require('v8');
import {
createCipheriv,
createDecipheriv,
createHash,
createHmac,
createPrivateKey,
createPublicKey,
generateKeyPairSync,
getCipherInfo,
privateDecrypt,
publicEncrypt,
randomBytes,
} from 'crypto';
import {
accessSync,
createReadStream,
createWriteStream,
mkdirSync,
readdirSync,
readFileSync,
statSync,
unlinkSync,
writeFileSync,
} from 'fs';
import { dirname } from 'path';
import { fileURLToPath } from 'url';
import { deserialize, serialize } from 'v8';
function init({
export function init({
crypt: crypt_dir,
cipher: cipher_algorithm = DEFAULT_CIPHER_ALGORITHM,
hash: hash_algorithm = DEFAULT_HASH_ALGORITHM,
split: split_size = DEFAULT_SPLIT_SIZE,
passphrase,
}) {
fs.mkdirSync(crypt_dir, { recursive: true });
fs.writeFileSync(
mkdirSync(crypt_dir, { recursive: true });
writeFileSync(
crypt_dir + '/info',
cipher_algorithm +
'\n' +
@@ -27,30 +50,30 @@ function init({
'\n' +
split_size +
'\n' +
crypto.randomBytes(HMAC_KEY_LENGTH).toString('base64url') +
randomBytes(HMAC_KEY_LENGTH).toString('base64url') +
'\n',
);
const pair = crypto.generateKeyPairSync('rsa', { modulusLength: RSA_KEY_BITS });
fs.writeFileSync(crypt_dir + '/public', pair.publicKey.export({ type: 'spki', format: 'pem' }));
fs.writeFileSync(
const pair = generateKeyPairSync('rsa', { modulusLength: RSA_KEY_BITS });
writeFileSync(crypt_dir + '/public', pair.publicKey.export({ type: 'spki', format: 'pem' }));
writeFileSync(
crypt_dir + '/private',
pair.privateKey.export({ type: 'pkcs8', format: 'pem', cipher: cipher_algorithm, passphrase }),
);
}
function get_info(crypt_dir, passphrase) {
const s = fs.readFileSync(crypt_dir + '/info', 'ascii').match(/\S+/g);
const s = readFileSync(crypt_dir + '/info', 'ascii').match(/\S+/g);
const info = {
cipher_algorithm: s[0],
hash_algorithm: s[1],
split_size: +s[2],
hmac_key: Buffer.from(s[3], 'base64url'),
public_key: crypto.createPublicKey(fs.readFileSync(crypt_dir + '/public')),
public_key: createPublicKey(readFileSync(crypt_dir + '/public')),
index: new Map(),
};
for (const dirent of fs.readdirSync(crypt_dir, { withFileTypes: true })) {
for (const dirent of readdirSync(crypt_dir, { withFileTypes: true })) {
if (dirent.isFile() && dirent.name.endsWith('-index')) {
const s = fs.readFileSync(crypt_dir + '/' + dirent.name, 'ascii').match(/\S+/g);
const s = readFileSync(crypt_dir + '/' + dirent.name, 'ascii').match(/\S+/g);
info.index.set(dirent.name.slice(0, -6), {
hash_hmac: Buffer.from(s[0], 'base64url'),
key: Buffer.from(s[1], 'base64url'),
@@ -60,8 +83,8 @@ function get_info(crypt_dir, passphrase) {
}
}
if (passphrase != null) {
info.private_key = crypto.createPrivateKey({
key: fs.readFileSync(crypt_dir + '/private'),
info.private_key = createPrivateKey({
key: readFileSync(crypt_dir + '/private'),
passphrase,
});
}
@@ -71,7 +94,7 @@ function get_info(crypt_dir, passphrase) {
async function get_plain_index(plain_dir, hash_algorithm, filter) {
let cache;
try {
cache = v8.deserialize(fs.readFileSync(CACHE_PATH));
cache = deserialize(readFileSync(CACHE_PATH));
} catch {
cache = new Map();
}
@@ -80,9 +103,9 @@ async function get_plain_index(plain_dir, hash_algorithm, filter) {
const stream_queue = make_stream_queue();
while (pending.length) {
const dir = pending.shift();
for (const name of fs.readdirSync(dir)) {
for (const name of readdirSync(dir)) {
const path = dir + '/' + name;
const stats = fs.statSync(path);
const stats = statSync(path);
if (stats.isFile()) {
if (!filter || filter(path.slice(plain_dir.length + 1))) {
const key = path + ':' + hash_algorithm;
@@ -97,9 +120,8 @@ async function get_plain_index(plain_dir, hash_algorithm, filter) {
});
} else {
stream_queue.add(() =>
fs
.createReadStream(path)
.pipe(crypto.createHash(hash_algorithm))
createReadStream(path)
.pipe(createHash(hash_algorithm))
.once('readable', function () {
const hash = this.read();
cache.set(key, { size: stats.size, mtimeMs: stats.mtimeMs, hash });
@@ -114,14 +136,13 @@ async function get_plain_index(plain_dir, hash_algorithm, filter) {
}
}
await stream_queue.done();
fs.writeFileSync(CACHE_PATH, v8.serialize(cache));
writeFileSync(CACHE_PATH, serialize(cache));
return plain_index;
}
function get_crypt_filename(info, path, start) {
return (
crypto
.createHmac(info.hash_algorithm, info.hmac_key)
createHmac(info.hash_algorithm, info.hmac_key)
.update(path + '@' + start)
.digest('base64url') + '-data'
);
@@ -158,20 +179,20 @@ function make_stream_queue() {
};
}
async function encrypt({ plain: plain_dir, crypt: crypt_dir, filter }) {
export async function encrypt({ plain: plain_dir, crypt: crypt_dir, filter }) {
const added = new Set();
const deleted = new Set();
const updated = new Set();
// READ CRYPT INDEX
const info = get_info(crypt_dir);
const { keyLength, ivLength } = crypto.getCipherInfo(info.cipher_algorithm);
const { keyLength, ivLength } = getCipherInfo(info.cipher_algorithm);
// CONSTRUCT PLAIN INDEX
const plain_index = await get_plain_index(plain_dir, info.hash_algorithm, filter);
// CREATE INDEX OF PLAIN FILES AS THEY WILL APPEAR IN THE CRYPT INDEX
const path_hmac_lookup = new Map();
for (const path of plain_index.keys()) {
path_hmac_lookup.set(
crypto.createHmac(info.hash_algorithm, info.hmac_key).update(path).digest('base64url'),
createHmac(info.hash_algorithm, info.hmac_key).update(path).digest('base64url'),
path,
);
}
@@ -179,14 +200,14 @@ async function encrypt({ plain: plain_dir, crypt: crypt_dir, filter }) {
for (const path_hmac of info.index.keys()) {
if (!path_hmac_lookup.has(path_hmac)) {
deleted.add(path_hmac + '-index');
fs.unlinkSync(crypt_dir + '/' + path_hmac + '-index');
unlinkSync(crypt_dir + '/' + path_hmac + '-index');
}
}
// UPDATE/ADD FILES
const stream_queue = make_stream_queue();
for (const [path_hmac, path] of path_hmac_lookup) {
const { size, hash } = plain_index.get(path);
const hash_hmac = crypto.createHmac(info.hash_algorithm, info.hmac_key).update(hash).digest();
const hash_hmac = createHmac(info.hash_algorithm, info.hmac_key).update(hash).digest();
if (!info.index.has(path_hmac)) {
added.add(path);
} else if (Buffer.compare(info.index.get(path_hmac).hash_hmac, hash_hmac)) {
@@ -194,16 +215,16 @@ async function encrypt({ plain: plain_dir, crypt: crypt_dir, filter }) {
} else {
continue;
}
const key = crypto.randomBytes(keyLength);
const iv = crypto.randomBytes(ivLength);
const cipher = crypto.createCipheriv(info.cipher_algorithm, key, iv);
fs.writeFileSync(
const key = randomBytes(keyLength);
const iv = randomBytes(ivLength);
const cipher = createCipheriv(info.cipher_algorithm, key, iv);
writeFileSync(
crypt_dir + '/' + path_hmac + '-index',
hash_hmac.toString('base64url') +
'\n' +
crypto.publicEncrypt(info.public_key, key).toString('base64url') +
publicEncrypt(info.public_key, key).toString('base64url') +
'\n' +
crypto.publicEncrypt(info.public_key, iv).toString('base64url') +
publicEncrypt(info.public_key, iv).toString('base64url') +
'\n' +
Buffer.concat([cipher.update(path), cipher.final()]).toString('base64url') +
'\n',
@@ -211,17 +232,16 @@ async function encrypt({ plain: plain_dir, crypt: crypt_dir, filter }) {
for (let start = 0; ; start += info.split_size) {
if (start < size) {
stream_queue.add(() =>
fs
.createReadStream(plain_dir + '/' + path, {
start,
end: Math.min(start + info.split_size - 1, size - 1),
})
.pipe(crypto.createCipheriv(info.cipher_algorithm, key, iv))
.pipe(fs.createWriteStream(crypt_dir + '/' + get_crypt_filename(info, path, start))),
createReadStream(plain_dir + '/' + path, {
start,
end: Math.min(start + info.split_size - 1, size - 1),
})
.pipe(createCipheriv(info.cipher_algorithm, key, iv))
.pipe(createWriteStream(crypt_dir + '/' + get_crypt_filename(info, path, start))),
);
} else {
try {
fs.unlinkSync(crypt_dir + '/' + get_crypt_filename(info, path, start));
unlinkSync(crypt_dir + '/' + get_crypt_filename(info, path, start));
} catch {
break;
}
@@ -232,21 +252,21 @@ async function encrypt({ plain: plain_dir, crypt: crypt_dir, filter }) {
return { added, deleted, updated };
}
function clean({ crypt: crypt_dir, passphrase }) {
export function clean({ crypt: crypt_dir, passphrase }) {
// READ CRYPT INDEX
const info = get_info(crypt_dir, passphrase);
// GET CRYPT FILES
const crypt_filenames = new Set();
for (const dirent of fs.readdirSync(crypt_dir, { withFileTypes: true })) {
for (const dirent of readdirSync(crypt_dir, { withFileTypes: true })) {
if (dirent.isFile() && dirent.name.endsWith('-data')) {
crypt_filenames.add(dirent.name);
}
}
// SKIP ALL FILES REFERRED TO BY AN INDEX
for (const item of info.index.values()) {
const key = crypto.privateDecrypt(info.private_key, item.key);
const iv = crypto.privateDecrypt(info.private_key, item.iv);
const decipher = crypto.createDecipheriv(info.cipher_algorithm, key, iv);
const key = privateDecrypt(info.private_key, item.key);
const iv = privateDecrypt(info.private_key, item.iv);
const decipher = createDecipheriv(info.cipher_algorithm, key, iv);
const path = Buffer.concat([decipher.update(item.path), decipher.final()]).toString();
for (let start = 0; ; start += info.split_size) {
const crypt_filename = get_crypt_filename(info, path, start);
@@ -259,16 +279,16 @@ function clean({ crypt: crypt_dir, passphrase }) {
}
// DELETE UNUSED CRYPT FILES
for (const crypt_filename of crypt_filenames) {
fs.unlinkSync(crypt_dir + '/' + crypt_filename);
unlinkSync(crypt_dir + '/' + crypt_filename);
}
return { deleted: crypt_filenames };
}
async function decrypt({ plain: plain_dir, crypt: crypt_dir, filter, passphrase }) {
export async function decrypt({ plain: plain_dir, crypt: crypt_dir, filter, passphrase }) {
const added = new Set();
const deleted = new Set();
const updated = new Set();
fs.mkdirSync(plain_dir, { recursive: true });
mkdirSync(plain_dir, { recursive: true });
// READ CRYPT INDEX
const info = get_info(crypt_dir, passphrase);
// CONSTRUCT PLAIN INDEX
@@ -277,15 +297,14 @@ async function decrypt({ plain: plain_dir, crypt: crypt_dir, filter, passphrase
// DELETE MISSING FILES
const path_hmac_lookup = new Map();
for (const path of plain_index.keys()) {
const path_hmac = crypto
.createHmac(info.hash_algorithm, info.hmac_key)
const path_hmac = createHmac(info.hash_algorithm, info.hmac_key)
.update(path)
.digest('base64url');
if (info.index.has(path_hmac)) {
path_hmac_lookup.set(path_hmac, path);
} else {
deleted.add(path);
fs.unlinkSync(plain_dir + '/' + path);
unlinkSync(plain_dir + '/' + path);
}
}
// UPDATE/ADD FILES
@@ -296,8 +315,7 @@ async function decrypt({ plain: plain_dir, crypt: crypt_dir, filter, passphrase
target = added;
} else if (
Buffer.compare(
crypto
.createHmac(info.hash_algorithm, info.hmac_key)
createHmac(info.hash_algorithm, info.hmac_key)
.update(plain_index.get(path_hmac_lookup.get(path_hmac)).hash)
.digest(),
item.hash_hmac,
@@ -307,30 +325,27 @@ async function decrypt({ plain: plain_dir, crypt: crypt_dir, filter, passphrase
} else {
continue;
}
const key = crypto.privateDecrypt(info.private_key, item.key);
const iv = crypto.privateDecrypt(info.private_key, item.iv);
const decipher = crypto.createDecipheriv(info.cipher_algorithm, key, iv);
const key = privateDecrypt(info.private_key, item.key);
const iv = privateDecrypt(info.private_key, item.iv);
const decipher = createDecipheriv(info.cipher_algorithm, key, iv);
const path = Buffer.concat([decipher.update(item.path), decipher.final()]).toString();
target.add(path);
fs.mkdirSync(plain_dir + '/' + path_.dirname(path), { recursive: true });
fs.writeFileSync(plain_dir + '/' + path, Buffer.alloc(0));
mkdirSync(plain_dir + '/' + dirname(path), { recursive: true });
writeFileSync(plain_dir + '/' + path, Buffer.alloc(0));
for (let start = 0; ; start += info.split_size) {
const file = crypt_dir + '/' + get_crypt_filename(info, path, start);
try {
fs.accessSync(file);
accessSync(file);
} catch {
break;
}
stream_queue.add(() =>
fs
.createReadStream(file)
.pipe(crypto.createDecipheriv(info.cipher_algorithm, key, iv))
.pipe(fs.createWriteStream(plain_dir + '/' + path, { flags: 'r+', start })),
createReadStream(file)
.pipe(createDecipheriv(info.cipher_algorithm, key, iv))
.pipe(createWriteStream(plain_dir + '/' + path, { flags: 'r+', start })),
);
}
}
await stream_queue.done();
return { added, deleted, updated };
}
module.exports = { init, encrypt, clean, decrypt };

3
package.json Normal file
View File

@@ -0,0 +1,3 @@
{
"type": "module"
}

16
pass.js
View File

@@ -1,12 +1,12 @@
const readline = require('readline');
const stream = require('stream');
const devnull = new stream.Writable({ write: (chunk, encoding, cb) => cb() });
import { createInterface } from 'readline';
import { Writable } from 'stream';
function get_pass(prompt) {
const devnull = new Writable({ write: (chunk, encoding, cb) => cb() });
export function get_pass(prompt) {
process.stdout.write(prompt);
return new Promise((res, rej) => {
const rl = readline
.createInterface({ input: process.stdin, output: devnull, terminal: true })
const rl = createInterface({ input: process.stdin, output: devnull, terminal: true })
.once('line', (line) => {
res(line);
rl.close();
@@ -18,7 +18,7 @@ function get_pass(prompt) {
});
}
async function confirm_pass(prompt1, prompt2, error) {
export async function confirm_pass(prompt1, prompt2, error) {
for (;;) {
const pass1 = await get_pass(prompt1);
const pass2 = await get_pass(prompt2);
@@ -28,5 +28,3 @@ async function confirm_pass(prompt1, prompt2, error) {
process.stdout.write(error + '\n');
}
}
module.exports = { get_pass, confirm_pass };