I might be stupid.
This commit is contained in:
parent 72230e07b0
commit d0b9400212
11 changed files with 160 additions and 170 deletions
package.json
@@ -1,6 +1,6 @@
 {
   "name": "@flashwave/assproc",
-  "version": "0.2.0",
+  "version": "0.3.0",
   "description": "Personal frontend asset processing tool",
   "main": "index.js",
   "scripts": {
144 src/combine.js
@@ -1,92 +1,88 @@
-import fs from 'fs';
-import readline from 'readline';
-import { join as pathJoin } from 'path';
-import { trim, trimStart, trimEnd } from './trim.js';
+const fs = require('fs');
+const readline = require('readline');
+const { join: pathJoin } = require('path');
+const { trim, trimStart, trimEnd } = require('./trim.js');

-const combine = {
-    folder: async (root, options) => {
-        const macroPrefix = options.prefix || '#';
-        const entryPoint = options.entry || '';
+exports.folder = async (root, options) => {
+    const macroPrefix = options.prefix || '#';
+    const entryPoint = options.entry || '';

-        root = fs.realpathSync(root);
+    root = fs.realpathSync(root);

-        const included = [];
+    const included = [];

-        const processFile = async fileName => {
-            const fullPath = pathJoin(root, fileName);
-            if(included.includes(fullPath))
-                return '';
-            included.push(fullPath);
+    const processFile = async fileName => {
+        const fullPath = pathJoin(root, fileName);
+        if(included.includes(fullPath))
+            return '';
+        included.push(fullPath);

-            if(!fullPath.startsWith(root))
-                throw `INVALID INCLUDED PATH: ${fullPath}`;
-            if(!fs.existsSync(fullPath))
-                throw `INCLUDED FILE NOT FOUND: ${fullPath}`;
+        if(!fullPath.startsWith(root))
+            throw `INVALID INCLUDED PATH: ${fullPath}`;
+        if(!fs.existsSync(fullPath))
+            throw `INCLUDED FILE NOT FOUND: ${fullPath}`;

-            const lines = readline.createInterface({
-                input: fs.createReadStream(fullPath),
-                crlfDelay: Infinity,
-            });
+        const lines = readline.createInterface({
+            input: fs.createReadStream(fullPath),
+            crlfDelay: Infinity,
+        });

-            let output = '';
-            let lastWasEmpty = false;
+        let output = '';
+        let lastWasEmpty = false;

-            if(options.showPath)
-                output += "/* *** PATH: " + fullPath + " */\n";
+        if(options.showPath)
+            output += "/* *** PATH: " + fullPath + " */\n";

-            for await(const line of lines) {
-                const lineTrimmed = trim(line);
-                if(lineTrimmed === '')
-                    continue;
+        for await(const line of lines) {
+            const lineTrimmed = trim(line);
+            if(lineTrimmed === '')
+                continue;

-                if(line.startsWith(macroPrefix)) {
-                    const args = lineTrimmed.split(' ');
-                    const macro = trim(trimStart(args.shift(), macroPrefix));
+            if(line.startsWith(macroPrefix)) {
+                const args = lineTrimmed.split(' ');
+                const macro = trim(trimStart(args.shift(), macroPrefix));

-                    switch(macro) {
-                        case 'comment':
-                            break;
+                switch(macro) {
+                    case 'comment':
+                        break;

-                        case 'include': {
-                            const includePath = trimEnd(args.join(' '), ';');
-                            output += trim(await processFile(includePath));
-                            output += "\n";
-                            break;
-                        }
+                    case 'include': {
+                        const includePath = trimEnd(args.join(' '), ';');
+                        output += trim(await processFile(includePath));
+                        output += "\n";
+                        break;
+                    }

-                        case 'vars':
-                            if(typeof options.vars !== 'object' || options.vars === null)
-                                break;
+                    case 'vars':
+                        if(typeof options.vars !== 'object' || options.vars === null)
+                            break;

-                            const bvSourceName = trimEnd(args.join(' '), ';');
-                            const bvSource = options.vars[bvSourceName];
-                            if(typeof bvSource !== 'objects' || bvSource === null)
-                                throw `INVALID VARS SOURCE: ${bvSourceName}`;
+                        const bvSourceName = trimEnd(args.join(' '), ';');
+                        const bvSource = options.vars[bvSourceName];
+                        if(typeof bvSource !== 'objects' || bvSource === null)
+                            throw `INVALID VARS SOURCE: ${bvSourceName}`;

-                            const bvProps = [];
-                            for(const bvName in bvSource)
-                                bvProps.push(`${bvName}: { value: ${JSON.stringify(bvSource[bvName])} }`);
+                        const bvProps = [];
+                        for(const bvName in bvSource)
+                            bvProps.push(`${bvName}: { value: ${JSON.stringify(bvSource[bvName])} }`);

-                            if(Object.keys(bvProps).length > 0)
-                                output += `Object.defineProperties(${options.varsTarget}, { ${bvProps.join(', ')} });\n`;
-                            break;
+                        if(Object.keys(bvProps).length > 0)
+                            output += `Object.defineProperties(${options.varsTarget}, { ${bvProps.join(', ')} });\n`;
+                        break;

-                        default:
-                            output += line;
-                            output += "\n";
-                            break;
-                    }
-                } else {
-                    output += line;
-                    output += "\n";
-                }
-            }
+                    default:
+                        output += line;
+                        output += "\n";
+                        break;
+                }
+            } else {
+                output += line;
+                output += "\n";
+            }
+        }

-            return output;
-        };
+        return output;
+    };

-        return await processFile(entryPoint);
-    },
-};
-
-export combine;
+    return await processFile(entryPoint);
+};
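For context on what the rewritten module does: combine.folder walks an asset directory starting from an entry file and splices in anything referenced through the macro prefix. A rough usage sketch follows; it is not part of the commit, the directory, entry file, and vars values are invented, and only options visible in the diff (prefix, entry, showPath, vars, varsTarget) are used.

const combine = require('./src/combine.js');

(async () => {
    // Recursively inlines "#include other.js;" lines starting from main.js,
    // skipping any file that was already included once.
    const bundle = await combine.folder('./assets/js', {
        prefix: '#',          // macro marker, e.g. "#include util.js;"
        entry: 'main.js',     // file the recursion starts from
        showPath: true,       // emit "/* *** PATH: ... */" banners per file
        vars: { globals: { VERSION: '0.3.0' } },  // read by the "#vars globals;" macro
        varsTarget: 'window', // object the vars get defined on in the output
    });
    console.log(bundle);
})();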
src/handlers/css.js
@@ -1,9 +1,9 @@
-import postcss from 'postcss';
-import combine from '../combine.js';
-import { join as pathJoin } from 'path';
-import { strtr, shortHash, writeFile } from './utils.js';
+const postcss = require('postcss');
+const combine = require('../combine.js');
+const { join: pathJoin, dirname } = require('path');
+const { strtr, shortHash, writeFile } = require('../utils.js');

-export const function(env) {
+exports = function(env) {
     const PREFIX = '@';
     const DEFAULT_ENTRY = 'main.css';

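Given the '@' prefix and main.css entry above, a stylesheet fed to this handler would presumably look something like the following; the file names are invented, and the behaviour described comes from combine.js (the 'comment' macro emits nothing, 'include' paths lose their trailing ';').

/* hypothetical assets/css/main.css */
@comment entry point for the site styles;
@include reset.css;
@include layout/grid.css;

body { margin: 0; }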
src/handlers/html.js
@@ -1,8 +1,8 @@
-import { minify as htmlminify } from 'html-minifier-terser';
-import { join as pathJoin, dirname } from 'path';
-import { strtr, shortHash, writeFile } from './utils.js';
+const { minify: htmlminify } = require('html-minifier-terser');
+const { join: pathJoin, dirname } = require('path');
+const { strtr, shortHash, writeFile } = require('../utils.js');

-export const function(env) {
+exports = function(env) {
     const MINIFY_OPTS = {
         collapseBooleanAttributes: true,
         collapseWhitespace: true,
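For reference, the two minify options visible in this hunk behave roughly as below; the sample markup is invented, and html-minifier-terser's minify() returns a promise.

const { minify: htmlminify } = require('html-minifier-terser');

htmlminify('<input disabled="disabled">   <p>hello   world</p>', {
    collapseBooleanAttributes: true, // disabled="disabled" becomes disabled
    collapseWhitespace: true,        // runs of whitespace collapse or disappear
}).then(out => console.log(out));    // roughly '<input disabled><p>hello world</p>'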
src/handlers/js.js
@@ -1,9 +1,10 @@
-import swc from '@swc/core';
-import combine from '../combine.js';
-import { join as pathJoin } from 'path';
-import { strtr, shortHash, writeFile } from './utils.js';
+const swc = require('@swc/core');
+const combine = require('../combine.js');
+const { minify: htmlminify } = require('html-minifier-terser');
+const { join: pathJoin, } = require('path');
+const { strtr, shortHash, writeFile } = require('../utils.js');

-export const function(env) {
+exports = function(env) {
     const PREFIX = '#';
     const DEFAULT_ENTRY = 'main.js';
     const DEFAULT_VARS_TARGET = 'window';
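Under the '#' prefix and the vars handling in combine.js, a JS entry file for this handler would presumably look like this; the file and group names are invented, and wiring the vars group through to the window target is an assumption based on DEFAULT_VARS_TARGET above.

// hypothetical assets/js/main.js
#comment bundled by assproc;
#include lib/dom.js;
#vars globals;

console.log(window.VERSION); // defined by the generated Object.defineProperties(window, ...) line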
src/handlers/twig.js
@@ -1,12 +1,9 @@
-import { exec as execLie } from 'child_process';
-import { promisify } from 'util';
-import { minify as htmlminify } from 'html-minifier-terser';
-import { join as pathJoin, dirname } from 'path';
-import { strtr, shortHash } from './utils.js';
+const exec = require('util').promisify(require('child_process').exec);
+const { minify: htmlminify } = require('html-minifier-terser');
+const { join: pathJoin, dirname } = require('path');
+const { strtr, shortHash, writeFile } = require('../utils.js');

-export const function(env) {
-    const exec = promisify(execLie);
-
+exports = function(env) {
     const MINIFY_OPTS = {
         collapseBooleanAttributes: true,
         collapseWhitespace: true,
src/handlers/webmanifest.js
@@ -1,8 +1,8 @@
-import fs from 'fs';
-import { join as pathJoin, dirname } from 'path';
-import { strtr, shortHash, writeFile } from './utils.js';
+const fs = require('fs');
+const { join: pathJoin, dirname } = require('path');
+const { strtr, shortHash, writeFile } = require('../utils.js');

-export const function(env) {
+exports = function(env) {
     return {
         process: async (task, vars) => {
             let body = JSON.parse(fs.readFileSync(pathJoin(env.source, task.source)));
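This hunk shows the shape every handler module appears to share: a factory that takes env and returns an object with an async process(task, vars) which yields the generated file's path, the value index.js records in files and vars. A minimal hedged skeleton follows; it uses module.exports where the committed code assigns to exports, and the hashing and output naming are only implied by the utils imports, not shown in the diff.

const fs = require('fs');
const { join: pathJoin } = require('path');
const { shortHash, writeFile } = require('../utils.js'); // same relative path the handlers use

module.exports = function(env) {
    return {
        process: async (task, vars) => {
            // Read the source named by the task, relative to env.source.
            const body = fs.readFileSync(pathJoin(env.source, task.source), 'utf8');
            // Derive an output name from a content hash and write it under env.public.
            const name = `${task.source}-${shortHash(body)}.txt`; // naming scheme invented
            writeFile(pathJoin(env.public, name), body);
            return name; // index.js records this as the task's generated path
        },
    };
};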
src/housekeep.js
@@ -1,7 +1,7 @@
-import fs from 'fs';
-import { join as pathJoin } from 'path';
+const fs = require('fs');
+const { join: pathJoin } = require('path');

-export const housekeep = path => {
+exports.housekeep = path => {
     const files = fs.readdirSync(path).map(fileName => {
         const stats = fs.statSync(pathJoin(path, fileName));
         return {
110 src/index.js
@@ -1,9 +1,9 @@
-import apCss from './handlers/css.js';
-import apHtml from './handlers/html.js';
-import apJs from './handlers/js.js';
-import apTwig from './handlers/twig.js';
-import apWebManifest from './handlers/webmanifest.js';
-import { housekeep } from './housekeep.js';
+const apCss = require('./handlers/css.js');
+const apHtml = require('./handlers/html.js');
+const apJs = require('./handlers/js.js');
+const apTwig = require('./handlers/twig.js');
+const apWebManifest = require('./handlers/webmanifest.js');
+const { housekeep } = require('./housekeep.js');

 const DEFAULT_ENV = {
     debug: false,
@@ -23,74 +23,70 @@ const DEFAULT_ENV = {
     },
 };

-const public = {
-    process: async (env, tasks) => {
-        if(typeof env.source !== 'string')
-            throw 'env.source must be a path to the source directories';
-        if(typeof env.public !== 'string')
-            throw 'env.public must be a path to the root output directory';
+exports.process = async (env, tasks) => {
+    if(typeof env.source !== 'string')
+        throw 'env.source must be a path to the source directories';
+    if(typeof env.public !== 'string')
+        throw 'env.public must be a path to the root output directory';

-        if(typeof tasks !== 'object' || tasks === null)
-            throw 'tasks must be a non-null object';
+    if(typeof tasks !== 'object' || tasks === null)
+        throw 'tasks must be a non-null object';

-        env = { ...DEFAULT_ENV, ...env };
+    env = { ...DEFAULT_ENV, ...env };

-        const types = {
-            js: new apJs(env),
-            css: new apCss(env),
-            webmanifest: new apWebManifest(env),
-            html: new apHtml(env),
-            twig: new apTwig(env),
-        };
+    const types = {
+        js: new apJs(env),
+        css: new apCss(env),
+        webmanifest: new apWebManifest(env),
+        html: new apHtml(env),
+        twig: new apTwig(env),
+    };

-        const order = env.order ?? Object.keys(types);
-        if(!Array.isArray(order))
-            throw 'env.order must be undefined or an array';
+    const order = env.order ?? Object.keys(types);
+    if(!Array.isArray(order))
+        throw 'env.order must be undefined or an array';

-        const vars = env.vars ?? {};
-        if(typeof vars !== 'object' || vars === null)
-            throw 'env.vars must be a non-null object';
+    const vars = env.vars ?? {};
+    if(typeof vars !== 'object' || vars === null)
+        throw 'env.vars must be a non-null object';

-        const files = {};
+    const files = {};

-        for(const type of order) {
-            if(!(type in types))
-                throw `${type} is not a supported build task type`;
+    for(const type of order) {
+        if(!(type in types))
+            throw `${type} is not a supported build task type`;

-            const typeTasks = tasks[type];
-            if(!Array.isArray(typeTasks))
-                throw 'children of the tasks object must be arrays';
+        const typeTasks = tasks[type];
+        if(!Array.isArray(typeTasks))
+            throw 'children of the tasks object must be arrays';

-            console.info(`Building '${type}' assets...`);
+        console.info(`Building '${type}' assets...`);

-            const handler = types[type];
+        const handler = types[type];

-            for(const task of typeTasks) {
-                console.info(`  => ${task.source}...`);
-                const path = await handler.process(task, vars);
+        for(const task of typeTasks) {
+            console.info(`  => ${task.source}...`);
+            const path = await handler.process(task, vars);

-                if(typeof task.varsName === 'string')
-                    vars[task.varsGroup ?? ''][task.varsName] = path;
+            if(typeof task.varsName === 'string')
+                vars[task.varsGroup ?? ''][task.varsName] = path;

-                files[task.source] = path;
-            }
-        }
+            files[task.source] = path;
+        }
+    }


-        const hkDirs = env.housekeep ?? [];
-        if(!Array.isArray(hkDirs))
-            throw 'env.housekeep must be an array of folder paths';
+    const hkDirs = env.housekeep ?? [];
+    if(!Array.isArray(hkDirs))
+        throw 'env.housekeep must be an array of folder paths';

-        if(hkDirs.length > 0) {
-            console.info(`Doing some housekeeping...`);
-            for(const path of hkDirs) {
-                console.info(`  => ${path}...`);
-                housekeep(path);
-            }
-        }
+    if(hkDirs.length > 0) {
+        console.info(`Doing some housekeeping...`);
+        for(const path of hkDirs) {
+            console.info(`  => ${path}...`);
+            housekeep(path);
+        }
+    }

-        return files;
-    },
-};
-
-export public;
+    return files;
+};
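A hedged sketch of how this export gets called; the directory names, order, and task list are invented, but the option names match the checks above.

const assproc = require('@flashwave/assproc');

(async () => {
    const files = await assproc.process({
        source: './assets',             // where task.source paths are read from
        public: './public',             // root output directory
        order: ['js', 'css'],           // only run these handlers, in this order
        vars: { '': {}, globals: {} },  // groups; task.varsName lands in vars[task.varsGroup ?? '']
        housekeep: ['./public/assets'], // folders pruned after the build
    }, {
        js:  [{ source: 'js', varsName: 'scriptPath' }],
        css: [{ source: 'css' }],
    });

    console.log(files); // maps each task.source to its generated output path
})();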
src/trim.js
@@ -16,6 +16,6 @@ const trim = (str, chars = " \n\r\t\v\0", flags = 0) => {
     return str;
 };

-export const trimStart = (str, chars) => trim(str, chars, 0x01);
-export const trimEnd = (str, chars) => trim(str, chars, 0x02);
-export const trim = (str, chars) => trim(str, chars, 0x03);
+exports.trimStart = (str, chars) => trim(str, chars, 0x01);
+exports.trimEnd = (str, chars) => trim(str, chars, 0x02);
+exports.trim = (str, chars) => trim(str, chars, 0x03);
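For reference, the flags select which end gets trimmed (0x01 the start, 0x02 the end, 0x03 both). A couple of hedged examples using the exported wrappers; the sample strings are invented.

const { trim, trimStart, trimEnd } = require('./src/trim.js');

trimStart('##include util.js', '#'); // 'include util.js', as combine.js does to macro names
trimEnd('reset.css;', ';');          // 'reset.css', how include paths lose their trailing ';'
trim('  spaced  ');                  // 'spaced', using the default char set " \n\r\t\v\0"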
12 src/utils.js
@@ -1,18 +1,18 @@
-import crypto from 'crypto';
-import fs from 'fs';
-import { dirname } from 'path';
+const crypto = require('crypto');
+const fs = require('fs');
+const { dirname } = require('path');

-export const strtr = (str, replacements) => str.toString().replace(
+exports.strtr = (str, replacements) => str.toString().replace(
     /{([^}]+)}/g, (match, key) => replacements[key] || match
 );

-export const shortHash = text => {
+exports.shortHash = text => {
     const hash = crypto.createHash('sha256');
     hash.update(text);
     return hash.digest('hex').substring(0, 8);
 };

-export const writeFile = (path, data) => {
+exports.writeFile = (path, data) => {
     const folderPath = dirname(path);
     if(!fs.existsSync(folderPath))
         fs.mkdirSync(folderPath, { recursive: true });
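Hedged examples for the three helpers; the inputs are invented.

const { strtr, shortHash, writeFile } = require('./src/utils.js');

strtr('{name}-{hash}.css', { name: 'main', hash: 'a1b2c3d4' }); // 'main-a1b2c3d4.css'
shortHash('body { margin: 0; }');   // first 8 hex chars of the SHA-256 digest
writeFile('./public/assets/main-a1b2c3d4.css', 'body{margin:0}'); // creates ./public/assets first if needed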