diff --git a/lib/caching.js b/lib/caching.js
index b1466ca..62c32a2 100644
--- a/lib/caching.js
+++ b/lib/caching.js
@@ -3,8 +3,17 @@ const fs = require("fs"),
     config_path = "./config/caching_dump.json",
     cache_dump = JSON.parse(fs.readFileSync(config_path)),
     cache_dir = "./.cache",
-    cache = {}; // TODO: Must read from dump again
-if (cache_dump != null && cache_dump["last"] != null) cache = cache_dump["last"];
+    cache = {};
+var logger = require("winston");
+if (cache_dump != null && cache_dump["last"] != null) cache = cache_dump["last"]; // read the data from the file dump
+
+/**
+ * Sets the logger for logging
+ * @param {Winston Logger} newLogger
+ */
+exports.setLogger = function(newLogger) {
+    logger = newLogger;
+}
 
 /**
  * Returns the data from files that were cached
@@ -56,15 +65,6 @@ exports.cache = function(filename, data) {
         });
     }); // write the data asynchronously to the file
 };
-var logger = require("winston");
-
-/**
- * Sets the logger for logging
- * @param {Winston Logger} newLogger
- */
-exports.setLogger = function(newLogger) {
-    logger = newLogger;
-}
 
 /**
  * Returns if the file is already cached
@@ -94,6 +94,6 @@ exports.isCached = function(filename) {
  */
 exports.cleanup = function() {
     logger.verbose("Dumping cache into cache_dump file");
-    cache_dump["last"] = cache;
-    fs.writeFileSync(config_path, JSON.stringify(cache_dump));
+    cache_dump["last"] = cache; // append the cache to the dump object
+    fs.writeFileSync(config_path, JSON.stringify(cache_dump)); // write the dump data to the file
 }
diff --git a/lib/preprocessor.js b/lib/preprocessor.js
index 2cdc8dd..541c01b 100644
--- a/lib/preprocessor.js
+++ b/lib/preprocessor.js
@@ -30,28 +30,28 @@ exports.setLogger = function(newLogger) {
 exports.getProcessed = function(filename) {
     try {
         logger.debug("Processing File %s", filename);
-        var extension = utils.getExtension(filename);
+        var extension = utils.getExtension(filename); // use the utils function to get the files extension
         var data = null;
-        if (caching.isCached(filename)) return caching.getCached(filename)
+        if (caching.isCached(filename)) return caching.getCached(filename) // return the cached file if it exists
        logger.debug("File is not cached. Processing...");
         switch (pp_config[extension]) {
             case "sass":
                 logger.debug("Processing sass %s", filename);
-                data = Buffer.from(pp_sass.renderSync({
+                data = Buffer.from(pp_sass.renderSync({ // use the sass preprocessor
                     file: filename
                 }).css).toString("utf-8");
                 break;
             case "html":
                 logger.debug("Processing html %s", filename);
-                data = pp_html.formatHtml(filename);
+                data = pp_html.formatHtml(filename); // use the html-preprocessor
                 break;
             default:
                 logger.debug("No processor found for %s. Returning data.", filename);
-                return fs.readFileSync(filename);
+                return fs.readFileSync(filename); // just read the data from the file
         }
-        caching.cache(filename, data);
+        caching.cache(filename, data); // cache the file for faster access next time
         logger.debug("Cached file %s", filename);
-        return data;
+        return data; // return the data
     } catch (error) {
         logger.error(error);
         return "Processing Error";
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..3bfa8eb
--- /dev/null
+++ b/package.json
@@ -0,0 +1,14 @@
+{
+    "license": "GPL-3.0",
+    "dependencies": {
+        "args-parser": "*",
+        "https": "*",
+        "jquery": "*",
+        "jsdom": "*",
+        "node-sass": "*",
+        "perfy": "*",
+        "vuejs": "*",
+        "winston-daily-rotate-file": "*",
+        "winston": "*"
+    }
+}
diff --git a/server.js b/server.js
index 655dafb..7b6ece6 100644
--- a/server.js
+++ b/server.js
@@ -136,9 +136,6 @@ function getResponse(uri) {
         logger.verbose("Found route: "+JSON.stringify(route));
         if (!route) return ["Not Allowed", "text/plain"]; // return not allowed if no route was found
         return [gp(mount || path.join(route["path"],uri)), route["mime"]]; // get processed output (done by preprocessor)
-        // test the extension for differend file types. 
-        logger.verbose({'msg': 'Error', 'path': uri});
-        return ["Error with url", "text/plain"]; // return an error if above has not returned
     } catch (error) {
         logger.error(error);
         return ["Error", "text/plain"];