fixing up log entries
author     Paul J R <me@pjr.cc>
Mon, 24 Jun 2013 22:31:30 +0000 (08:31 +1000)
committer  Paul J R <me@pjr.cc>
Mon, 24 Jun 2013 22:31:30 +0000 (08:31 +1000)
TODO [new file with mode: 0644]
lib/cache.js
lib/config.js
lib/log.js
lib/maintain.js
lib/repo-apt.js
lib/repo-yum.js
lib/router.js

diff --git a/TODO b/TODO
new file mode 100644
index 0000000..56a27c9
--- /dev/null
+++ b/TODO
@@ -0,0 +1,6 @@
+TODO
+====
+
+- fix the log.* entries that need to be errors
+- metadata check on maintenance routines
+- 
\ No newline at end of file
diff --git a/lib/cache.js b/lib/cache.js
index 588bc73..4f98d3c 100644
@@ -3,10 +3,11 @@ var http = require("http");
 var url = require("url");
 var path = require("path");
 var crypto = require("crypto");
+var log = require("./log.js");
 
 function upstreamRequest(unify) {
        // first do a head request
-       console.log("upsteram as ", unify.requestFor);
+       log.debug("upsteram as ", unify.requestFor);
        
        var endData = false;
        var xpath = "";
@@ -21,12 +22,12 @@ function upstreamRequest(unify) {
        
        // not doing this properly yet...
        if(typeof global.repoproxy.downloads[unify.fullFilePath] != undefined && global.repoproxy.downloads[unify.fullFilePath] == 1) {
-               console.log("request for file thats being downloaded already, doing inline request");
+               log.debug("request for file thats being downloaded already, doing inline request");
                inlineService(unify);
                return;
        }
        
-       console.log("sending off to '%s'", xpath);
+       log.debug("sending off to '%s'", xpath);
        
        var headReq = url.parse(xpath);
        headReq["method"] = "HEAD";
@@ -35,7 +36,7 @@ function upstreamRequest(unify) {
                //res.setEncoding("utf8");
                
                if(!endData) {
-                       console.log("status code is ", typeof res.statusCode);
+                       log.debug("status code is ", typeof res.statusCode);
                        switch(res.statusCode) {
                        // TODO: this 301 directory redirect thing needs to work better
                        case 301:
@@ -46,11 +47,11 @@ function upstreamRequest(unify) {
                                var against = against_t.substr(against_t.length-4);
                                
                                if(loc == against) {
-                                       console.log("got a redirect, upstream for loc => loc/ assuming its a directory");
+                                       log.debug("got a redirect, upstream for loc => loc/ assuming its a directory");
                                        makeCacheDir(unify);
                                        unify.b.writeHead(302, { "Location": unify.originalReq + "/" });
                                } else {
-                                       console.log("checked '%s' against '%s', was false, sending 404", loc, against);
+                                       log.debug("checked '%s' against '%s', was false, sending 404", loc, against);
                                        unify.b.writeHead(404, {"Content-Type": "text/plain"});
                                        unify.b.write("404 Not Found\n");
                                }
@@ -72,7 +73,7 @@ function upstreamRequest(unify) {
                                } else {
                                        // this is where it gets ugly
                                        var filesize = res.headers["content-length"];
-                                       console.log("do ugly write: ", unify);
+                                       log.debug("do ugly write: ", unify);
                                        //unify.b.write(data);
                                        var metafilename = unify.fullPathDirName + "/.meta."+ path.basename(unify.requestFor) +".filesize";
                                        var metafile = fs.createWriteStream(metafilename);
@@ -83,23 +84,23 @@ function upstreamRequest(unify) {
                                }
                                break;
                        default:
-                               console.log(".... data");
+                               log.debug(".... data");
                                //unify.b.write(data);
                        }
                }               
-               //console.log("res is now ", res);
+               //log.debug("res is now ", res);
        });
        
        getup.end();
        
-       //console.log("getup: ", getup);
+       //log.debug("getup: ", getup);
 }
 
 exports.upstreamRequest = upstreamRequest;
 
 function getAndService(unify, xpath, filesize) {
        
-       console.log("calling in here with filesize, ", filesize)
+       log.debug("calling in here with filesize, ", filesize)
        unify.b.writeHead(200, {'Content-Length' : filesize});
 
        
@@ -110,25 +111,25 @@ function getAndService(unify, xpath, filesize) {
 
            var file = fs.createWriteStream(unify.fullFilePath);
        
-           //console.log("res: ", res);
+           //log.debug("res: ", res);
        
            //res.setEncoding("utf8");
        
            res.on("data", function(data) {
-                   //console.log("chunk");
+                   //log.debug("chunk");
                    file.write(data);
                    unify.b.write(data);
            });
        
            res.on("end", function() {
-                   console.log("end...");
+                   log.debug("end...");
                    unify.b.end();
                    file.end();
                    global.repoproxy.downloads[unify.fullFilePath] = 0;
            });
            
            res.on("error", function(err) {
-               console.log("res threw error... ", err);
+               log.debug("res threw error... ", err);
            });
        });
 }
@@ -149,7 +150,7 @@ function inlineService(unify) {
                unify.b.writeHead(200, {"Content-Length" : fsize });
                
                // now we go into the file reading loop.
-               console.log("start of inline services");
+               log.debug("start of inline services");
                // we loop every 0.5s and do our thing
                
                function sendPieces() {
@@ -164,7 +165,7 @@ function inlineService(unify) {
                                                var rs = fs.createReadStream(unify.fullFilePath, {start: sentSoFar, end: stats["size"]});
                                                
                                                rs.on("data", function(thisdata) {
-                                                       //console.log("inline chunk: ", thisdata.length);
+                                                       //log.debug("inline chunk: ", thisdata.length);
                                                        unify.b.write(thisdata);
                                                });
                                                
@@ -192,7 +193,7 @@ function inlineService(unify) {
                                                }
                                        }
                                } else {
-                                       console.log("inline service - we're in a very bad place");
+                                       log.debug("inline service - we're in a very bad place");
                                }
                        });
                        
@@ -245,7 +246,7 @@ function checkFile(unify, callback) {
                                        var rfsize = stats["size"];
                                        if(rfsize != fsize.trim()) {
                                                // remove the file and start again
-                                               console.log("reported filesizes dont match, '%s', '%s', removing file and starting again", rfsize, stats["size"]);
+                                               log.debug("reported filesizes dont match, '%s', '%s', removing file and starting again", rfsize, stats["size"]);
                                                try {
                                                        fs.unlink(metafilename, function(){
                                                                fs.unlink(unify.fullFilePath, function(){
@@ -263,7 +264,7 @@ function checkFile(unify, callback) {
                                });
                        });
                } else {
-                       console.log("file, '%s' exists but has no filesize meta data, assuming it was put here manually and servicing", unify.fullFilePath);
+                       log.debug("file, '%s' exists but has no filesize meta data, assuming it was put here manually and servicing", unify.fullFilePath);
                        unify.b.writeHead(200, {"Content-Length" : unify.fileSize})
                        callback();
                }
@@ -271,17 +272,17 @@ function checkFile(unify, callback) {
 }
 
 function makeCacheDir(path) {
-       console.log("attempting to create... '%s' as '%s'", path.fullPathDirName, path.subPathDirName);
+       log.debug("attempting to create... '%s' as '%s'", path.fullPathDirName, path.subPathDirName);
        
        var startAt = path.topFullPath;
        var nextbits = path.subPathDirName.split("/");
        for(var i=0; i < nextbits.length; i++) {
                startAt += "/" + nextbits[i];
-               console.log("attempt mkdir on '%s'", startAt);
+               log.debug("attempt mkdir on '%s'", startAt);
                try {
                        fs.mkdirSync(startAt);
                } catch(e) {
-                       //console.log("e in mkdir, ", e);
+                       //log.debug("e in mkdir, ", e);
                }
        }
        //process.exit(0);
@@ -294,7 +295,7 @@ function serviceDirectory(unify) {
        res.write("<html><h1>Directory listing for " + unify.originalReq + "</h1><hr><pre>");
        if(unify.originalReq != "/") res.write("<a href=\"..\">Parent</a>\n\n");
        fs.readdir(unify.fullFilePath, function(err, files) {
-               console.log("doing directory listing on: ", unify.fullFilePath);
+               log.debug("doing directory listing on: ", unify.fullFilePath);
                if(err == null) {
                        
                        // TODO: make this work asynchronously...
@@ -317,7 +318,7 @@ function serviceDirectory(unify) {
                                                nfiles++;
                                        }
                                } else {
-                                       console.log("ignoring file, ", files[i]);
+                                       log.debug("ignoring file, ", files[i]);
                                }
                        }
                        
@@ -340,7 +341,7 @@ function moveToCleanup(file_or_dir) {
        var encoded = (++global.repoproxy.fileid).toString();
        var toloc = cleanup + "/" + ctime.toString() + "." + encoded;
        
-       //console.log("Moving %s to %s for cleanup", file_or_dir.replace(/\/$/, ""), toloc);
+       //log.debug("Moving %s to %s for cleanup", file_or_dir.replace(/\/$/, ""), toloc);
        
        fs.renameSync(file_or_dir.replace(/\/$/, ""), toloc);
 }
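
A detail worth calling out in the hunks above: global.repoproxy.downloads is an in-memory map that flags files currently being fetched from upstream. upstreamRequest() consults it so a second request for the same file goes to inlineService() (which tails the partially written cache file) instead of starting another upstream download, and getAndService() clears the flag when the upstream response ends. A minimal sketch of that flag's lifecycle, with a hypothetical path; this is illustrative only, not code from the repo:

    // Illustrative only; mirrors the flag handling in upstreamRequest()/getAndService() above.
    global.repoproxy = { downloads: {} };
    var fullFilePath = "/var/cache/repoproxy/ubuntu/pool/main/f/foo/foo.deb";   // hypothetical

    global.repoproxy.downloads[fullFilePath] = 1;     // set when the upstream GET starts
    if (global.repoproxy.downloads[fullFilePath] == 1) {
        // a concurrent request for the same file would be handed to inlineService()
    }
    global.repoproxy.downloads[fullFilePath] = 0;     // cleared in the response "end" handler
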
diff --git a/lib/config.js b/lib/config.js
index 8d6d2fc..8af15f0 100644
@@ -9,6 +9,7 @@ exports.loadConfig = function (conffile) {
        global.repoproxy.repo = new Object();
        global.repoproxy.scancache = 1;
        global.repoproxy.downloads = new Object();
+       global.repoproxy.loglevel = 3;
 
        // set a global file id for file trashing
        //global.repoproxy.fileid = new Object();
@@ -33,43 +34,46 @@ exports.loadConfig = function (conffile) {
                // parse the line
                switch(line_real[0]) {
                case "repo":
-                       
                        // TODO: VALIDATE!
-                       console.log("Adding repo: '/%s' type '%s' from '%s', with update interval of '%s' days, and expire time of '%s' days.", line_real[1], line_real[2], line_real[3]+":"+line_real[4], line_real[5], line_real[6]);
+                       log.info("Adding repo: '/%s' type '%s' from '%s', with update interval of '%s' days, and expire time of '%s' days.", line_real[1], line_real[2], line_real[3]+":"+line_real[4], line_real[5], line_real[6]);
                        var thisrepo = { type : line_real[2], url: line_real[3]+":"+line_real[4], updateinterval: line_real[5], expiretime: line_real[6] };
                        global.repoproxy.repo[line_real[1]] = thisrepo;
-                       
                        break;
+                       
                case "cachedir":
                        var tmppath = line_real[1].replace(/\/+/g, "/");
-                       console.log("Cache dir set to: ", tmppath);
+                       log.info("Cache dir set to: ", tmppath);
                        global.repoproxy.cacheDir = tmppath;
                        break;
+                       
                case "listenport":
-                       console.log("Port set to: ", line_real[1]);
+                       log.info("Port set to: ", line_real[1]);
                        global.repoproxy.listenPort = line_real[1];
                        break;
+                       
                case "cachescan":
-                       console.log("Set cache scan rate to: '%s' hours", line_real[1]);
+                       log.info("Set cache scan rate to: '%s' hours", line_real[1]);
                        global.repoproxy.scancache = parseInt(line_real[1]);
                        if(global.repoproxy.scancache == 0) {
-                               console.log("Cache scan rate didnt make sense, it was 0, and should be at least 1 - it is set to 24, but you should check this setting");
+                               log.info("Cache scan rate didnt make sense, it was 0, and should be at least 1 - it is set to 24, but you should check this setting");
                                global.repoproxy.scancache = 24;
                        }
                        break;
+                       
                case "loggerlevel":
                        global.repoproxy.loglevel = parseInt(line_real[1]);
-                       console.log("log level set to: ", global.repoproxy.loglevel);
+                       log.info("log level set to: ", global.repoproxy.loglevel);
                        break;
+                       
                default:
                        if(line_real[0] != "") {
-                               console.log("WARNING Invalid line in configuration file ignored: '%s'", line_one);
+                               log.info("WARNING Invalid line in configuration file ignored: '%s'", line_one);
                        }
                }
        }
        
        createCacheStructure();
-       log.testLogSettings();
+       //log.testLogSettings();
 }
 
 
@@ -77,40 +81,40 @@ exports.loadConfig = function (conffile) {
 function createCacheStructure() {
        try {
                var state = fs.statSync(global.repoproxy.cacheDir);
-               //console.log("state is:", state);
+               //log.info("state is:", state);
        } catch(e) {
-               //console.log("try failure of cache dir stat ", e);
+               //log.info("try failure of cache dir stat ", e);
                try {
                        fs.mkdirSync(global.repoproxy.cacheDir);
                } catch(ex) {
-                       console.log("ERROR: failure to create cache directory, '%s'", global.repoproxy.cacheDir);
+                       log.error("failure to create cache directory, '%s'", global.repoproxy.cacheDir);
                }
        }
        
        try {
                var state = fs.statSync(global.repoproxy.cacheDir + "/.cleanup");
-               //console.log("state is:", state);
+               //log.info("state is:", state);
        } catch(e) {
                try {
                        fs.mkdirSync(global.repoproxy.cacheDir + "/.cleanup");
                } catch(ex) {
-                       console.log("ERROR: cant create cleanup directory, '%s'", global.repoproxy.cacheDir + "/.cleanup");
+                       log.error("cant create cleanup directory, '%s'", global.repoproxy.cacheDir + "/.cleanup");
                }
        }
        
-       //console.log("next: ", global.repoproxy.repo);
+       //log.info("next: ", global.repoproxy.repo);
        for(var index in global.repoproxy.repo) {
                var fullDir = global.repoproxy.cacheDir + "/" + index;
-               //console.log("on end, ", fullDir);
+               //log.info("on end, ", fullDir);
                try {
                        var state = fs.statSync(fullDir);
-                       //console.log("state is:", state);
+                       //log.info("state is:", state);
                } catch(e) {
                        try {
-                               //console.log("attempted to create cache dir, ", fullDir);
+                               //log.info("attempted to create cache dir, ", fullDir);
                                fs.mkdirSync(fullDir);
                        } catch(ex) {
-                               console.log("ERROR: failed to create cache directory, '%s' for '%s'", fullDir, index);
+                               log.error("failed to create cache directory, '%s' for '%s'", fullDir, index);
                        }
                }
        }
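
For context, createCacheStructure() builds the on-disk layout the rest of the proxy assumes: the cache directory itself, a .cleanup directory used as a trash area by moveToCleanup() in cache.js, and one subdirectory per configured repo. With a hypothetical cachedir of /var/cache/repoproxy and two repos named ubuntu and centos, the resulting tree would look like:

    /var/cache/repoproxy/              (cachedir; hypothetical value)
    /var/cache/repoproxy/.cleanup/     (trash directory that moveToCleanup() renames expired files into)
    /var/cache/repoproxy/ubuntu/       (one directory per configured repo)
    /var/cache/repoproxy/centos/
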
diff --git a/lib/log.js b/lib/log.js
index 24f31d2..6cb7e21 100644
@@ -20,7 +20,7 @@ function info() {
        //var ts = ts_t.format("%Y-%m-%d %H:%M:%S");
        arguments["0"] = "[??INFO??] ("+ts+"): "+orig;
        
-       if(global.repoproxy.loglevel >= 3) {
+       if(typeof global.repoproxy.loglevel == "undefined" || global.repoproxy.loglevel >= 3) {
                sendLog.apply(this, arguments);
        }
 }
@@ -31,8 +31,10 @@ function debug() {
        //var ts = ts_t.format("%Y-%m-%d %H:%M:%S");
        arguments["0"] = "[^^DEBUG^^] ("+ts+"): "+orig;
        
-       if(global.repoproxy.loglevel >= 4) {
-               sendLog.apply(this, arguments); 
+       if(typeof global.repoproxy.loglevel != "undefined") {
+               if(global.repoproxy.loglevel >= 3) {
+                       sendLog.apply(this, arguments);
+               }       
        }
 }
 
@@ -42,7 +44,7 @@ function warning() {
        //var ts = ts_t.format("%Y-%m-%d %H:%M:%S");
        arguments["0"] = "[!!WARNING!!] ("+ts+"): "+orig;
        
-       if(global.repoproxy.loglevel >= 2) {
+       if(typeof global.repoproxy.loglevel == "undefined" || global.repoproxy.loglevel >= 2) {
                sendLog.apply(this, arguments);
        }
 }
@@ -53,7 +55,7 @@ function error() {
        //var ts = ts_t.format("%Y-%m-%d %H:%M:%S");
        arguments["0"] = "[**ERROR**] ("+ts+"): "+orig;
        
-       if(global.repoproxy.loglevel >= 1) {
+       if(typeof global.repoproxy.loglevel == "undefined" || global.repoproxy.loglevel >= 1) {
                sendLog.apply(this, arguments);         
        }       
 }
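
The log.js changes make each level tolerant of an unset global.repoproxy.loglevel: error, warning and info now also fire when loglevel is undefined, while debug only fires when loglevel is defined (and the new check triggers at level 3 or higher, where the old one required 4). A small usage sketch, assuming the format-string call style seen at the call sites above; the paths and URL are illustrative:

    var log = require("./lib/log.js");

    global.repoproxy = { loglevel: 3 };   // as set by the loggerlevel config line
    log.error("cant create cleanup directory, '%s'", "/var/cache/repoproxy/.cleanup");  // printed at level >= 1
    log.info("Cache dir set to: %s", "/var/cache/repoproxy");                           // printed at level >= 3
    log.debug("sending off to '%s'", "http://archive.ubuntu.com/ubuntu/dists/");        // printed, since debug now fires at >= 3
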
diff --git a/lib/maintain.js b/lib/maintain.js
index 46c3ddc..fe19a00 100644
@@ -4,25 +4,26 @@ var url = require("url");
 var path = require("path");
 var repoapt = require("./repo-apt.js");
 var repoyum = require("./repo-yum.js");
+var log = require("./log.js");
 
 function maintainCache() {
        // TODO i should check that im already running here and exit if i am
-       console.log("Cache maintainence routine starting...");
+       log.debug("Cache maintainence routine starting...");
        for(var index in global.repoproxy.repo) {
-               //console.log("start cleanup in ", index);
-               //console.log("cleanup array ", global.repoproxy.repo[index]);
+               //log.debug("start cleanup in ", index);
+               //log.debug("cleanup array ", global.repoproxy.repo[index]);
                switch(global.repoproxy.repo[index]["type"]) {
                case "apt":
-                       console.log("Scanning '%s' as apt", index);
+                       log.debug("Scanning '%s' as apt", index);
                        var walkin = path.normalize(global.repoproxy.cacheDir + "/" + index);
                        walkDir(walkin, function(err, list) {
                                repoapt.maintain(index, global.repoproxy.repo[index], list);
                        })
                        break;
                case "yum":
-                       //console.log("Scanning '%s' as apt", index);
+                       //log.debug("Scanning '%s' as apt", index);
                        //repoyum.maintain(global.repoproxy.repo[index]);
-                       console.log("Scanning '%s' as yum", index);
+                       log.debug("Scanning '%s' as yum", index);
                        var walkin = path.normalize(global.repoproxy.cacheDir + "/" + index);
                        walkDir(walkin, function(err, list) {
                                repoyum.maintain(index, global.repoproxy.repo[index], list);
@@ -31,7 +32,7 @@ function maintainCache() {
                        break;
                }
        }
-       console.log("Cache maintainence routine ended...");
+       log.debug("Cache maintainence routine ended...");
 }
 
 exports.startTimer = function() {
diff --git a/lib/repo-apt.js b/lib/repo-apt.js
index de2c1c7..814f260 100644
@@ -2,14 +2,15 @@ var fs = require("fs");
 var http = require("http");
 var url = require("url");
 var path = require("path");
+var log = require("./log.js");
 
 function maintain(name, repoinfo, files) {
-       //console.log("doing apt clean for ", repoinfo);
+       //log.debug("doing apt clean for ", repoinfo);
        
        var topdir = path.normalize(global.repoproxy.cacheDir + "/" + name + "/");
        
        function fileCheck(i) {
-               console.log("checking file: ", files[i]);
+               log.debug("checking file: ", files[i]);
                
                var cfile = files[i].replace(topdir, "");
                var ctime_t = new Date();
@@ -20,26 +21,26 @@ function maintain(name, repoinfo, files) {
                // file checks go here
                if(typeof global.repoproxy.downloads[files[i]] != "undefined" && global.repoproxy.downloads[files[i]] == 1) {
                        // ignore this file as its being downloaded
-                       console.log("not checking file because its downloading ", cfile);
+                       log.debug("not checking file because its downloading ", cfile);
                        if(typeof files[i+1] != "undefined") fileCheck(i+1);
                } else {
                        fs.stat(files[i], function(err, stats) {
                                
-                               //console.log("deep check", cfile);
+                               //log.debug("deep check", cfile);
                                if(cfile.match(/.*dists\/.*/) != null) {
                                        // its a dist file, probably, check age and erase if necessary
                                        if(stats["mtime"].getTime() < time_ui) {
                                                // erase file
-                                               console.log("unlinking file for time_ui: ", files[i])
+                                               log.debug("unlinking file for time_ui: ", files[i])
                                                //fs.unlink(files[i]);
                                        } else {
-                                               //console.log("times for file '%s', '%s', '%s', '%s'", cfile, time_ui, time_et, stats["mtime"].getTime());
+                                               //log.debug("times for file '%s', '%s', '%s', '%s'", cfile, time_ui, time_et, stats["mtime"].getTime());
                                        }
                                        
                                } else {
                                        // its just some other file, check the read time
                                        if(stats["atime"].getTime() < time_et) {
-                                               console.log("unlinking file for time_et: ", files[i]);
+                                               log.debug("unlinking file for time_et: ", files[i]);
                                        }
                                }
                                if(typeof files[i+1] != "undefined") fileCheck(i+1);                            
@@ -51,7 +52,7 @@ function maintain(name, repoinfo, files) {
        
        
        if(typeof files[0] != 'undefined') fileCheck(0)
-       else console.log("Skipping (apt) file check as there are none... apprently?");
+       else log.debug("Skipping (apt) file check as there are none... apprently?");
 }
 
 exports.maintain = maintain;
\ No newline at end of file
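
In the apt maintenance pass above, dists/ metadata is judged by modification time against time_ui (the repo's update interval) and everything else by access time against time_et (its expire time); both values are configured in days. The computation of those two cutoffs sits just outside these hunks, so the following is only a plausible sketch, not the repository's actual code:

    // Assumed calculation; the real time_ui/time_et lines are not shown in this diff.
    var dayMs = 24 * 60 * 60 * 1000;
    var ctime_t = new Date();
    var time_ui = ctime_t.getTime() - repoinfo.updateinterval * dayMs;   // dists/ refresh cutoff
    var time_et = ctime_t.getTime() - repoinfo.expiretime * dayMs;       // general expiry cutoff
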
diff --git a/lib/repo-yum.js b/lib/repo-yum.js
index 25db170..cc588e1 100644
@@ -52,13 +52,13 @@ function maintain(name, repoinfo, files) {
                                log.debug("ctime is ", atime);
                                
                                var age = curtime - atime;
-                               //console.log("age is (%d) for (%d)", age, expireAge);
+                               //log.debug("age is (%d) for (%d)", age, expireAge);
                                if(age > expireAge) {
                                        // TODO: cleanup singular file
                                        // TODO: cleanup meta too, fuck me
-                                       //console.log("clean up file \n", files[i]);
+                                       //log.debug("clean up file \n", files[i]);
                                        var metafile = files[i].replace(/(.*)\/(.[^\/]+$)/, "$1/.meta.$2.filesize");
-                                       //console.log("meta for this file is \n", nfile);
+                                       //log.debug("meta for this file is \n", nfile);
                                        cache.moveToCleanup(files[i]);
                                        cache.moveToCleanup(metafile);
                                        log.info("Sending expired file to trash for cleanup (%s)", files[i]);
diff --git a/lib/router.js b/lib/router.js
index 63622f7..5767d26 100644
@@ -6,9 +6,9 @@ var log = require("./log.js");
 
 exports.routeRequest = function(req, res) {
        // first, unify the request
-       console.log("request: ", req.url);
+       log.debug("request: ", req.url);
        var thisQuery = unifyRequest(req, res, function(unified) {
-               console.log("unified request is ", unified);
+               log.debug("unified request is ", unified);
                if(unified.requestFor == "/favicon.ico") {
                        unified.b.writeHead(404, {"Content-Type": "text/plain"});
                        unified.b.write("404 Not Found\n");
@@ -20,12 +20,12 @@ exports.routeRequest = function(req, res) {
                        } else if(unified.isDirectory) {
                                cache.serviceDirectory(unified);
                        } else {
-                               console.log("ERROR: something went majorly wrong with something, ", unified);
+                               log.debug("ERROR: something went majorly wrong with something, ", unified);
                        }
                } else {
                        // it doesnt exist yet, so we send it to the cache service if it matches an upstream service
                        if(typeof global.repoproxy.repo[unified.topPath] != "undefined") {
-                               console.log("file doesnt exist, upstream we go: ", unified);
+                               log.debug("file doesnt exist, upstream we go: ", unified);
                                cache.upstreamRequest(unified);
                        } else {
                                unified.b.writeHead(404, {"Content-Type": "text/plain"});
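
router.routeRequest() is the entry point the rest of this commit feeds into: existing files and directories are served from the cache, anything under a configured repo prefix is fetched upstream, and everything else gets a 404. A minimal sketch of the server wiring, which is assumed here and is not part of this commit; the config path is hypothetical:

    var http = require("http");
    var router = require("./lib/router.js");
    var config = require("./lib/config.js");

    config.loadConfig("./repoproxy.conf");        // hypothetical path; populates global.repoproxy.*
    http.createServer(router.routeRequest)
        .listen(global.repoproxy.listenPort);
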