var http = require("http");
var url = require("url");
var path = require("path");
+var crypto = require("crypto");
function upstreamRequest(unify) {
// first do a head request
});
}
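/*
 * A minimal sketch of how the HEAD request mentioned above could be issued
 * with node's http module, assuming "unify" carries the parsed upstream URL
 * (the requestedUrl field below is hypothetical):
 *
 *   var opts = url.parse(unify.requestedUrl);
 *   opts.method = "HEAD";
 *   var req = http.request(opts, function(res) {
 *       // res.statusCode says whether the upstream file exists,
 *       // res.headers["content-length"] how big it is
 *   });
 *   req.end();
 */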
-exports.serviceDirectory = serviceDirectory;
\ No newline at end of file
+function moveToCleanup(file_or_dir) {
+ // move a file or directory into the cache's .cleanup area under a unique, timestamped name
+ var cleanup = global.repoproxy.cacheDir + "/.cleanup";
+ var ctime = new Date().getTime();
+ var encoded = (++global.repoproxy.fileid).toString();
+ var toloc = cleanup + "/" + ctime.toString() + "." + encoded;
+
+ //console.log("Moving %s to %s for cleanup", file_or_dir.replace(/\/$/, ""), toloc);
+
+ fs.renameSync(file_or_dir.replace(/\/$/, ""), toloc);
+}
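+// For example, moveToCleanup("/some/cache/fedora/repodata/") would rename that
+// directory to <cacheDir>/.cleanup/<epoch-ms>.<fileid> (example path hypothetical),
+// where it waits to be removed by cleanupRoutine() once that is implemented.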
+
+function cleanupRoutine() {
+
+}
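+/*
+ * A minimal sketch of what this routine could look like, assuming everything
+ * under .cleanup is safe to delete; entries moved in by moveToCleanup() can be
+ * directories, so removal has to recurse:
+ *
+ *   function removeRecursive(target) {
+ *       if(fs.statSync(target).isDirectory()) {
+ *           fs.readdirSync(target).forEach(function(entry) {
+ *               removeRecursive(target + "/" + entry);
+ *           });
+ *           fs.rmdirSync(target);
+ *       } else {
+ *           fs.unlinkSync(target);
+ *       }
+ *   }
+ *
+ *   var cleanup = global.repoproxy.cacheDir + "/.cleanup";
+ *   fs.readdirSync(cleanup).forEach(function(entry) {
+ *       removeRecursive(cleanup + "/" + entry);
+ *   });
+ */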
+
+
+exports.serviceDirectory = serviceDirectory;
+exports.moveToCleanup = moveToCleanup;
var fs = require("fs");
+var log = require("./log.js");
exports.loadConfig = function (conffile) {
global.repoproxy.repo = new Object();
global.repoproxy.scancache = 1;
global.repoproxy.downloads = new Object();
+
+ // set a global file id for file trashing
+ //global.repoproxy.fileid = new Object();
+ global.repoproxy.fileid = 1;
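+ // the counter is combined with a millisecond timestamp in moveToCleanup(),
+ // so two items trashed within the same millisecond still get distinct names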
+
+
var confFileData = fs.readFileSync(conffile, "utf8");
global.repoproxy.scancache = 24;
}
break;
+ case "loggerlevel":
+ global.repoproxy.loglevel = parseInt(line_real[1], 10);
+ console.log("log level set to: ", global.repoproxy.loglevel);
+ break;
default:
if(line_real[0] != "") {
console.log("WARNING Invalid line in configuration file ignored: '%s'", line_one);
}
createCacheStructure();
+ log.testLogSettings();
}
+
function createCacheStructure() {
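// make sure the cache root, its .cleanup trash area and one directory per
// configured repo all exist, creating whatever is missing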
try {
var state = fs.statSync(global.repoproxy.cacheDir);
//console.log("state is:", state);
} catch(e) {
+ //console.log("try failure of cache dir stat ", e);
try {
fs.mkdirSync(global.repoproxy.cacheDir);
- fs.mkdirSync(global.repoproxy.cacheDir + "/.cleanup");
} catch(ex) {
console.log("ERROR: failure to create cache directory, '%s'", global.repoproxy.cacheDir);
}
}
try {
- fs.mkdirSync(global.repoproxy.cacheDir + "/.cleanup");
- } catch(ex) {
- console.log("ERROR: cant create cleanup directory, '%s'", global.repoproxy.cacheDir + "/.cleanup");
+ var state = fs.statSync(global.repoproxy.cacheDir + "/.cleanup");
+ //console.log("state is:", state);
+ } catch(e) {
+ try {
+ fs.mkdirSync(global.repoproxy.cacheDir + "/.cleanup");
+ } catch(ex) {
+ console.log("ERROR: can't create cleanup directory, '%s'", global.repoproxy.cacheDir + "/.cleanup");
+ }
}
- console.log("next: ", global.repoproxy.repo);
+ //console.log("next: ", global.repoproxy.repo);
for(var index in global.repoproxy.repo) {
var fullDir = global.repoproxy.cacheDir + "/" + index;
- console.log("on end, ", fullDir);
+ //console.log("on end, ", fullDir);
try {
var state = fs.statSync(fullDir);
- console.log("state is:", state);
+ //console.log("state is:", state);
} catch(e) {
try {
- console.log("attempted to create cache dir, ", fullDir);
+ //console.log("attempted to create cache dir, ", fullDir);
fs.mkdirSync(fullDir);
} catch(ex) {
console.log("ERROR: failed to create cache directory, '%s' for '%s'", fullDir, index);
--- /dev/null
+
+
+/*
+ * # level for logging (1 = error, 2 = warning, 3 = info, 4 = debug)
+ * loggerlevel:4
+ */
+function testLogSettings() {
+ var curtime = new Date().getTime();
+ debug("This is a DEBUG TeST (%d)", curtime);
+ info("This is an Info TEST (%d)", curtime);
+ warning("This is a WARN test (%d)", curtime);
+ error("This is an ERROR test (%d)", curtime);
+}
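+// Typical use from other modules, following console.log's format-string style
+// (the URL and timing values here are hypothetical):
+//   var log = require("./log.js");
+//   log.info("served %s from cache in %dms", requestUrl, elapsed);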
+
+
+function info() {
+ var orig = arguments["0"];
+ var ts = new Date().toISOString();
+ //var ts = ts_t.format("%Y-%m-%d %H:%M:%S");
+ arguments["0"] = "[??INFO??] ("+ts+"): "+orig;
+
+ if(global.repoproxy.loglevel >= 3) {
+ sendLog.apply(this, arguments);
+ }
+}
+
+function debug() {
+ var orig = arguments["0"];
+ var ts = new Date().toISOString();
+ //var ts = ts_t.format("%Y-%m-%d %H:%M:%S");
+ arguments["0"] = "[^^DEBUG^^] ("+ts+"): "+orig;
+
+ if(global.repoproxy.loglevel >= 4) {
+ sendLog.apply(this, arguments);
+ }
+}
+
+function warning() {
+ var orig = arguments["0"];
+ var ts = new Date().toISOString();
+ //var ts = ts_t.format("%Y-%m-%d %H:%M:%S");
+ arguments["0"] = "[!!WARNING!!] ("+ts+"): "+orig;
+
+ if(global.repoproxy.loglevel >= 2) {
+ sendLog.apply(this, arguments);
+ }
+}
+
+function error() {
+ var orig = arguments["0"];
+ var ts = new Date().toISOString();
+ //var ts = ts_t.format("%Y-%m-%d %H:%M:%S");
+ arguments["0"] = "[**ERROR**] ("+ts+"): "+orig;
+
+ if(global.repoproxy.loglevel >= 1) {
+ sendLog.apply(this, arguments);
+ }
+}
+
+function sendLog() {
+ console.log.apply(this, arguments);
+}
+
+
+exports.info = info;
+exports.debug = debug;
+exports.warning = warning;
+exports.error = error;
+exports.testLogSettings = testLogSettings;
});
};
-exports.walkDir = walkDir;
-function cleanupRoutine() {
-
-}
\ No newline at end of file
+
+exports.walkDir = walkDir;
var http = require("http");
var url = require("url");
var path = require("path");
+var cache = require("./cache.js");
+var log = require("./log.js");
function maintain(name, repoinfo, files) {
- console.log("doing yum clean for ", repoinfo);
+ log.info("Starting maintenance routine for yum repo %s (%s)", name, repoinfo.url);
var metaAge = 24*3600*1000*(repoinfo.updateinterval);
var expireAge = 24*3600*1000*(repoinfo.expiretime);
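// both intervals are configured in days; 24*3600*1000 converts them to
// milliseconds so they can be compared against getTime() values below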
function fileCheck(i) {
- console.log("checking file: ", files[i]);
+ log.debug("checking file: ", files[i]);
// we look for repodata/repomd.xml file, if this is past maturity, we clean that whole directory
if(files[i].match(/.*repodata\/repomd\.xml$/)) {
- console.log("Found repomd.xml file: ", files[i]);
+ log.debug("Found repomd.xml file: ", files[i]);
// strip the repomd file to get the dir
var repomddir = files[i].replace(/repomd\.xml$/, "");
- console.log("repomd dir is:", repomddir);
+ log.debug("repomd dir is:", repomddir);
// do the file stat
fs.stat(files[i], function(err, stats) {
- console.log("stats for file was: ", stats);
+ log.debug("stats for file was: ", stats);
var curtime = new Date().getTime();
var ctime = stats.ctime.getTime();
- console.log("curtime is ", curtime);
- console.log("ctime is ", ctime);
+ log.debug("curtime is ", curtime);
+ log.debug("ctime is ", ctime);
var age = curtime - ctime;
- console.log("age is (%d) for (%d)", age, metaAge);
+ log.debug("age is (%d) for (%d)", age, metaAge);
if(age > metaAge) {
// TODO: cleanup repomd directory
+ cache.moveToCleanup(repomddir);
+ log.info("Sending repomd directory to trash for cleanup (%s)", repomddir);
}
})
} else {
// STUFF!!!
fs.stat(files[i], function(err, stats) {
- console.log("stats for file was: ", stats);
+ log.debug("stats for file was: ", stats);
var curtime = new Date().getTime();
var atime = stats.atime.getTime();
- console.log("curtime is ", curtime);
- console.log("ctime is ", atime);
+ log.debug("curtime is ", curtime);
+ log.debug("atime is ", atime);
var age = curtime - atime;
- console.log("age is (%d) for (%d)", age, expireAge);
+ log.debug("age is (%d) for (%d)", age, expireAge);
if(age > expireAge) {
// TODO: cleanup singular file
// TODO: clean up the associated meta file too
- console.log("clean up file \n", files[i]);
+ //console.log("clean up file \n", files[i]);
var metafile = files[i].replace(/(.*)\/(.[^\/]+$)/, "$1/.meta.$2.filesize");
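// e.g. ".../Packages/foo.rpm" has its meta file at ".../Packages/.meta.foo.rpm.filesize"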
//console.log("meta for this file is \n", nfile);
- fs.unlink(files[i]);
- fs.unlink(metafile);
+ cache.moveToCleanup(files[i]);
+ cache.moveToCleanup(metafile);
+ log.info("Sending expired file to trash for cleanup (%s)", files[i]);
}
})
}
}
if(typeof files[0] != 'undefined') fileCheck(0)
- else console.log("Skipping (yum) file check as there are none... apprently?");
+ else log.info("Skipping (yum) file check as there are none, apparently");
}
var fs = require("fs");
var cache = require("./cache.js");
var path = require("path");
+var log = require("./log.js");
exports.routeRequest = function(req, res) {
// first, unify the request
console.log("Loading configuration");
config.loadConfig("./repos.conf");
-
+// start the maintenance timer
console.log("Starting cache maintenance timer");
maintain.startTimer();
# packageage is how long a package will go unread before it gets deleted (days)
repo:fedora:yum:http://ftp.iinet.net.au/pub/fedora/linux/:7:120
repo:ubuntu:apt:http://ftp.iinet.net.au/pub/ubuntu/:1:120
+
+# level for logging (1 = error, 2 = warning, 3 = info, 4 = debug)
+loggerlevel:3
--- /dev/null
+
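+// scratch check that console.log.apply() forwards printf-style arguments,
+// the same pattern sendLog() in log.js relies on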
+function logtest() {
+ console.log.apply(this, arguments);
+}
+
+var asdf = new Date();
+
+logtest("one", asdf);