X-Git-Url: http://git.pjr.cc/?p=nodejs-repoproxy.git;a=blobdiff_plain;f=lib%2Fcache.js;fp=lib%2Fcache.js;h=4f98d3c8e32b80e986d8a98fddd92df7509614e4;hp=588bc7315ef3c6ef9ab13a30708c02ba6f3d17f1;hb=3933776341028e83d89e0f888fff3e6319230abf;hpb=d7478970127408f056b157f18df74fef7db3f892

diff --git a/lib/cache.js b/lib/cache.js
index 588bc73..4f98d3c 100644
--- a/lib/cache.js
+++ b/lib/cache.js
@@ -3,10 +3,11 @@ var http = require("http");
 var url = require("url");
 var path = require("path");
 var crypto = require("crypto");
+var log = require("./log.js");
 
 function upstreamRequest(unify) {
 	// first do a head request
-	console.log("upsteram as ", unify.requestFor);
+	log.debug("upsteram as ", unify.requestFor);
 	
 	var endData = false;
 	var xpath = "";
@@ -21,12 +22,12 @@ function upstreamRequest(unify) {
 	
 	// not doing this properly yet...
 	if(typeof global.repoproxy.downloads[unify.fullFilePath] != undefined && global.repoproxy.downloads[unify.fullFilePath] == 1) {
-		console.log("request for file thats being downloaded already, doing inline request");
+		log.debug("request for file thats being downloaded already, doing inline request");
 		inlineService(unify);
 		return;
 	}
 	
-	console.log("sending off to '%s'", xpath);
+	log.debug("sending off to '%s'", xpath);
 	
 	var headReq = url.parse(xpath);
 	headReq["method"] = "HEAD";
@@ -35,7 +36,7 @@ function upstreamRequest(unify) {
 		//res.setEncoding("utf8");
 		
 		if(!endData) {
-			console.log("status code is ", typeof res.statusCode);
+			log.debug("status code is ", typeof res.statusCode);
 			switch(res.statusCode) {
 			// TODO: this 301 directory redirect thing needs to work better
 			case 301:
@@ -46,11 +47,11 @@ function upstreamRequest(unify) {
 					var against = against_t.substr(against_t.length-4);
 					
 					if(loc == against) {
-						console.log("got a redirect, upstream for loc => loc/ assuming its a directory");
+						log.debug("got a redirect, upstream for loc => loc/ assuming its a directory");
 						makeCacheDir(unify);
 						unify.b.writeHead(302, { "Location": unify.originalReq + "/" });
 					} else {
-						console.log("checked '%s' against '%s', was false, sending 404", loc, against);
+						log.debug("checked '%s' against '%s', was false, sending 404", loc, against);
 						unify.b.writeHead(404, {"Content-Type": "text/plain"});
 						unify.b.write("404 Not Found\n");
 					}
@@ -72,7 +73,7 @@ function upstreamRequest(unify) {
 				} else {
 					// this is where it gets ugly
 					var filesize = res.headers["content-length"];
-					console.log("do ugly write: ", unify);
+					log.debug("do ugly write: ", unify);
 					//unify.b.write(data);
 					var metafilename = unify.fullPathDirName + "/.meta."+ path.basename(unify.requestFor) +".filesize";
 					var metafile = fs.createWriteStream(metafilename);
@@ -83,23 +84,23 @@ function upstreamRequest(unify) {
 					}
 					break;
 				default:
-					console.log(".... data");
+					log.debug(".... data");
 					//unify.b.write(data);
 			}
 		}
 		
-		//console.log("res is now ", res);
+		//log.debug("res is now ", res);
 	});
 	
 	getup.end();
 	
-	//console.log("getup: ", getup);
+	//log.debug("getup: ", getup);
 }
 
 exports.upstreamRequest = upstreamRequest;
 
 function getAndService(unify, xpath, filesize) {
 	
-	console.log("calling in here with filesize, ", filesize)
+	log.debug("calling in here with filesize, ", filesize)
 	
 	unify.b.writeHead(200, {'Content-Length' : filesize});
@@ -110,25 +111,25 @@ function getAndService(unify, xpath, filesize) {
 	
 		var file = fs.createWriteStream(unify.fullFilePath);
 	
-		//console.log("res: ", res);
+		//log.debug("res: ", res);
 	
 		//res.setEncoding("utf8");
 	
 		res.on("data", function(data) {
-			//console.log("chunk");
+			//log.debug("chunk");
 			file.write(data);
 			unify.b.write(data);
 		});
 	
 		res.on("end", function() {
-			console.log("end...");
+			log.debug("end...");
 			unify.b.end();
 			file.end();
 			global.repoproxy.downloads[unify.fullFilePath] = 0;
 		});
 	
 		res.on("error", function(err) {
-			console.log("res threw error... ", err);
+			log.debug("res threw error... ", err);
 		});
 	});
 }
@@ -149,7 +150,7 @@ function inlineService(unify) {
 	unify.b.writeHead(200, {"Content-Length" : fsize });
 	
 	// now we go into the file reading loop.
-	console.log("start of inline services");
+	log.debug("start of inline services");
 	
 	// we loop every 0.5s and do our thing
 	function sendPieces() {
@@ -164,7 +165,7 @@ function inlineService(unify) {
 			var rs = fs.createReadStream(unify.fullFilePath, {start: sentSoFar, end: stats["size"]});
 			
 			rs.on("data", function(thisdata) {
-				//console.log("inline chunk: ", thisdata.length);
+				//log.debug("inline chunk: ", thisdata.length);
 				unify.b.write(thisdata);
 			});
 			
@@ -192,7 +193,7 @@ function inlineService(unify) {
 				}
 			}
 		} else {
-			console.log("inline service - we're in a very bad place");
+			log.debug("inline service - we're in a very bad place");
 		}
 	});
 	
@@ -245,7 +246,7 @@ function checkFile(unify, callback) {
 				var rfsize = stats["size"];
 				if(rfsize != fsize.trim()) {
 					// remove the file and start again
-					console.log("reported filesizes dont match, '%s', '%s', removing file and starting again", rfsize, stats["size"]);
+					log.debug("reported filesizes dont match, '%s', '%s', removing file and starting again", rfsize, stats["size"]);
 					try {
 						fs.unlink(metafilename, function(){
 							fs.unlink(unify.fullFilePath, function(){
@@ -263,7 +264,7 @@ function checkFile(unify, callback) {
 					});
 				});
 			} else {
-				console.log("file, '%s' exists but has no filesize meta data, assuming it was put here manually and servicing", unify.fullFilePath);
+				log.debug("file, '%s' exists but has no filesize meta data, assuming it was put here manually and servicing", unify.fullFilePath);
 				unify.b.writeHead(200, {"Content-Length" : unify.fileSize})
 				callback();
 			}
@@ -271,17 +272,17 @@ function checkFile(unify, callback) {
 }
 
 function makeCacheDir(path) {
-	console.log("attempting to create... '%s' as '%s'", path.fullPathDirName, path.subPathDirName);
+	log.debug("attempting to create... '%s' as '%s'", path.fullPathDirName, path.subPathDirName);
 	var startAt = path.topFullPath;
 	var nextbits = path.subPathDirName.split("/");
 	
 	for(var i=0; i < nextbits.length; i++) {
 		startAt += "/" + nextbits[i];
-		console.log("attempt mkdir on '%s'", startAt);
+		log.debug("attempt mkdir on '%s'", startAt);
 		try {
 			fs.mkdirSync(startAt);
 		} catch(e) {
-			//console.log("e in mkdir, ", e);
+			//log.debug("e in mkdir, ", e);
 		}
 	}
 	
 	//process.exit(0);
@@ -294,7 +295,7 @@ function serviceDirectory(unify) {
 	res.write("Directory listing for " + unify.originalReq + "");
 	if(unify.originalReq != "/") res.write("Parent\n\n");
 	fs.readdir(unify.fullFilePath, function(err, files) {
-		console.log("doing directory listing on: ", unify.fullFilePath);
+		log.debug("doing directory listing on: ", unify.fullFilePath);
 		if(err == null) {
 			
 			// TODO: make this work asynchronously...
@@ -317,7 +318,7 @@ function serviceDirectory(unify) {
 						nfiles++;
 					}
 				} else {
-					console.log("ignoring file, ", files[i]);
+					log.debug("ignoring file, ", files[i]);
 				}
 			}
 			
@@ -340,7 +341,7 @@ function moveToCleanup(file_or_dir) {
 	var encoded = (++global.repoproxy.fileid).toString();
 	var toloc = cleanup + "/" + ctime.toString() + "." + encoded;
 	
-	//console.log("Moving %s to %s for cleanup", file_or_dir.replace(/\/$/, ""), toloc);
+	//log.debug("Moving %s to %s for cleanup", file_or_dir.replace(/\/$/, ""), toloc);
 	
 	fs.renameSync(file_or_dir.replace(/\/$/, ""), toloc);
 }