var http = require("http");
var url = require("url");
var path = require("path");
-
-function maintainCache() {
- // TODO i should check that im already running here and exit if i am
- console.log("Cache maintainence routine starting...");
- console.log("Cache maintainence routine ended...");
-}
-
-exports.startTimer = function() {
- // our once-a-day cache maintainer
- var cacheTimer = global.repoproxy.scancache*3600*1000;
- //var cacheTimer = global.repoproxy.scancache*100;
- setInterval(maintainCache, cacheTimer);
-}
+var crypto = require("crypto");
+var log = require("./log.js");
function upstreamRequest(unify) {
// first do a head request
- console.log("upsteram as ", unify.requestFor);
+ log.debug("upsteram as ", unify.requestFor);
var endData = false;
var xpath = "";
// not doing this properly yet...
if(typeof global.repoproxy.downloads[unify.fullFilePath] != undefined && global.repoproxy.downloads[unify.fullFilePath] == 1) {
- console.log("request for file thats being downloaded already, doing inline request");
+ log.debug("request for file thats being downloaded already, doing inline request");
inlineService(unify);
return;
}
- console.log("sending off to '%s'", xpath);
+ log.debug("sending off to '%s'", xpath);
var headReq = url.parse(xpath);
headReq["method"] = "HEAD";
//res.setEncoding("utf8");
if(!endData) {
- console.log("status code is ", typeof res.statusCode);
+ log.debug("status code is ", typeof res.statusCode);
switch(res.statusCode) {
// TODO: this 301 directory redirect thing needs to work better
case 301:
var against = against_t.substr(against_t.length-4);
if(loc == against) {
- console.log("got a redirect, upstream for loc => loc/ assuming its a directory");
+ log.debug("got a redirect, upstream for loc => loc/ assuming its a directory");
makeCacheDir(unify);
unify.b.writeHead(302, { "Location": unify.originalReq + "/" });
} else {
- console.log("checked '%s' against '%s', was false, sending 404", loc, against);
+ log.debug("checked '%s' against '%s', was false, sending 404", loc, against);
unify.b.writeHead(404, {"Content-Type": "text/plain"});
unify.b.write("404 Not Found\n");
}
} else {
// this is where it gets ugly
var filesize = res.headers["content-length"];
- console.log("do ugly write: ", unify);
+ log.debug("do ugly write: ", unify);
//unify.b.write(data);
var metafilename = unify.fullPathDirName + "/.meta."+ path.basename(unify.requestFor) +".filesize";
var metafile = fs.createWriteStream(metafilename);
}
break;
default:
- console.log(".... data");
+ log.debug(".... data");
//unify.b.write(data);
}
}
- //console.log("res is now ", res);
+ //log.debug("res is now ", res);
});
getup.end();
- //console.log("getup: ", getup);
+ //log.debug("getup: ", getup);
}
exports.upstreamRequest = upstreamRequest;
function getAndService(unify, xpath, filesize) {
- console.log("calling in here with filesize, ", filesize)
+ log.debug("calling in here with filesize, ", filesize)
unify.b.writeHead(200, {'Content-Length' : filesize});
-
global.repoproxy.downloads[unify.fullFilePath] = 1;
-
http.get(xpath, function(res) {
var file = fs.createWriteStream(unify.fullFilePath);
- //console.log("res: ", res);
+ //log.debug("res: ", res);
//res.setEncoding("utf8");
res.on("data", function(data) {
- //console.log("chunk");
+ //log.debug("chunk");
file.write(data);
unify.b.write(data);
});
res.on("end", function() {
- console.log("end...");
+ log.debug("end...");
unify.b.end();
file.end();
global.repoproxy.downloads[unify.fullFilePath] = 0;
});
res.on("error", function(err) {
- console.log("res threw error... ", err);
+ log.debug("res threw error... ", err);
});
});
}
var metafilename = unify.fullPathDirName + "/.meta."+ path.basename(unify.requestFor) +".filesize";
var fsizef = fs.createReadStream(metafilename);
var fsize = "";
+ var lastchunk = 0;
fsizef.on("data", function(data) {
fsize += data;
});
unify.b.writeHead(200, {"Content-Length" : fsize });
// now we go into the file reading loop.
- console.log("start of inline services");
+ log.debug("start of inline services");
// we loop every 0.5s and do our thing
function sendPieces() {
fs.stat(unify.fullFilePath, function(err, stats) {
if(err == null) {
if(stats["size"] > sentSoFar) {
+ // if file size changed between last chunk and this chunk, send the chunks
+
+ lastChunk = 0;
// open the file, send the data
var rs = fs.createReadStream(unify.fullFilePath, {start: sentSoFar, end: stats["size"]});
rs.on("data", function(thisdata) {
- //console.log("inline chunk: ", thisdata.length);
+ //log.debug("inline chunk: ", thisdata.length);
unify.b.write(thisdata);
});
unify.b.end();
}
});
+ } else {
+				// if file size did not change between last timeout and this one, increment the chunk counter
+ // if we reach 60, we had a problem, and so we bomb out
+
+ lastChunk++;
+
+ // we bombed out somehow
+ if(lastChunk > 60) {
+ unify.b.end();
+ } else {
+ setTimeout(sendPieces, 1000);
+ }
}
} else {
- console.log("inline service - we're in a very bad place");
+ log.error("inline service - we're in a very bad place, how we ended up here we dont know, but we need to crash");
+ process.exit(10);
}
});
var rfsize = stats["size"];
if(rfsize != fsize.trim()) {
// remove the file and start again
- console.log("reported filesizes dont match, '%s', '%s', removing file and starting again", rfsize, stats["size"]);
+ log.debug("reported filesizes dont match, '%s', '%s', removing file and starting again", rfsize, stats["size"]);
try {
fs.unlink(metafilename, function(){
fs.unlink(unify.fullFilePath, function(){
});
});
} else {
- console.log("file, '%s' exists but has no filesize meta data, assuming it was put here manually and servicing", unify.fullFilePath);
+ log.debug("file, '%s' exists but has no filesize meta data, assuming it was put here manually and servicing", unify.fullFilePath);
unify.b.writeHead(200, {"Content-Length" : unify.fileSize})
callback();
}
}
function makeCacheDir(path) {
- console.log("attempting to create... '%s' as '%s'", path.fullPathDirName, path.subPathDirName);
+ log.debug("attempting to create... '%s' as '%s'", path.fullPathDirName, path.subPathDirName);
var startAt = path.topFullPath;
var nextbits = path.subPathDirName.split("/");
for(var i=0; i < nextbits.length; i++) {
startAt += "/" + nextbits[i];
- console.log("attempt mkdir on '%s'", startAt);
+ log.debug("attempt mkdir on '%s'", startAt);
try {
fs.mkdirSync(startAt);
} catch(e) {
- //console.log("e in mkdir, ", e);
+ //log.debug("e in mkdir, ", e);
}
}
//process.exit(0);
res.write("<html><h1>Directory listing for " + unify.originalReq + "</h1><hr><pre>");
if(unify.originalReq != "/") res.write("<a href=\"..\">Parent</a>\n\n");
fs.readdir(unify.fullFilePath, function(err, files) {
- console.log("doing directory listing on: ", unify.fullFilePath);
+ log.debug("doing directory listing on: ", unify.fullFilePath);
if(err == null) {
// TODO: make this work asynchronously...
nfiles++;
}
} else {
- console.log("ignoring file, ", files[i]);
+ log.debug("ignoring file, ", files[i]);
}
}
});
}
-exports.serviceDirectory = serviceDirectory;
\ No newline at end of file
+function moveToCleanup(file_or_dir) {
+	// Move a file or directory into the cache's .cleanup area under a unique
+	// timestamped name (time + incrementing global.repoproxy.fileid), stripping
+	// any trailing slash so fs.renameSync gets a clean path.
+ var cleanup = global.repoproxy.cacheDir + "/.cleanup";
+ var ctime = new Date().getTime();
+ var encoded = (++global.repoproxy.fileid).toString();
+ var toloc = cleanup + "/" + ctime.toString() + "." + encoded;
+
+ //log.debug("Moving %s to %s for cleanup", file_or_dir.replace(/\/$/, ""), toloc);
+
+ fs.renameSync(file_or_dir.replace(/\/$/, ""), toloc);
+}
+
+function cleanupRoutine() {
+
+}
+
+
+exports.serviceDirectory = serviceDirectory;
+exports.moveToCleanup = moveToCleanup;