2005-10-19  jrandom
    * Bugfix for the auto-update code to handle different usage patterns
    * Decreased the addressbook recheck frequency to once every 12 hours
      instead of hourly.
    * Handle dynamically changing the HMAC size (again, unless your nym is
      toad or jrandom, ignore this ;)
    * Cleaned up some synchronization/locking code
@@ -143,7 +143,7 @@ public class Daemon {
         defaultSettings.put("subscriptions", "subscriptions.txt");
         defaultSettings.put("etags", "etags");
         defaultSettings.put("last_modified", "last_modified");
-        defaultSettings.put("update_delay", "1");
+        defaultSettings.put("update_delay", "12");
 
         File homeFile = new File(home);
         if (!homeFile.exists()) {
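
The new default means the addressbook only rechecks its subscriptions every 12 hours. A minimal sketch of how an hour-valued "update_delay" setting turns into a sleep interval, assuming the value is read from a java.util.Properties map (illustrative names, not the actual Daemon code):

    import java.util.Properties;

    public class UpdateDelayExample {
        public static void main(String[] args) {
            Properties settings = new Properties();
            settings.setProperty("update_delay", "12");              // hours between rechecks

            long hours = Long.parseLong(settings.getProperty("update_delay", "12"));
            long sleepMillis = hours * 60 * 60 * 1000;               // 12h -> 43,200,000 ms
            System.out.println("sleeping " + sleepMillis + " ms between addressbook runs");
        }
    }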
@@ -31,7 +31,9 @@ public class UpdateHandler {
 
     private static final String SIGNED_UPDATE_FILE = "i2pupdate.sud";
 
-    public UpdateHandler() {}
+    public UpdateHandler() {
+        this(ContextHelper.getContext(null));
+    }
     public UpdateHandler(RouterContext ctx) {
         _context = ctx;
         _log = ctx.logManager().getLog(UpdateHandler.class);
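
The old no-argument constructor left the handler without a RouterContext or logger, which broke some usage patterns; it now delegates to the full constructor. The same delegation pattern in a self-contained form (illustrative names, not the I2P sources):

    public class Handler {
        private final String _context;

        public Handler() {
            this("default-context");   // delegate so the fields are always initialized
        }

        public Handler(String context) {
            _context = context;
        }

        public String getContext() { return _context; }
    }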
@@ -128,10 +128,13 @@ public class HMACSHA256Generator {
             if (_available.size() > 0)
                 return (HMac)_available.remove(0);
         }
+        // the HMAC is hardcoded to use SHA256 digest size
+        // for backwards compatability. next time we have a backwards
+        // incompatible change, we should update this by removing ", 32"
         if (_useMD5)
-            return new HMac(new MD5Digest());
+            return new HMac(new MD5Digest(), 32);
         else
-            return new HMac(new SHA256Digest());
+            return new HMac(new SHA256Digest(), 32);
     }
     private void release(HMac mac) {
         synchronized (_available) {
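
With the extra size argument the generator keeps producing 32-byte MACs even when the underlying digest is MD5 (16 bytes), preserving compatibility with peers that expect the SHA256 digest size. A hedged usage sketch against the Bouncy Castle lightweight API, assuming the modified HMac(Digest, int) constructor from the hunk above (the bundled copy may live under a different package):

    import org.bouncycastle.crypto.digests.MD5Digest;
    import org.bouncycastle.crypto.macs.HMac;
    import org.bouncycastle.crypto.params.KeyParameter;

    public class HMacSizeExample {
        public static byte[] mac(byte[] key, byte[] data) {
            HMac mac = new HMac(new MD5Digest(), 32);   // force the 32-byte MAC size
            mac.init(new KeyParameter(key));
            mac.update(data, 0, data.length);
            byte[] out = new byte[mac.getMacSize()];    // expected to be 32 here
            mac.doFinal(out, 0);
            return out;
        }
    }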
@@ -115,13 +115,14 @@ public class BufferedStatLog implements StatLog {
         int writeStart = -1;
         int writeEnd = -1;
         while (true) {
+            try {
             synchronized (_events) {
                 if (_eventNext > _lastWrite) {
                     if (_eventNext - _lastWrite < _flushFrequency)
-                        try { _events.wait(30*1000); } catch (InterruptedException ie) {}
+                        _events.wait(30*1000);
                 } else {
                     if (_events.length - 1 - _lastWrite + _eventNext < _flushFrequency)
-                        try { _events.wait(30*1000); } catch (InterruptedException ie) {}
+                        _events.wait(30*1000);
                 }
                 writeStart = (_lastWrite + 1) % _events.length;
                 writeEnd = _eventNext;
@@ -136,6 +137,7 @@ public class BufferedStatLog implements StatLog {
                     _log.error("error writing " + writeStart +"->"+ writeEnd, e);
                 }
             }
+            } catch (InterruptedException ie) {}
         }
     }
 
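
Both hunks above replace the per-wait try/catch with a single handler around each pass of the writer loop, so an interrupt restarts the pass instead of being swallowed at the wait() call. The pattern in a self-contained form (illustrative names, not the I2P sources):

    public class FlushLoop implements Runnable {
        private static final int FLUSH_AT = 500;
        private final Object _lock = new Object();
        private int _pending = 0;

        public void run() {
            while (true) {
                try {
                    synchronized (_lock) {
                        if (_pending < FLUSH_AT)
                            _lock.wait(30*1000);    // may throw InterruptedException
                        flush();
                    }
                } catch (InterruptedException ie) {
                    // fall through and re-check on the next pass
                }
            }
        }

        private void flush() { _pending = 0; }
    }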
@@ -58,9 +58,14 @@ implements Mac
 
     public HMac(
         Digest digest)
+    {
+        this(digest, digest.getDigestSize());
+    }
+    public HMac(
+        Digest digest, int sz)
     {
         this.digest = digest;
-        digestSize = digest.getDigestSize();
+        this.digestSize = sz;
     }
 
     public String getAlgorithmName()
@@ -141,7 +146,7 @@ implements Mac
         byte[] out,
         int outOff)
     {
-        byte[] tmp = acquireTmp();
+        byte[] tmp = acquireTmp(digestSize);
         //byte[] tmp = new byte[digestSize];
         digest.doFinal(tmp, 0);
 
@@ -156,23 +161,27 @@ implements Mac
         return len;
     }
 
-    private static ArrayList _tmpBuf = new ArrayList();
-    private static byte[] acquireTmp() {
+    /**
+     * list of buffers - index 0 is the cache for 32 byte arrays, while index 1 is the cache for 16 byte arrays
+     */
+    private static ArrayList _tmpBuf[] = new ArrayList[] { new ArrayList(), new ArrayList() };
+    private static byte[] acquireTmp(int sz) {
         byte rv[] = null;
-        synchronized (_tmpBuf) {
-            if (_tmpBuf.size() > 0)
-                rv = (byte[])_tmpBuf.remove(0);
+        synchronized (_tmpBuf[sz == 32 ? 0 : 1]) {
+            if (_tmpBuf[sz == 32 ? 0 : 1].size() > 0)
+                rv = (byte[])_tmpBuf[sz == 32 ? 0 : 1].remove(0);
         }
         if (rv != null)
             Arrays.fill(rv, (byte)0x0);
         else
-            rv = new byte[32]; // hard coded against SHA256 (should be digestSize)
+            rv = new byte[sz];
         return rv;
     }
     private static void releaseTmp(byte buf[]) {
-        synchronized (_tmpBuf) {
-            if (_tmpBuf.size() < 100)
-                _tmpBuf.add((Object)buf);
+        if (buf == null) return;
+        synchronized (_tmpBuf[buf.length == 32 ? 0 : 1]) {
+            if (_tmpBuf[buf.length == 32 ? 0 : 1].size() < 100)
+                _tmpBuf[buf.length == 32 ? 0 : 1].add((Object)buf);
         }
     }
 
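
The temporary-buffer pool is now keyed by size: index 0 caches 32-byte arrays and index 1 caches 16-byte arrays, so 32-byte and 16-byte temporaries never mix. The same idea as a self-contained class (illustrative names, not the I2P sources):

    import java.util.ArrayList;
    import java.util.Arrays;

    class TmpBufCache {
        private final ArrayList[] _bufs = { new ArrayList(), new ArrayList() };

        byte[] acquire(int sz) {
            ArrayList bucket = _bufs[sz == 32 ? 0 : 1];
            byte[] rv = null;
            synchronized (bucket) {
                if (bucket.size() > 0)
                    rv = (byte[]) bucket.remove(0);
            }
            if (rv != null)
                Arrays.fill(rv, (byte) 0x0);   // scrub recycled buffers before reuse
            else
                rv = new byte[sz];
            return rv;
        }

        void release(byte[] buf) {
            if (buf == null) return;
            ArrayList bucket = _bufs[buf.length == 32 ? 0 : 1];
            synchronized (bucket) {
                if (bucket.size() < 100)       // cap each free list
                    bucket.add(buf);
            }
        }
    }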
history.txt
@@ -1,4 +1,12 @@
-$Id: history.txt,v 1.299 2005/10/17 19:39:46 jrandom Exp $
+$Id: history.txt,v 1.300 2005/10/17 22:14:01 dust Exp $
 
+2005-10-19  jrandom
+    * Bugfix for the auto-update code to handle different usage patterns
+    * Decreased the addressbook recheck frequency to once every 12 hours
+      instead of hourly.
+    * Handle dynamically changing the HMAC size (again, unless your nym is
+      toad or jrandom, ignore this ;)
+    * Cleaned up some synchronization/locking code
+
 2005-10-17  dust
     * Exchange the remaining URL with EepGet in Sucker.
@@ -1,3 +1,4 @@
 #!/bin/sh
-export I2P=~i2p/i2p
+#export I2P=~i2p/i2p
+export I2P=.
 java -cp $I2P/lib/i2p.jar net.i2p.util.EepGet $*
@@ -135,6 +135,7 @@ public class JobQueue {
 
         long numReady = 0;
         boolean alreadyExists = false;
+        boolean dropped = false;
         synchronized (_jobLock) {
             if (_readyJobs.contains(job))
                 alreadyExists = true;
@@ -144,17 +145,10 @@ public class JobQueue {
                 alreadyExists = true;
         }
 
-        _context.statManager().addRateData("jobQueue.readyJobs", numReady, 0);
         if (shouldDrop(job, numReady)) {
-            if (_log.shouldLog(Log.WARN))
-                _log.warn("Dropping job due to overload! # ready jobs: "
-                          + numReady + ": job = " + job);
             job.dropped();
-            _context.statManager().addRateData("jobQueue.droppedJobs", 1, 1);
-            _jobLock.notifyAll();
-            return;
-        }
-
+            dropped = true;
+        } else {
         if (!alreadyExists) {
             if (job.getTiming().getStartAfter() <= _context.clock().now()) {
                 // don't skew us - its 'start after' its been queued, or later
@@ -162,16 +156,22 @@ public class JobQueue {
                 if (job instanceof JobImpl)
                     ((JobImpl)job).madeReady();
                 _readyJobs.add(job);
-                _jobLock.notifyAll();
             } else {
                 _timedJobs.add(job);
+                }
+            }
+        }
             _jobLock.notifyAll();
         }
-        } else {
-            if (_log.shouldLog(Log.DEBUG))
-                _log.debug("Not adding already enqueued job " + job.getName());
-        }
+
+        _context.statManager().addRateData("jobQueue.readyJobs", numReady, 0);
+        if (dropped) {
+            _context.statManager().addRateData("jobQueue.droppedJobs", 1, 1);
+            if (_log.shouldLog(Log.WARN))
+                _log.warn("Dropping job due to overload! # ready jobs: "
+                          + numReady + ": job = " + job);
         }
+
         return;
     }
 
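
The addJob() rework decides whether to drop inside the lock, records the outcome in a local dropped flag, and defers the stat updates and the warning log until after the lock is released. A minimal sketch of that pattern (illustrative names, not the I2P sources):

    import java.util.ArrayList;
    import java.util.List;

    class Queue {
        private final Object _lock = new Object();
        private final List _ready = new ArrayList();

        void add(Object job, boolean overloaded) {
            boolean dropped = false;
            synchronized (_lock) {
                if (overloaded)
                    dropped = true;              // cheap bookkeeping only, no I/O
                else
                    _ready.add(job);
                _lock.notifyAll();
            }
            if (dropped)                         // slow work happens outside the monitor
                System.err.println("Dropping job due to overload: " + job);
        }
    }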
@@ -329,13 +329,15 @@ public class JobQueue {
      */
     Job getNext() {
         while (_alive) {
+            try {
             synchronized (_jobLock) {
                 if (_readyJobs.size() > 0) {
                     return (Job)_readyJobs.remove(0);
                 } else {
-                    try { _jobLock.wait(); } catch (InterruptedException ie) {}
+                    _jobLock.wait();
                 }
             }
+            } catch (InterruptedException ie) {}
         }
         if (_log.shouldLog(Log.WARN))
             _log.warn("No longer alive, returning null");
@@ -403,6 +405,7 @@ public class JobQueue {
                 long now = _context.clock().now();
                 long timeToWait = -1;
                 ArrayList toAdd = null;
+                try {
                 synchronized (_jobLock) {
                     for (int i = 0; i < _timedJobs.size(); i++) {
                         Job j = (Job)_timedJobs.get(i);
@@ -442,11 +445,10 @@ public class JobQueue {
                         timeToWait = 10*1000;
                     //if (_log.shouldLog(Log.DEBUG))
                     //    _log.debug("Waiting " + timeToWait + " before rechecking the timed queue");
-                    try {
                         _jobLock.wait(timeToWait);
-                    } catch (InterruptedException ie) {}
                 }
                 } // synchronize (_jobLock)
+                } catch (InterruptedException ie) {}
             } // while (_alive)
         } catch (Throwable t) {
             _context.clock().removeUpdateListener(this);
@@ -15,9 +15,9 @@ import net.i2p.CoreVersion;
  *
  */
 public class RouterVersion {
-    public final static String ID = "$Revision: 1.270 $ $Date: 2005/10/14 08:48:05 $";
+    public final static String ID = "$Revision: 1.271 $ $Date: 2005/10/17 19:39:46 $";
     public final static String VERSION = "0.6.1.3";
-    public final static long BUILD = 1;
+    public final static long BUILD = 2;
     public static void main(String args[]) {
         System.out.println("I2P Router version: " + VERSION + "-" + BUILD);
         System.out.println("Router ID: " + RouterVersion.ID);