<?php
require_once('itemCache.inc.php');
 /**
 * The code here is a rather effective demonstration of how ineffective
 * the itemcache object is for random access to a set of N values which
 * is significantly larger than the cache size M; at best the hit
 * ratio will be M/N
 * however this may still provide performance benefits using a
 * overflow + underflow handler since the resultant writes will
 * be batched. See also
 * http://dev.mysql.com/doc/refman/5.0/en/insert-speed.html
 *
 * As M->N, the hit ratio will -> 1.0
 *
 * On the other hand if there is some implicit grouping in the
 * input set of key values, then there is likely to be a big
 * performance benefit.
 */
 // rwratio is the target fraction of iterations that perform a write (add);
 // the remaining iterations perform a read (get)
 // Benchmark scenarios: cache capacity, key range [minkey, maxkey],
 // iteration count, and the target write fraction (rwratio).
 $tests = [
     ['size' => 50,  'minkey' => 0, 'maxkey' => 200,  'iters' => 250000, 'rwratio' => 0.5],
     ['size' => 200, 'minkey' => 0, 'maxkey' => 2000, 'iters' => 250000, 'rwratio' => 0.3],
     ['size' => 300, 'minkey' => 0, 'maxkey' => 1000, 'iters' => 250000, 'rwratio' => 0.1],
     ['size' => 300, 'minkey' => 0, 'maxkey' => 400,  'iters' => 250000, 'rwratio' => 0.1],
 ];
 
 // Run each scenario: perform a randomized mix of writes (add) and reads
 // (get) against a fresh cache, then report the hit ratio, memory usage
 // and elapsed time alongside the scenario parameters.
 foreach ($tests as $t) {
     $start = microtime(true);
     $c = new itemCache($t['size']);
     // BUG FIX: rand(0,100) yields an integer, so comparing it directly
     // against the fractional rwratio (0.1..0.5) made writes fire only
     // when rand() returned exactly 0 (~1% of the time). Scale the
     // fraction to a percentage once, outside the loop; rand(0,99) has
     // exactly 100 outcomes, so P(write) == rwratio.
     $writeThreshold = (int) round($t['rwratio'] * 100);
     for ($i = 0; $i < $t['iters']; $i++) {
         $key = rand($t['minkey'], $t['maxkey']);
         if (rand(0, 99) < $writeThreshold) {
             // Write path: store a random value under the random key.
             $val = rand($t['minkey'], $t['maxkey']);
             $c->add($key, $val);
         } else {
             // Read path. NOTE(review): '@' retained — get() presumably
             // emits a notice/warning on a cache miss; confirm against
             // the itemCache implementation before removing.
             @$c->get($key);
         }
     }
     $complete = microtime(true);
     $r = $c->stats();
     $lookups = $r['hits'] + $r['misses'];
     $result = array_merge($t, $r, array(
         // Guard against division by zero when no reads occurred
         // (possible for rwratio == 1.0 or iters == 0).
         'hitratio' => $lookups > 0 ? $r['hits'] / $lookups : 0,
         'mem'      => memory_get_usage(),
         'elapsed'  => ($complete - $start),
     ));
     print_r($result);
 }
 