<)))°> ><)))°> ><)))°> ><)))°> ><)))°> ><)))°> ><)))°> ><)))°> ><)))°>
 *
 * Live Stats top style
 *
 * @author Cyrille Mahieux : elijaa(at)free.fr
 * @since 12/04/2010
 */

# Require
require_once 'Library/Bootstrap.php';

# Initializing requests
$request = isset($_REQUEST['request_command']) ? $_REQUEST['request_command'] : null;

# Stat of a particular cluster
if (isset($_REQUEST['cluster']) && ($_REQUEST['cluster'] != null)) {
    $cluster = $_REQUEST['cluster'];
} else {
    # Getting default cluster
    $clusters = array_keys($_ini->get('servers'));
    $cluster = isset($clusters[0]) ? $clusters[0] : null;
    $_REQUEST['cluster'] = $cluster;
}

# Checking writing status in temporary folder
if (is_writable($_ini->get('file_path')) === false) {
    # Trying to change permissions (best effort: result intentionally ignored,
    # a later file_put_contents will simply fail silently as before)
    chmod($_ini->get('file_path'), 0775);
}

# Hashing cluster
# Cast to string: $cluster can be null when no server is configured, and
# passing null to md5() is deprecated since PHP 8.1
$hash = md5((string) $_REQUEST['cluster']);

# Cookie @FIXME not a perfect method
if (! isset($_COOKIE['live_stats_id' . $hash])) {
    # Cleaning temporary directory
    $files = glob($_ini->get('file_path') . '*', GLOB_NOSORT);
    foreach ($files as $path) {
        # Getting file last modification time
        $stats = @stat($path);

        # Deleting file older than 24 hours
        if (isset($stats['mtime']) && ($stats['mtime'] < (time() - 60 * 60 * 24))) {
            @unlink($path);
        }
    }

    # Generating unique id (alphanumeric only: it becomes part of a file name)
    $live_stats_id = mt_rand() . $hash;

    # Cookie
    setcookie('live_stats_id' . $hash, $live_stats_id, time() + 60 * 60 * 24);
} else {
    # Backup from a previous request
    # SECURITY: this cookie value is concatenated into a filesystem path below.
    # Strip everything but [a-zA-Z0-9] so a crafted cookie such as "../../x"
    # cannot traverse outside the temporary folder. Legitimate ids (mt_rand()
    # digits + md5 hex) are unaffected.
    $live_stats_id = preg_replace('/[^a-zA-Z0-9]/', '', $_COOKIE['live_stats_id' . $hash]);
}

# Live stats dump file
$file_path = rtrim($_ini->get('file_path'), '/') . DIRECTORY_SEPARATOR . 'live_stats.' . $live_stats_id;

# Display by request type
switch ($request) {
    # Ajax ask : stats
    case 'live_stats':
        # Opening old stats dump
        # On the first request the dump file does not exist yet:
        # file_get_contents() returns false and unserialize() yields false,
        # so normalize to an array to avoid undefined-index notices below
        $previous = @unserialize((string) @file_get_contents($file_path));
        if (! is_array($previous)) {
            $previous = array();
        }

        # Initializing variables
        $actual = array();
        $stats = array();
        $time = 0;

        # Requesting stats for each server
        foreach ($_ini->cluster($cluster) as $name => $server) {
            # Start query time calculation
            $time = microtime(true);

            # Asking server for stats
            $actual[$name] = Library_Command_Factory::instance('stats_api')->stats($server['hostname'], $server['port']);

            # Calculating query time length (milliseconds, floor of 1)
            $actual[$name]['query_time'] = max((microtime(true) - $time) * 1000, 1);
        }

        # Analysing stats : diff between old and new dump
        # (missing previous entry — new server or first run — diffs against
        # an empty array instead of raising an undefined-index notice)
        foreach ($_ini->cluster($cluster) as $name => $server) {
            $stats[$name] = Library_Data_Analysis::diff(
                isset($previous[$name]) ? $previous[$name] : array(),
                $actual[$name]
            );
        }

        # Making stats for each server
        foreach ($stats as $server => $array) {
            # Analysing request
            if ((isset($stats[$server]['uptime'])) && ($stats[$server]['uptime'] > 0)) {
                # Computing stats
                $stats[$server] = Library_Data_Analysis::stats($stats[$server]);

                # Because we make a diff on every key, we must reasign some values
                # Guard against division by zero: a server may report
                # limit_maxbytes = 0
                $stats[$server]['bytes_percent'] = ($actual[$server]['limit_maxbytes'] > 0)
                    ? sprintf('%.1f', $actual[$server]['bytes'] / $actual[$server]['limit_maxbytes'] * 100)
                    : '0.0';
                $stats[$server]['bytes'] = $actual[$server]['bytes'];
                $stats[$server]['limit_maxbytes'] = $actual[$server]['limit_maxbytes'];
                $stats[$server]['curr_connections'] = $actual[$server]['curr_connections'];
                $stats[$server]['query_time'] = $actual[$server]['query_time'];
            }
        }

        # Saving new stats dump (best effort, as before)
        file_put_contents($file_path, serialize($actual));

        # Showing stats
        include 'View/LiveStats/Stats.phtml';
        break;

    # Default : No command
    default:
        # Initializing : making stats dump
        $stats = array();
        foreach ($_ini->cluster($cluster) as $name => $server) {
            $stats[$name] = Library_Command_Factory::instance('stats_api')->stats($server['hostname'], $server['port']);
        }

        # Saving first stats dump
        file_put_contents($file_path, serialize($stats));

        # Searching for connection error, adding some time to refresh rate to prevent error
        $refresh_rate = max($_ini->get('refresh_rate'), count($_ini->cluster($cluster)) * 0.25 + (Library_Data_Error::count() * (0.5 + $_ini->get('connection_timeout'))));

        # Showing header
        include 'View/Header.phtml';

        # Showing live stats frame
        include 'View/LiveStats/Frame.phtml';

        # Showing footer
        include 'View/Footer.phtml';
        break;
}