From: Tyler Riddle
Date: Fri, 21 Sep 2012 02:46:28 +0000 (-0700)
Subject: add note on fixes for huge json problem
X-Git-Url: http://git.shadowcat.co.uk/gitweb/gitweb.cgi?a=commitdiff_plain;h=10a2546f31d537e2c9ccc2eb3086785495f78ec3;p=scpubgit%2FSystem-Introspector.git

add note on fixes for huge json problem
---

diff --git a/lib/System/Introspector/Gatherer.pm b/lib/System/Introspector/Gatherer.pm
index 052f130..be9b393 100644
--- a/lib/System/Introspector/Gatherer.pm
+++ b/lib/System/Introspector/Gatherer.pm
@@ -4,7 +4,7 @@ use Object::Remote;
 use Object::Remote::Future;
 use System::Introspector::Gatherer::Bridge;
 use Module::Runtime qw( use_module );
-use System::Introspector::Logger qw( :log );
+use System::Introspector::Logger qw( :log :dlog );
 
 has introspectors => (is => 'ro', required => 1);
 
@@ -38,7 +38,14 @@ sub gather_all {
     }
 
     log_trace { "gather_all() has completed" };
-
+
+    #TODO the easiest way to solve the problem with huge
+    #JSON looks like it would be to hand back
+    #a report object with methods that access
+    #the individual parts of the report. That
+    #way the transfer will happen in chunks
+    #as _store in ::Stage pulls in the
+    #results
     return \%report;
 }
 
diff --git a/lib/System/Introspector/State.pm b/lib/System/Introspector/State.pm
index 7c89a4a..c4d5a41 100644
--- a/lib/System/Introspector/State.pm
+++ b/lib/System/Introspector/State.pm
@@ -35,6 +35,12 @@ sub gather {
 
         log_debug { "Waiting on futures for host '$host'" };
 
+        #TODO another way to solve the huge JSON problem is to
+        #invoke the probes from the controller directly via proxy
+        #objects and receive the results from each probe as
+        #they complete - it would cause less RAM consumption for the
+        #system as a whole but requires modifying the future-based
+        #synchronization logic
         my @data = await_all @futures;
         log_trace { "Received all from group '$group' on '$host'" };
         $self->_store($host, $group, +{ map %$_, @data });
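
The first TODO above sketches a chunked-transfer approach: instead of gather_all() returning one big hash that is serialized as a single huge JSON document, it would return an object whose accessors expose one probe's results at a time, so each access made through the Object::Remote proxy moves only that part. Below is a minimal sketch of such a report class under that assumption; the class name and the probe_names/result_for methods are purely illustrative and not part of the existing code.

package System::Introspector::Gatherer::Report;   # hypothetical class name
use Moo;

# raw results keyed by probe/introspector name, e.g. { perls => {...}, ... }
has _results => (is => 'ro', required => 1);

# list the individual parts of the report
sub probe_names { keys %{ $_[0]->_results } }

# hand back a single part; if the report object stays on the remote side,
# each call like this is its own Object::Remote round trip, so the data
# crosses the connection in chunks rather than as one huge JSON blob
sub result_for {
    my ($self, $name) = @_;
    return $self->_results->{$name};
}

1;

The consumer (the _store path in ::Stage that the comment refers to) would then loop over probe_names() and call result_for() for each part, pulling the report in piece by piece.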
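
The second TODO describes consuming each probe's result as it completes instead of awaiting everything and merging it into one hash. The sketch below shows only that incremental-consumption half of the idea, using await_future (exported by Object::Remote::Future alongside await_all) rather than the direct proxy objects the comment proposes, and it assumes it is dropped into the loop in State::gather where @futures, $host and $group already exist. As the comment itself notes, the future-based synchronization and _store() would need changes to accept partial results, so treat this as an assumption-laden outline, not the planned implementation.

use Object::Remote::Future;   # provides await_all and await_future

# instead of:  my @data = await_all @futures;
# block on one future at a time and hand each result off as soon as it
# arrives, so only a single probe's data needs to be held at once
for my $pending (@futures) {
    my $data = await_future($pending);    # one probe's result
    $self->_store($host, $group, $data);  # assumes _store can accept
                                          # partial, per-probe results
}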