use Object::Remote::Future;
use System::Introspector::Gatherer::Bridge;
use Module::Runtime qw( use_module );
-use System::Introspector::Logger qw( :log );
+use System::Introspector::Logger qw( :log :dlog );
has introspectors => (is => 'ro', required => 1);
}
log_trace { "gather_all() has completed" };
-
+
+ #TODO the easiest way to solve the problem with huge
+ #JSON looks like it would be to hand back
+ #a report object with methods that access
+ #the individual parts of the report. That
+ #way the transfer will happen in chunks
+ #as _store in ::Stage pulls in the
+ #results
return \%report;
}
log_debug { "Waiting on futures for host '$host'" };
+ #TODO another way to solve the huge JSON problem is to
+ #invoke the probes from the controller directly via proxy
+ #objects and receive the results from each probe as
+ #they complete - it would cause less RAM consumption for the
+ #system as a whole but requires modifying the future-based
+ #synchronization logic
my @data = await_all @futures;
log_trace { "Received all from group '$group' on '$host'" };
$self->_store($host, $group, +{ map %$_, @data });