Hello,
My intention in asking this question on this forum is to improve this piece of code (for performance and other best practices).
I have a couple of web applications and need to monitor the HTTP response for them. I decided to use Perl: the script should fetch the HTTP page from each web app within the SLA time (service level). If the SLA is not met, the system should send an alert. For sending alerts, it interacts with command line tools like email/nagios.
SLAs are in seconds.
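At its core, each check is just an alarm-wrapped fetch. Distilled down to a single URL (the URL and SLA are hard-coded here purely for illustration), the idea is:
---------------
#!/usr/bin/perl
use strict;
use warnings;
use LWP::UserAgent;

my $url = 'https://www.example.com';    # illustrative values only
my $sla = 10;                           # seconds

my $ua = LWP::UserAgent->new;
local $SIG{ALRM} = sub { die "SLA: DID NOT MET\n" };
eval {
    alarm($sla);                        # start the SLA timer
    my $response = $ua->get($url);      # blocking fetch
    alarm(0);                           # cancel the timer if we got back in time
    print $response->status_line, "\n";
};
if ( $@ && $@ =~ /SLA: DID NOT MET/ ) {
    print "SLA DID NOT MET for $url\n";    # this is where the alert goes
}
---------------
The full script does the same thing per application, in parallel, driven by the configuration files described below.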
The typical file structure of the application is as follows:
/bin/httpresp.pl
/conf/{app1.conf, app2.conf, ..}
/logs/{app1.log, app2.log...}
The script uses a configuration file for each web app; a configuration file looks like this:
---------------
url = https://www.example.com
sla = 10
logfile = example.com.prod
-----------
The script looks as follows:
-------------------------------
#!/usr/bin/perl
use strict;
use warnings;
use AppConfig;
use Parallel::ForkManager;
use Log::Log4perl qw(:easy);
use HTTP::Request::Common;
use LWP::UserAgent;
use HTTP::Cookies;
use Benchmark ':hireswallclock';
use Time::HiRes qw( time alarm sleep );
use IPC::Run('run');    # not used in this script, but useful to interact with nagios or run any other command line commands

our %LOGFILE;
our $APP_HOME = '/usr/local/application';
our $HTTPRESP = 'httpresp';
$LOGFILE{file} = $APP_HOME . "/" . $HTTPRESP . "/" . 'logs/generic.log';

my $MAX_PROC = 100;    # restricting to 100 processes at this moment, but can be modified any time

$SIG{ALRM} = \&response_sla;

my $confdir = "$APP_HOME/$HTTPRESP/conf";
my $pm      = Parallel::ForkManager->new($MAX_PROC);
my $cnfhs   = get_configurations();

foreach my $key ( keys %$cnfhs ) {
    my $conf = $cnfhs->{$key};
    my $pid  = $pm->start and next;
    my $logger = logger( $conf->get('logfile') );
    crawl_sites( $conf, $logger );
    $pm->finish;
}
$pm->wait_all_children;

sub get_configurations {
    my @confs    = glob( $confdir . "/*.conf" );
    my $confhash = {};
    foreach my $cnf (@confs) {
        my $config = AppConfig->new();
        $config->define('name=s');
        $config->define('url=s');
        $config->define('sla=s');
        $config->define('logfile=s');
        $config->define('appname=s');
        $config->file($cnf);
        $confhash->{$cnf} = $config;
    }
    return $confhash;
}

sub logger {
    my $logfilename = shift;
    $LOGFILE{file} = $logfilename;
    my $conf = q(
        log4perl.logger                                     = INFO, FileApp
        log4perl.appender.FileApp                           = Log::Log4perl::Appender::File
        log4perl.appender.FileApp.filename                  = sub { getLogfilename(); }
        log4perl.appender.FileApp.layout                    = PatternLayout
        log4perl.appender.FileApp.layout.ConversionPattern  = %d> %m%n
    );

    # Initialize logging behaviour
    Log::Log4perl->init( \$conf );

    # Obtain a logger instance
    my $logger = get_logger();
    return $logger;
}

sub getLogfilename {
    return "$APP_HOME/$HTTPRESP/logs/" . $LOGFILE{file} . ".log";
}

sub crawl_sites {
    my ( $conf, $logger ) = @_;
    my $sla = $conf->get('sla');
    my $url = $conf->get('url');
    eval {
        alarm($sla);
        get_response( $url, $logger );
        alarm(0);
    };
    if ( $@ =~ /SLA: DID NOT MET/ ) {
        print "SLA DID NOT MET\n";
        # code_to_interact_with_nagios
    }
    else {
        # code_to_interact_with_nagios
    }
}

sub get_response {
    my ( $url, $logger ) = @_;
    $logger->info("Started monitoring: $url ");
    my $ua = LWP::UserAgent->new;
    $ua->cookie_jar( HTTP::Cookies->new() );
    $ua->agent('Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.2.18) Gecko/20110614 Firefox/3.6.18');
    $ua->ssl_opts( verify_hostname => 0 );    # skip verifying the SSL certificate
    my $t1 = Benchmark->new;
    my $response = $ua->request( GET $url );
    if ( $response->is_success ) {
        $logger->info("SUCCESS: Got the response");    # or whatever
    }
    else {
        $logger->info( "ERROR: the response was, " . $response->status_line );
    }
    my $t2 = Benchmark->new;
    my $td = timediff( $t2, $t1 );
    print "Total Response time: " . timestr($td) . "\n";
    $logger->info( "Total Response time: " . timestr($td) );
}

sub response_sla {
    die "SLA: DID NOT MET";
}
--------------------------------
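The nagios/email part is left out above (the "# code_to_interact_with_nagios" placeholders). Roughly, it shells out through IPC::Run's run(); a minimal sketch, assuming a hypothetical alert command and arguments (not the real command line, which is not shown here), would be:
---------------
#!/usr/bin/perl
use strict;
use warnings;
use IPC::Run qw(run);

# Hypothetical alert helper: the command name, flags and message format are
# placeholders, not the actual tool used in production.
sub send_alert {
    my ( $url, $message ) = @_;
    my @cmd = ( 'send_alert_cmd', '--subject', "SLA breach: $url", '--body', $message );
    my ( $in, $out, $err ) = ( '', '', '' );

    # run() returns true when the command exits with status 0
    my $ok = run \@cmd, \$in, \$out, \$err;
    warn "alert command failed: $err\n" unless $ok;
    return $ok;
}

send_alert( 'https://www.example.com', 'Response did not arrive within the SLA window' );
---------------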
Help me to improve this program: performance, and scaling to a large number of apps (say 1k or 2k web apps).
Thank you for your time!
--sss