Linux ip-148-66-134-25.ip.secureserver.net 3.10.0-1160.119.1.el7.tuxcare.els10.x86_64 #1 SMP Fri Oct 11 21:40:41 UTC 2024 x86_64
Apache
Server IP: 148.66.134.25 | Your IP: 3.138.125.86
66 Domains
8.0.30
amvm
www.github.com/MadExploits
Terminal
AUTO ROOT
Adminer
Backdoor Destroyer
Linux Exploit
Lock Shell
Lock File
Create User
CREATE RDP
PHP Mailer
BACKCONNECT
UNLOCK SHELL
HASH IDENTIFIER
CPANEL RESET
BLACK DEFEND!
README
/usr/lib/fm-agent/plugins/
Name                    Size      Permission
__pycache__             [ DIR ]   drwxr-xr-x
__init__.py             0 B       -rw-r--r--
apache.py               19.06 KB  -rw-r--r--
apache_kafka.py         12.9 KB   -rw-r--r--
apache_zookeeper.py     6.26 KB   -rw-r--r--
bandwidth.py            21.3 KB   -rw-r--r--
cassandra.py            9.21 KB   -rw-r--r--
cert.py                 2.78 KB   -rw-r--r--
couch.py                9.5 KB    -rw-r--r--
cpu_usage.py            33 KB     -rw-r--r--
dem_plugin.py           6.08 KB   -rw-r--r--
disk.py                 16.48 KB  -rw-r--r--
docker.py               38.41 KB  -rw-r--r--
elasticsearch.py        2.83 KB   -rw-r--r--
entropy.py              900 B     -rw-r--r--
exim.py                 1.01 KB   -rw-r--r--
file_presence.py        5 KB      -rw-r--r--
haproxy.py              13.37 KB  -rw-r--r--
io_stats.py             13.41 KB  -rw-r--r--
jboss.py                13.46 KB  -rw-r--r--
jmx.py                  8.02 KB   -rw-r--r--
linux_logs.py           3.4 KB    -rw-r--r--
lm_sensors.py           2.51 KB   -rw-r--r--
logstash_forwarder.py   1.58 KB   -rw-r--r--
memcache.py             5.99 KB   -rw-r--r--
memory_usage.py         26.11 KB  -rw-r--r--
mongo.py                15.96 KB  -rw-r--r--
mysql.py                19.74 KB  -rw-r--r--
nagios.py               5.36 KB   -rw-r--r--
nginx.py                11.96 KB  -rw-r--r--
nodejs.py               6.29 KB   -rw-r--r--
ntp.py                  1.98 KB   -rw-r--r--
opcache.py              2.26 KB   -rw-r--r--
oracle.py               15.15 KB  -rw-r--r--
package_upgrade.py      8.08 KB   -rw-r--r--
phpfpm.py               5.51 KB   -rw-r--r--
ping.py                 2.45 KB   -rw-r--r--
postfix.py              1.98 KB   -rw-r--r--
postgresql.py           19.13 KB  -rw-r--r--
process.py              16.32 KB  -rw-r--r--
rabbitmq.py             19.33 KB  -rw-r--r--
redis.py                11.19 KB  -rw-r--r--
sendmail.py             2.39 KB   -rw-r--r--
sysctl.py               1.46 KB   -rw-r--r--
tcp.py                  6.26 KB   -rw-r--r--
template.py             3.28 KB   -rw-r--r--
tomcat.py               6.79 KB   -rw-r--r--
tomcat_jmx.py           15.82 KB  -rw-r--r--
unbound_dns.py          4.54 KB   -rw-r--r--
uptime.py               3.46 KB   -rw-r--r--
users.py                1.09 KB   -rw-r--r--
uwsgi.py                4.57 KB   -rw-r--r--
varnish.py              4.79 KB   -rw-r--r--
weblogic.py             13.38 KB  -rw-r--r--
weblogic12c.py          18.75 KB  -rw-r--r--
Code Editor: nginx.py
import re
import agent_util
import logging
import glob
from library.log_matcher import LogMatcher

try:
    # Python3
    from urllib.request import urlopen
except ImportError:
    # Python2
    from urllib2 import urlopen

logger = logging.getLogger(__name__)


def execute_query(query):
    ret, output = agent_util.execute_command(query)
    return str(output)


LOG_COUNT_EXPRESSIONS = {
    '4xx': r"4\d{2}",
    '5xx': r"5\d{2}",
    '2xx': r"2\d{2}"
}

DEFAULT_NGINX_LOG = "/var/log/nginx/access.log"


class NginxPlugin(agent_util.Plugin):
    textkey = "nginx"
    label = "Nginx"

    DEFAULTS = {
        "console_url": "http://localhost"
    }

    @classmethod
    def get_metadata(self, config):
        status = agent_util.SUPPORTED
        msg = None

        # check if nginx is even installed or running
        installed = agent_util.which("nginx")
        if not installed and not config.get('from_docker'):
            self.log.info("nginx binary not found")
            status = agent_util.UNSUPPORTED
            return {}

        if ("console_url" not in config):
            config.update(self.DEFAULTS)

        if status == agent_util.SUPPORTED and not config.get('from_docker'):
            query = '%s/nginx_status' % config['console_url']
            nginxStatus = urlopen('%s/nginx_status' % config['console_url']).read().decode()
            if config.get("debug", False):
                self.log.debug("Nginx command '%s' output:" % query)
                self.log.debug(str(nginxStatus))
            if not nginxStatus:
                status = agent_util.MISCONFIGURED
                msg = "The nginx_status path is not configured."

        data = {
            "active_connections": {
                "label": "Number of open connections",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "connections"
            },
            "accepted_connections": {
                "label": "Number of accepted connections per second",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "connections/s"
            },
            "dropped_connections": {
                "label": "Number of dropped connections per second",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "connections/s"
            },
            "handled_connections": {
                "label": "Number of handled connections per second",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "connections/s"
            },
            "requests_per_second": {
                "label": "Average requests per second",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "requests/s"
            },
            "requests_per_connection": {
                "label": "Number of requests per connection",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "requests"
            },
            "nginx_reading": {
                "label": "Read request header",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "requests/s"
            },
            "nginx_writing": {
                "label": "Read request body",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "requests/s"
            },
            "nginx_waiting": {
                "label": "Keep alive connections",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "connections/s"
            },
            "4xx": {
                "label": "Rate of 4xx's events",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "entries/s"
            },
            "2xx": {
                "label": "Rate of 2xx's events",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "entries/s"
            },
            "5xx": {
                "label": "Rate of 5xx's events",
                "options": None,
                "status": status,
                "error_message": msg,
                "unit": "entries/s"
            }
        }
        return data

    @classmethod
    def get_metadata_docker(self, container, config):
        if 'console_url' not in config:
            try:
                ip = agent_util.get_container_ip(container)
                config['console_url'] = 'http://%s' % ip
            except Exception:
                import sys
                _, e, _ = sys.exc_info()
                self.log.exception(e)
        config['from_docker'] = True
        return self.get_metadata(config)

    def _calculate_delta(self, textkey, value, is_rate=True):
        """
        Extract the previous cached value, calculate the delta, and store the
        current one.
        """
        cached = self.get_cache_results('nginx:%s' % textkey, None)
        if not cached:
            self.log.info('Empty nginx cache! Building for first time')
            self.cache_result('nginx:%s' % textkey, None, value, replace=True)
            return None

        delta, previous_value = cached[0]
        self.cache_result('nginx:%s' % textkey, None, value, replace=True)
        if previous_value > value:
            return None
        if is_rate:
            return (value - previous_value) / float(delta)
        else:
            return (value - previous_value)

    def check(self, textkey, data, config):
        if not config.get('console_url'):
            config.update(self.DEFAULTS)

        result = urlopen('%s/nginx_status' % config['console_url']).read().decode()
        statLines = result.split('\n')
        p = re.compile(r'(\d+)')
        connections = p.findall(statLines[2])
        connectionsByStatus = p.findall(statLines[3])
        result = 0

        status_map = {
            'nginx_reading': int(connectionsByStatus[0]),
            'nginx_writing': int(connectionsByStatus[1]),
            'nginx_waiting': int(connectionsByStatus[2])
        }

        if textkey == 'active_connections':
            active_connections = p.findall(statLines[0])
            result = int(active_connections[0])

        elif textkey == 'requests_per_connection':
            active_connections = p.findall(statLines[0])
            active_connections = int(active_connections[0])
            requests = int(connections[2])

            requests_textkey = '%s:%s' % (textkey, 'requests')
            requests_diff = self._calculate_delta(requests_textkey, requests, is_rate=False)

            if active_connections and requests_diff:
                return requests_diff / active_connections
            else:
                return None

        # All these values use the delta calculation method
        elif textkey in ('nginx_reading', 'nginx_writing', 'nginx_waiting',
                         'requests_per_second', 'accepted_connections',
                         'handled_connections', 'handles_request',
                         'dropped_connections'):

            # The only difference is in how they get the current value
            if textkey in ('nginx_reading', 'nginx_writing', 'nginx_waiting'):
                current_res = status_map[textkey]
            elif textkey == 'accepted_connections':
                current_res = int(connections[0])
            elif textkey == 'handled_connections':
                current_res = int(connections[1])
            elif textkey in ('requests_per_second'):
                current_res = int(connections[2])
            elif textkey in ('dropped_connections'):
                current_res = int(connections[0]) - int(connections[1])

            return self._calculate_delta(textkey, current_res)

        # Handle the log count metrics
        elif textkey in ('4xx', '5xx', '2xx'):
            log_files = [DEFAULT_NGINX_LOG]
            for key, value in config.items():
                if key not in ['debug', 'console_url']:
                    value = value.strip('"').strip("'")
                    if '*' in value:
                        log_files += glob.glob(value)
                    else:
                        log_files += [value]

            file_inodes = {}
            total_metrics = 0
            timescale = 1
            column = 8
            expression = LOG_COUNT_EXPRESSIONS.get(textkey)

            for target in log_files:
                # Extract the file current inode
                try:
                    file_inodes[target] = LogMatcher.get_file_inode(target)
                except OSError:
                    import sys
                    _, error, _ = sys.exc_info()
                    logging.error("Error opening %s file." % (target))
                    logging.error(error)
                    continue

                # Extract data from the agent cache about the check
                log_data = self.get_cache_results(textkey, "%s/%s" % (self.schedule.id, target))
                if log_data:
                    log_data = log_data[0][-1]
                else:
                    log_data = dict()

                last_line_number = log_data.get('last_known_line')
                stored_inode = log_data.get('inode')
                results = log_data.get('results', [])

                # Extract the lines of the file.
                try:
                    total_lines, current_lines = LogMatcher.get_file_lines(
                        last_line_number, target, file_inodes[target], stored_inode)
                except IOError:
                    import sys
                    _, e, _ = sys.exc_info()
                    logging.error("Unable to read log file: %s. Make sure fm-agent user belongs to group adm" % str(e))
                    continue

                logging.info("Stored line %s Current line %s Looking at %s lines" % (
                    str(last_line_number), str(total_lines), str(len(current_lines))
                ))

                # Perform the matching of the expression in the lines
                log_matcher = LogMatcher(stored_inode)
                results = log_matcher.match_in_column(current_lines, expression, column)
                metric, results = log_matcher.calculate_metric(results, timescale)
                total_metrics += metric and metric or 0
                logging.info('Found %s instances of "%s" in %s' % (
                    str(metric or 0), expression, target)
                )

                previous_result = self.get_cache_results(
                    textkey, "%s/%s" % (self.schedule.id, target)
                )

                cache_data = dict(
                    inode=file_inodes[target],
                    last_known_line=total_lines,
                    results=results
                )
                self.cache_result(textkey, "%s/%s" % (self.schedule.id, target),
                                  cache_data, replace=True)

                if not previous_result:
                    result = None
                else:
                    delta, prev_data = previous_result[0]
                    try:
                        prev_count = prev_data.get('results')[0][-1]
                        curr_count = cache_data.get('results')[0][-1]
                        result = curr_count / float(delta)
                    except IndexError:
                        result = None

        return result

    def check_docker(self, container, textkey, data, config):
        if 'console_url' not in config:
            try:
                ip = agent_util.get_container_ip(container)
                config['console_url'] = 'http://%s' % ip
            except Exception:
                import sys
                _, e, _ = sys.exc_info()
                self.log.exception(e)
        config['from_docker'] = True
        return self.check(textkey, data, config)
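For context, check() expects the standard nginx stub_status page at <console_url>/nginx_status, and the 4xx/2xx/5xx metrics count status codes in access logs written in nginx's default "combined" format. The following is a minimal sketch of both parsing steps with made-up sample data; the sample response, the sample log line, and the fullmatch() call are illustrative assumptions, and the real log matching is done by the LogMatcher class, which lives outside this file.

import re

# --- stub_status parsing (the connection metrics) ---
# Illustrative sample only; the plugin fetches this from
# '%s/nginx_status' % config['console_url'].
sample_status = (
    "Active connections: 291\n"
    "server accepts handled requests\n"
    " 16630948 16630948 31070465\n"
    "Reading: 6 Writing: 179 Waiting: 106\n"
)
lines = sample_status.split('\n')
digits = re.compile(r'(\d+)')

active = int(digits.findall(lines[0])[0])                       # active_connections
accepts, handled, requests = map(int, digits.findall(lines[2]))
reading, writing, waiting = map(int, digits.findall(lines[3]))  # nginx_reading/_writing/_waiting

# accepts/handled/requests are cumulative counters, so the plugin caches them
# and reports per-second deltas via _calculate_delta(); dropped_connections
# is derived as accepts - handled.
print(active, accepts - handled, requests)

# --- access-log counting (the 4xx/2xx/5xx metrics) ---
# In the default "combined" log format, splitting a line on whitespace puts
# the HTTP status in field 8, which is why check() uses column = 8 together
# with expressions such as r"4\d{2}". Sample line is illustrative.
sample_line = ('203.0.113.7 - - [11/Oct/2024:21:40:41 +0000] "GET /index.html HTTP/1.1" '
               '404 153 "-" "curl/7.29.0"')
status_field = sample_line.split()[8]
print(re.fullmatch(r"4\d{2}", status_field) is not None)        # True -> counted toward 4xx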