Linux ip-148-66-134-25.ip.secureserver.net 3.10.0-1160.119.1.el7.tuxcare.els10.x86_64 #1 SMP Fri Oct 11 21:40:41 UTC 2024 x86_64
Apache
Server IP: 148.66.134.25 | Client IP: 3.21.21.209
66 domains
8.0.30
amvm
www.github.com/MadExploits
/usr/lib/fm-agent/library/
Name                    Size      Permission
__pycache__             [ DIR ]   drwxr-xr-x
__init__.py             0 B       -rw-r--r--
agent.py                96.8 KB   -rw-r--r--
agent_exceptions.py     110 B     -rw-r--r--
agent_util.py           8.58 KB   -rw-r--r--
aggregator.py           14.89 KB  -rw-r--r--
anomaly.py              2.19 KB   -rw-r--r--
blacklister.py          809 B     -rw-r--r--
container_discovery.py  3.3 KB    -rw-r--r--
display.py              2.06 KB   -rw-r--r--
forticlient_helper.py   2.59 KB   -rw-r--r--
inspector.py            15.7 KB   -rw-r--r--
iperf3.py               2.12 KB   -rw-r--r--
log_matcher.py          4.27 KB   -rw-r--r--
maintenance.py          3.61 KB   -rw-r--r--
pickle_database.py      1.28 KB   -rw-r--r--
plugin_driver.py        4.78 KB   -rw-r--r--
plugin_manager.py       11.04 KB  -rw-r--r--
process_manager.py      851 B     -rw-r--r--
progress_printer.py     837 B     -rw-r--r--
result_queue.py         1.99 KB   -rw-r--r--
schedule.py             3.19 KB   -rw-r--r--
threshold.py            1.5 KB    -rw-r--r--
Code Editor : aggregator.py
from datetime import datetime

try:
    # Python 2.x
    import httplib
except:
    import http.client as httplib

import base64
import logging
import random
import re
import socket
import sys
import time
import traceback

try:
    # Python 2.x
    import urlparse
except:
    import urllib.request as urlrequest
    import urllib.parse as urlparse

try:
    import json
except ImportError:
    try:
        import simplejson as json
    # it's possible that we may not need json for the action that we're taking.
    # for example, for the rpm post install script, on a python version that
    # doesn't have json, we'll get this far in the code. but the post
    # install doesn't use json, so we're fine
    except ImportError:
        json = None

try:
    # trying to import SSL to make sure we can use unverified SSLs if possible
    # otherwise we'll set this and later keep from using the wrong connection settings
    import ssl
except:
    ssl = None

from inspector import get_fqdn, get_server_name, get_platform_uuids


class Client(object):
    '''
    This is a client used for communicating with an aggregator, either *the*
    Aggregator or an Onsight instance.
    '''

    def __init__(self, agg_url, version, server_key=None, customer_key=None, proxy_config=None):
        self.log = logging.getLogger(self.__class__.__name__)
        self.customer_key = customer_key
        self.server_key = server_key
        self.agg_url = agg_url
        self.version = version
        self.call_timeout = 25

        """
        Proxy support

        This assumes the format of the option is:
            scheme://uname:password@hostname:port
        uname, password are optional

        For determining the proxy, the installer should update the agent
        configuration with an agent_proxy section, because the environment
        may not be available in the agent cron job.
        """
        self.proxy = None
        proxies_enabled = True
        if proxy_config is not None:
            pev = proxy_config.get('enabled', True)
            if type(pev) == type(''):
                pev = pev.lower()
                if pev in ['0', 'false']:
                    pev = False
                else:
                    pev = True
            proxies_enabled = pev
        if proxies_enabled:
            self.proxy = self.parse_proxy_info(proxy_config)

    def parse_proxy_info(self, proxy_config):
        proxy_val = None
        if not proxy_config:
            return proxy_val
        try:
            for scheme in ['https', 'http']:
                proxy_val = proxy_config.get(scheme)
                if not proxy_val:
                    continue
                parsed_uri = urlparse.urlparse(proxy_val)
                return parsed_uri
            return None
        except Exception:
            err = sys.exc_info()[1]
            error = str(err)
            self.log.info('parse_proxy_info error: {}'.format(error))
            return None

    def create_connection(self, url, port):
        connection = None
        if port in (443, 8443):
            # XXX Below is a workaround for later versions of Python changing how self-signed
            # certs are handled.
            # Special logic because HPUX is made by satan and has diverging ssl and socket library versions
            # that cause issues trying to get the syncs to work properly with the agent
            if 'hp-ux' in sys.platform:
                connection = httplib.HTTPSConnection(url, port, context=ssl._create_unverified_context())
            else:
                self.log.debug(sys.version_info)
                if ssl and hasattr(ssl, '_create_unverified_context'):
                    self.log.debug('ssl has _create_unverified_context attribute: %s',
                                   hasattr(ssl, '_create_unverified_context'))
                    connection = httplib.HTTPSConnection(url, port, timeout=self.call_timeout,
                                                         context=ssl._create_unverified_context())
                elif sys.version_info >= (2, 6, 0):
                    connection = httplib.HTTPSConnection(url, port, timeout=self.call_timeout)
                else:
                    connection = httplib.HTTPSConnection(url, port)
        else:
            if sys.version_info >= (2, 6, 0) and 'hp-ux' not in sys.platform:
                connection = httplib.HTTPConnection(url, port, timeout=self.call_timeout)
            else:
                connection = httplib.HTTPConnection(url, port)
        return connection

    def call(self, action, data={}, method="POST"):
        agg_urls = [url.strip() for url in self.agg_url.split(",") if url.strip()]
        random.shuffle(agg_urls)
        error = None
        for agg_url in agg_urls:
            try:
                url_parts = urlparse.urlparse(agg_url)
                if url_parts[0] == "" or url_parts[1] == "":
                    url_parts = urlparse.urlparse("http://" + agg_url)
                if url_parts[0] not in ('http', 'https') or \
                        url_parts[1] == "":
                    raise Exception("Invalid aggregator URL")
            except Exception:
                raise ValueError("Invalid aggregator URL format.")

            agg_url = url_parts[1]
            if ':' in agg_url:
                url, port = agg_url.split(":")
                port = int(port or 443)
            else:
                url = agg_url
                port = 443
            if "/v" in agg_url:
                url = url[:url.index("/v")]

            self.log.info("aggregator call %r to %r:%d", action, url, port)

            params = None
            uri = "/v%s/%s" % (self.version, action)
            if method == "POST":
                data['agent_version'] = self.version
                data['metadata_version'] = 'v3'
                data['agent_time'] = time.mktime(datetime.now().timetuple())
                self.log.debug('Data: %r' % data)
                params = json.dumps(data)

            headers = {
                "Content-type": "application/json",
                "Accept": "application/json"
            }
            if self.server_key:
                headers['Authorization'] = self.server_key

            connection = None
            if self.proxy:
                connection = httplib.HTTPSConnection(
                    self.proxy.hostname,
                    self.proxy.port,
                    timeout=self.call_timeout,
                    context=ssl._create_unverified_context()
                )
                self.log.info('Connection via proxy {}:{}'.format(
                    connection.host, connection.port
                ))
                uname = self.proxy.username
                pswd = self.proxy.password
                if uname and pswd:
                    creds = "{}:{}".format(uname, pswd)
                    auth_hash = base64.b64encode(bytes(creds, 'utf-8')).decode("utf-8")
                    auth_value = 'Basic {}'.format(auth_hash)
                    connection.set_tunnel(
                        url,
                        port,
                        headers={"Proxy-Authorization": auth_value}
                    )
                else:
                    connection.set_tunnel(url, port)
            else:
                connection = self.create_connection(url, port)

            try:
                self.log.debug("%s - %s - %s - %s" % (method, uri, params, headers))
                connection.request(method, uri, params, headers)
                resp = connection.getresponse()
                if method == "POST" and int(resp.status) != 201:
                    raise Exception(resp.reason)
                elif method != "POST" and int(resp.status) != 200:
                    raise Exception(resp.reason)
                return json.loads(resp.read().decode('utf-8'))
            except Exception:
                err = sys.exc_info()[1]
                error = str(err)
                self.log.error('Unable to connect: %s' % error)
                continue
        raise Exception(error)

    def get_local_ip(self):
        with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as client:
            client.settimeout(5.0)
            if ':' in self.agg_url.split(',')[0]:
                host, port = self.agg_url.split(',')[0].split(':')
            else:
                host = self.agg_url
                port = 0
            self.log.info('get_local_ip: creating connection to {} {}'.format(host, port))
            client.connect((host, int(port)))
            return client.getsockname()[0]

    def sync(
        self,
        results,
        outage_events,
        metadata,
        countermeasures_metadata,
        facts,
        discovered_containers,
        deleted_containers,
        register,
        register_custom_metrics,
        incidents,
        config,
        ips,
        auto_topo_scans,
        force_send_schedules=False,
        command_results={},
        dem_enabled=False,
        dem_service_results={},
        fortisase_attributes={}
    ):
        payload = dict(
            results=results,
            outage_events=outage_events,
            metadata=metadata,
            countermeasures_metadata=countermeasures_metadata,
            facts=facts,
            discovered_containers=discovered_containers,
            deleted_containers=deleted_containers,
            force_send_schedules=force_send_schedules,
            command_results=command_results,
            register=register,
            register_custom_metrics=register_custom_metrics,
            incidents=incidents,
            config=config,
            ips=ips,
            auto_topo_scans=auto_topo_scans,
            dem_enabled=dem_enabled
        )
        if metadata:
            platform_uuids = get_platform_uuids()
            if platform_uuids:
                payload['fm_server_identifiers'] = platform_uuids
        if dem_enabled:
            for ds in dem_service_results.keys():
                payload[ds] = dem_service_results[ds]
        if fortisase_attributes:
            payload['fortisase_attributes'] = fortisase_attributes
        return self.call("sync", payload)

    def notify_of_uninstall(self, remove_instance=False):
        success = False
        self.log.info("beginning uninstall notification")
        try:
            self.call("uninstall", data={'remove_instance': remove_instance})
        except:
            self.log.error("error connecting")
        else:
            self.log.info("notified successfully")
            success = True
        return success

    def handshake(self, ips, properties, attributes=None):
        self.log.info('Beginning handshake')
        hostname = get_fqdn()

        # Add platform for aggregator server match
        data = {
            'customer_key': self.customer_key,
            'hostname': hostname,
            'ips': ips
        }
        platform_uuids = get_platform_uuids()
        if platform_uuids:
            data['fm_server_identifiers'] = platform_uuids

        if properties.get('handshake_type', 'standard').lower() == 'forticlient':
            from os import _exit
            from forticlient_helper import ForticlientHelper
            try:
                helper = ForticlientHelper()
                data['forticlient_metadata'] = helper.get_handshake_data()
                properties.pop('handshake_type')
                server_name = get_server_name()
                if server_name:
                    data['server_name'] = server_name
            except:
                self.log.exception('Handshake error')
                _exit(-3)

        platform_uuids = get_platform_uuids()
        if platform_uuids:
            data['fm_server_identifiers'] = platform_uuids

        data.update(properties)
        data.update(self.parse_attributes(attributes, properties))

        self.log.info('Handshaking with aggregator:')
        self.log.info(json.dumps(data, indent=1))

        server_key = None
        found_server = False
        success = False
        error = None
        log_level = None
        try:
            data = self.call('handshake', data)
            self.log.debug(data)
            server_key = data['server_key']
            found_server = data['found_server']
            if 'commands' in data and 'log_level' in data['commands']:
                log_level = data['commands']['log_level']
        except Exception:
            err = sys.exc_info()[1]
            self.log.error('Error connecting: %s' % err)
            error = err
        else:
            success = True
            self.log.info('Connected successfully! Received server key: %r', server_key)

        if not (data.get('server_key') or data.get('found_server')):
            error = 'Error performing handshake with the aggregator, please check connectivity'
            success = False

        if 'error' in data and data.get('error'):
            success = False
            if data['error'] == 'invalid_credentials':
                error = 'Incorrect username/password'
            elif data['error'] == 'invalid_customer_key':
                error = 'Unknown/invalid customer key'
            elif data['error'] == 'invalid_server_group':
                error = 'Unknown server group'
            elif data['error'] == 'inactive_customer':
                error = 'Customer inactive'
            elif data['error'] == 'no_user_found_for_customer':
                error = 'No active user found for the customer'
            elif data['error'].startswith('invalid_template_id'):
                error = 'Unknown template id %s' % data['error'].replace('invalid_template_id:', '')
            else:
                error = data['error']

        self.log.info('Success: %s, server key: %s, found server: %s, error: %s',
                      success, server_key, found_server, error)
        return success, server_key, found_server, error, log_level

    def maintenance(self, duration, metric_tags=None):
        """
        Request to the aggregator that it starts a maintenance for our server.
        It requires the duration of the maintenance in minutes, and accepts an
        optional list of tags to mark metrics that are going to be affected by it.
        """
        payload = {
            'duration': duration,
            'metric_tags': metric_tags and metric_tags or []
        }
        return self.call('maintenance', payload)

    def end_maintenance(self):
        """
        Request to the aggregator that it ends all active maintenances for our
        server. POST call with no parameters.
        """
        return self.call('end_maintenance')

    def parse_attributes(self, attributes, properties):
        """
        Receive a dictionary containing the attributes stored in the manifest
        file. Parse them accordingly and store them in an attribute dictionary.
        Attributes get parsed only if the customer key is present in the
        properties.
        """
        if 'customer_key' in properties:
            return attributes and {'attributes': attributes} or {}
        else:
            return {}
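
Usage sketch (not part of aggregator.py): the snippet below shows one way the Client class above might be exercised. The module name, aggregator URL, agent version, customer key, local IP, and proxy values are all placeholders introduced for illustration, not values taken from this server.

import logging

# Assumes /usr/lib/fm-agent/library is on sys.path so the module imports as "aggregator".
from aggregator import Client

logging.basicConfig(level=logging.INFO)

# Hypothetical proxy configuration, matching the scheme://uname:password@hostname:port
# format described in Client.__init__.
proxy_config = {
    'enabled': 'true',
    'https': 'https://proxyuser:proxypass@proxy.example.com:3128',
}

client = Client(
    agg_url='aggregator.example.com:443',   # may also be a comma-separated list of URLs
    version='1.0.0',
    customer_key='CUSTOMER-KEY-PLACEHOLDER',
    proxy_config=proxy_config,
)

# handshake() returns (success, server_key, found_server, error, log_level);
# the server_key is later sent as the Authorization header by call().
success, server_key, found_server, error, log_level = client.handshake(
    ips=['192.0.2.10'],                      # placeholder local IP list
    properties={'handshake_type': 'standard'},
)

if success:
    client.server_key = server_key
    # Example POST to /v<version>/maintenance requesting a 30-minute maintenance window.
    client.maintenance(30, metric_tags=['example'])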