Linux ip-172-26-2-223 5.4.0-1018-aws #18-Ubuntu SMP Wed Jun 24 01:15:00 UTC 2020 x86_64
Apache
: 172.26.2.223 | : 18.216.93.197
Cant Read [ /etc/named.conf ]
8.1.13
www
www.github.com/MadExploits
Terminal
AUTO ROOT
Adminer
Backdoor Destroyer
Linux Exploit
Lock Shell
Lock File
Create User
CREATE RDP
PHP Mailer
BACKCONNECT
UNLOCK SHELL
HASH IDENTIFIER
CPANEL RESET
CREATE WP USER
BLACK DEFEND!
README
+ Create Folder
+ Create File
/
usr /
share /
rspamd /
lualib /
redis_scripts /
[ HOME SHELL ]
Name
Size
Permission
Action
bayes_cache_check.lua
542
B
-rw-r--r--
bayes_cache_learn.lua
1.74
KB
-rw-r--r--
bayes_classify.lua
1.21
KB
-rw-r--r--
bayes_learn.lua
1.46
KB
-rw-r--r--
bayes_stat.lua
554
B
-rw-r--r--
neural_maybe_invalidate.lua
754
B
-rw-r--r--
neural_maybe_lock.lua
567
B
-rw-r--r--
neural_save_unlock.lua
744
B
-rw-r--r--
neural_train_size.lua
594
B
-rw-r--r--
ratelimit_check.lua
3.26
KB
-rw-r--r--
ratelimit_cleanup_pending.lua
1.09
KB
-rw-r--r--
ratelimit_update.lua
3.54
KB
-rw-r--r--
Delete
Unzip
Zip
${this.title}
Close
Code Editor : bayes_learn.lua
-- Redis server-side Lua script: perform a single Bayes learn/unlearn step.
--
-- Invoked by rspamd via EVAL/EVALSHA with the following keys:
--   KEYS[1] - prefix for bayes tokens (e.g. for per-user classification)
--   KEYS[2] - boolean is_spam
--   KEYS[3] - string symbol
--   KEYS[4] - boolean is_unlearn
--   KEYS[5] - set of tokens encoded in a messagepack array of strings
--   KEYS[6] - set of text tokens (if any) encoded in a messagepack array of
--             strings (size must be twice of `KEYS[5]`)

local key_prefix = KEYS[1]
local spam = KEYS[2] == 'true'
local stat_symbol = KEYS[3]
local unlearning = KEYS[4] == 'true'
local tokens = cmsgpack.unpack(KEYS[5])

-- Optional textual token pairs; words[2i-1]/words[2i] describe tokens[i].
local words
if KEYS[6] then
  words = cmsgpack.unpack(KEYS[6])
end

-- Counter step: +1 when learning, -1 when unlearning.
local delta = unlearning and -1 or 1
-- Per-token class field and per-prefix learn-counter field.
local class_field = spam and 'S' or 'H'
local counter_field = spam and 'learns_spam' or 'learns_ham'

-- Register this prefix under the symbol and update bookkeeping counters.
redis.call('SADD', stat_symbol .. '_keys', key_prefix)
redis.call('HSET', key_prefix, 'version', '2') -- new schema
redis.call('HINCRBY', key_prefix, counter_field, delta) -- increase or decrease learned count

for idx, tok in ipairs(tokens) do
  redis.call('HINCRBY', tok, class_field, delta)

  if words then
    local first_word = words[idx * 2 - 1]
    local second_word = words[idx * 2]

    if first_word then
      -- Store a human-readable representation of the token.
      if second_word then
        redis.call('HSET', tok, 'tokens',
            string.format('%s:%s', first_word, second_word))
      else
        redis.call('HSET', tok, 'tokens', first_word)
      end

      redis.call('ZINCRBY', key_prefix .. '_z', delta, tok)
    end
  end
end
Close