红队作战中的一些工具分享

作者:Sec-Labs | 发布时间:

redteam-tools

红队作战中的一些工具分享。

😄信息收集

红队作战时信息收集部分是非常重要的,在大批量的资产中快速定位脆弱性的资产,必不可少地需要使用一些辅助工具。后期将开源、分享一些团队积累的工具、脚本。

1.getinfo.sh - 内网信息收集shell脚本.

2.httpx.sh - httpx+nuclei批量漏洞探测脚本.

3.httpx-url.sh - httpx+nuclei单URL漏洞探测脚本.

4.get_title.py - 快速从IP+端口提取HTTP标题

5.icohash.sh - 获取icon_hash

getinfo.sh

#!/bin/bash
# by forum.ywhack.com
#
# Linux local information-collection script. Each section prints a label,
# runs one command, and appends everything to a timestamped log via `tee -a`.
# NOTE(review): reading /etc/shadow and /var/log/secure requires root, and
# /var/log/secure is RHEL-style (Debian/Ubuntu use /var/log/auth.log) —
# confirm the target distribution.

# Output file: <epoch-seconds>.log in the current directory
filename=$(date +%s)'.log'

echo "信息收集"
echo -e "\n" | tee -a $filename
# All local accounts
echo "账户信息收集" | tee -a $filename
cat /etc/passwd | tee -a $filename
echo -e "\n" | tee -a $filename
# Password hashes (root only)
echo "shadow" | tee -a $filename
cat /etc/shadow | tee -a $filename
echo -e "\n" | tee -a $filename
# Running processes
echo "进程信息收集" | tee -a $filename
ps aux | tee -a $filename
echo -e "\n" | tee -a $filename
# Active network connections
echo "网络连接" | tee -a $filename
netstat -antlp | tee -a $filename
echo -e "\n" | tee -a $filename
# Current user
# NOTE(review): the 2>/dev/null applies to echo (which cannot fail), not to
# whoami — the redirect has no effect here
echo "当前用户:" $(whoami) 2>/dev/null | tee -a $filename
echo -e "\n" | tee -a $filename
# Listening TCP ports
echo "端口监听" | tee -a $filename
netstat -lnpt | tee -a $filename
echo -e "\n" | tee -a $filename
# Accounts whose shell is not nologin/false
echo "可登陆用户" | tee -a $filename
cat /etc/passwd | grep -E -v 'nologin$|false' | tee -a $filename
echo -e "\n" | tee -a $filename
# useradd entries from the auth log
echo "增加用户的日志" | tee -a $filename
grep "useradd" /var/log/secure  | tee -a $filename
echo -e "\n" | tee -a $filename
# Shell history files of the current user (bash/zsh/etc.)
echo "History操作提取" | tee -a $filename
cat ~/.*history | tee -a $filename
echo -e "\n" | tee -a $filename
# Source IPs of successful SSH logins, most frequent first
# NOTE(review): `more` inside a tee pipeline is a no-op pager — likely a
# leftover from interactive use
echo "登录成功的IP" | tee -a $filename
grep "Accepted " /var/log/secure* | awk '{print $11}' | sort | uniq -c | sort -nr | more | tee -a $filename   
echo -e "\n" | tee -a $filename
# Kernel routing table
echo "查看路由表" | tee -a $filename
route -n | tee -a $filename
echo -e "\n" | tee -a $filename
# Authorized SSH public keys of the current user
echo "查看 SSH key" | tee -a $filename
sshkey=${HOME}/.ssh/authorized_keys
if [ -e "${sshkey}" ]; then
    cat ${sshkey} | tee -a $filename
else
    echo -e "SSH key文件不存在\n" | tee -a $filename
fi
echo -e "\n" | tee -a $filename
# Hosts this account has previously connected to over SSH
echo "查看 known_hosts" | tee -a $filename
cat ~/.ssh/known_hosts | tee -a $filename
echo -e "\n" | tee -a $filename
# Java web-app property files under WEB-INF
# NOTE(review): the unquoted *.properties glob is expanded by the shell if a
# matching file exists in the CWD — should be quoted like the search below
echo "查找WEB-INF" | tee -a $filename
find / -name *.properties 2>/dev/null | grep WEB-INF | tee -a $filename
echo -e "\n" | tee -a $filename
# Credential-looking keys inside property files
echo "user|pass|pwd|uname|login|db_" | tee -a $filename
find / -name "*.properties" | xargs egrep -i "user|pass|pwd|uname|login|db_" | tee -a $filename
echo -e "\n" | tee -a $filename
# JDBC connection strings / password assignments in config and shell files
echo "jdbc:|pass=|passwd=" | tee -a $filename
find / -regex ".*\.properties\|.*\.conf\|.*\.config\|.*\.sh" | xargs grep -E "=jdbc:|pass=|passwd=" | tee -a $filename
echo -e "\n" | tee -a $filename
# Author cances
# Interface names and addresses
echo "ip和网卡信息" | tee -a $filename
ip a | awk '{print $2,$4}' | tee -a $filename
echo -e "\n" | tee -a $filename
# Login-capable accounts again, with a stricter filter (duplicate of the
# earlier section — kept as in the original)
echo "可登陆用户" | tee -a $filename
cat /etc/passwd | grep -E -v 'sync$|halt$|nologin$|false|shutdown' | tee -a $filename
echo -e "\n" | tee -a $filename
# Most recent login per account
echo "用户登陆日志" | tee -a $filename
lastlog | tee -a $filename
echo -e "\n" | tee -a $filename
# Static host mappings
echo "查看 hosts" | tee -a $filename
cat /etc/hosts | tee -a $filename
echo -e "\n" | tee -a $filename
# Distribution / release information
echo "查看 系统版本" | tee -a $filename
cat /etc/*-release | tee -a $filename
echo -e "\n" | tee -a $filename
# Kernel release and architecture
echo "查看 内核版本" | tee -a $filename
uname -mrs | tee -a $filename

httpx.sh

#!/bin/bash
#forum.ywhack.com
# Batch probe: pipes a file of targets through httpx (liveness filter) into
# nuclei (template scan) and writes findings to a timestamped results file.
# Usage: ./httpx.sh <file-with-targets>
# NOTE(review): tool paths below are hard-coded to one user's machine —
# parameterize before sharing.
VERSION="PoC_Info v1.0"

TARGET="$1"
WORKSPACE=$TARGET
# Directory this script lives in, resolved even when invoked via a relative path
# NOTE(review): WORKSPACE and RESULTS_PATH are set but never used below
WORKING_DIR="$(cd "$(dirname "$0")" ; pwd -P)"
RESULTS_PATH="$WORKING_DIR"

# ANSI colour codes
# NOTE(review): bash's builtin `echo` does not interpret \033 without -e, so
# these will print literally under bash — confirm the intended shell
RED="\033[1;31m"
GREEN="\033[1;32m"
BLUE="\033[1;36m"
YELLOW="\033[1;33m"
RESET="\033[0m"

# filename=$TARGET_$(date +%s)'.txt'
# Timestamp used in the results file name (Chinese date format)
time=$(date "+%Y年%m月%d日%H时%M分%S秒")

# Banner
Logo(){
echo  "${GREEN}    
 ${RED}v$VERSION${RESET}  ${GREEN} 
 by ${YELLOW}@Blackhold${RESET}\n "
}
# Argument check: require a non-empty file argument
# NOTE(review): $1 should be quoted in the -s test to survive paths with spaces
checkArgs(){
    if [[ $# -eq 0 ]]; then
        echo "${RED}[!] ERROR:${RESET} Invalid\n"
        echo "${GREEN}[+] USAGE: ${RESET}$0 <file>\n"
        exit 1
    elif [ ! -s $1 ]; then
        echo "${RED}[!] ERROR:${RESET} File is empty and/or does not exists!\n"
        echo "${GREEN}[+] USAGE:${RESET}$0 <file>\n"
        exit 1
    fi
}
# Run the httpx -> nuclei scan pipeline and report the output file
infostart(){
     echo "${GREEN}[+] Running InfoExp.${RESET}"
     cat $TARGET | /Users/blackhold/Tools/scan/httpx_0.0.8/httpx -silent | /Users/blackhold/Tools/scan/nuclei_2.1.0/nuclei -t /Users/blackhold/Tools/scan/nuclei_2.1.0/nuclei-templates/ -o results_$time.txt
     echo "\n"
     echo "${RED}已经在当前目录生成${RESET}${GREEN}results_$time.txt${RESET}${RED},请注意查看!${RESET}"
     echo "${RED}运行结束!bye.${RESET}  by ${YELLOW}@Blackhold${RESET}"
}

Logo
checkArgs $TARGET
infostart
# NOTE(review): exits 1 even on success — callers cannot distinguish failure
exit 1

httpx-url.sh

#!/bin/bash
#forum.ywhack.com
# Single-URL variant of httpx.sh: echoes one target through httpx (liveness
# filter) into nuclei and writes findings to a timestamped results file.
# Usage: ./httpx-url.sh <url>
# NOTE(review): tool paths below are hard-coded to one user's machine.
VERSION="PoC_Info v1.0"

TARGET="$1"
WORKSPACE=$TARGET
# Directory this script lives in, resolved even when invoked via a relative path
# NOTE(review): WORKSPACE and RESULTS_PATH are set but never used below
WORKING_DIR="$(cd "$(dirname "$0")" ; pwd -P)"
RESULTS_PATH="$WORKING_DIR"

# ANSI colour codes
# NOTE(review): bash's builtin `echo` does not interpret \033 without -e, so
# these will print literally under bash — confirm the intended shell
RED="\033[1;31m"
GREEN="\033[1;32m"
BLUE="\033[1;36m"
YELLOW="\033[1;33m"
RESET="\033[0m"

# filename=$TARGET_$(date +%s)'.txt'
# Timestamp used in the results file name (Chinese date format)
time=$(date "+%Y年%m月%d日%H时%M分%S秒")

# Banner
Logo(){
echo  "${GREEN}    
 ${RED}v$VERSION${RESET}  ${GREEN} 
 by ${YELLOW}@Blackhold${RESET}\n "
}
# (argument check removed in this variant — no validation of $1)

# Run the httpx -> nuclei scan pipeline and report the output file
infostart(){
     echo "${GREEN}[+] Running InfoExp.${RESET}"
     echo "$TARGET" | /Users/blackhold/Tools/scan/httpx_0.0.8/httpx -silent | /Users/blackhold/Tools/scan/nuclei_2.1.0/nuclei -t /Users/blackhold/Tools/scan/nuclei_2.1.0/nuclei-templates/ -o results_$time.txt
     echo "\n"
     echo "${RED}已经在当前目录生成${RESET}${GREEN}results_$time.txt${RESET}${RED},请注意查看!${RESET}"
     echo "${RED}运行结束!bye.${RESET}  by ${YELLOW}@Blackhold${RESET}"
}

Logo
#checkArgs $TARGET
infostart
# NOTE(review): exits 1 even on success — callers cannot distinguish failure
exit 1

get_title.py

# -*- coding:utf-8 -*-
# Lengjiao team, by shihuang
# python3
import requests
from bs4 import BeautifulSoup
# NOTE(review): Thread and Queue are imported but never used in this script
from threading import Thread
from queue import Queue
from concurrent.futures import ThreadPoolExecutor, as_completed
import time,random
# Suppress the InsecureRequestWarning spam produced by verify=False requests
requests.packages.urllib3.disable_warnings()


# Pool of desktop browser User-Agent strings; one is picked per request.
_USER_AGENT_LIST = [
    "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.186 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36"
]


def _fetch(session, full_url):
    """GET *full_url* with a random User-Agent, short timeout, TLS checks off.

    NOTE(review): the original sent the header key as 'UserAgent' (no dash),
    which is not a real header name, so the random UA was never applied —
    fixed to the standard 'User-Agent'.
    """
    return session.get(
        full_url,
        headers={'User-Agent': random.choice(_USER_AGENT_LIST),
                 'Accept-Language': 'zh-CN,zh;q=0.9'},
        timeout=1, verify=False, allow_redirects=True)


def _title_row(response, full_url, url):
    """Build the [title, status_code, full_url, url] row from *response*."""
    # Round-trip through the detected encoding so BeautifulSoup sees UTF-8;
    # a None/unknown encoding raises here and is caught by the caller.
    soup = BeautifulSoup(
        response.text.encode(response.encoding).decode('utf-8'), 'html.parser')
    if soup.title is None:
        return ["Title could not be found!", response.status_code, full_url, url]
    return [soup.title.text.strip('\n').strip(), response.status_code, full_url, url]


def get_http_banner(url):
    """Fetch the HTML <title> for *url* (a bare host or host:port string).

    Tries http:// first; on an HTTP 400 response retries over https://
    (a plain-HTTP request to a TLS port typically yields 400).

    Returns [title, status_code, probed_url, url], or None on any error —
    the exception is printed and swallowed, matching the original
    best-effort behaviour.
    """
    try:
        session = requests.session()
        session.keep_alive = False  # drop idle connections between requests
        response = _fetch(session, 'http://' + url)
        if response.status_code == 400:
            response = _fetch(session, 'https://' + url)
            return _title_row(response, 'https://' + url, url)
        return _title_row(response, 'http://' + url, url)
    except Exception as ex:
        print(ex)


def out(msg):
    """Append *msg* (a list of string fields) to out.csv as one quoted row.

    Uses the csv module so embedded double quotes and commas are escaped
    correctly — the original '"," '.join produced malformed rows for any
    field containing a double quote. Output for clean fields is unchanged:
    every field quoted, lines terminated with a bare newline.

    NOTE(review): encoding='utf-8-sig' emits a BOM on every append (each
    call reopens the stream) — kept for compatibility with the original.
    """
    import csv  # local import: keeps this fix self-contained
    with open('out.csv', 'a', encoding='utf-8-sig', newline='') as f:
        writer = csv.writer(f, quoting=csv.QUOTE_ALL, lineterminator='\n')
        writer.writerow(msg)

def main():
    """Read targets (one host[:port] per line) from url.txt, probe each with
    up to 100 worker threads, and append results to out.csv.

    Output columns are url, title, state_code, matching the header row.
    """
    # with-block guarantees url.txt is closed (the original leaked the handle)
    with open('url.txt', 'r') as f:
        targets = [line.strip('\n') for line in f]
    out(["url", "title", "state_code"])  # CSV header row
    with ThreadPoolExecutor(max_workers=100) as pool:
        futures = [pool.submit(get_http_banner, target) for target in targets]
        for future in as_completed(futures):
            data = future.result()
            # get_http_banner returns None on error; only lists are results
            if isinstance(data, list):
                print(data)
                # reorder [title, status, full_url, url] -> url, title, status
                out([str(data[2]), str(data[0]), str(data[1])])



if __name__ == "__main__":
    # Time the whole run and report elapsed seconds on completion.
    started_at = time.time()
    main()
    print('[info]耗时:%s' % (time.time() - started_at))

icohash.sh

#!/bin/bash
# Compute the Shodan/fofa-style favicon hash of the resource at the URL
# given as the first argument: mmh3 over the base64-encoded response body.
# Usage: ./icohash.sh <icon-url>
# Requires: curl, python3 with the mmh3 package installed.
target="$1"
start(){
     echo "$target hash:"
     # -s silent, -L follow redirects, -k skip TLS verification; the body is
     # base64-encoded via codecs (keeps newlines, matching Shodan's scheme)
     # and hashed with mmh3. NOTE(review): $target should be quoted.
     curl -s -L -k $target | python3 -c 'import mmh3,sys,codecs; print(mmh3.hash(codecs.encode(sys.stdin.buffer.read(),"base64")))'
}
start
# NOTE(review): exits 1 even on success — callers cannot distinguish failure
exit 1

项目地址

https://github.com/r0eXpeR/redteam-tools

标签:工具分享