前言
在一次渗透测试的过程中,拿到了几百个IP,不知道哪些IP上有网站、网站开放在哪些端口,无法直接使用AWVS扫描,所以写了一个脚本:批量探测IP的端口,检查是否存在网站,并把存活的URL导出,便于导入AWVS进行扫描。师傅们轻喷。
使用语言
python3
脚本如下
"""Batch-probe a set of IPs for open ports, then check each open port for a
web service; live URLs are written to lemonlove7.csv for import into AWVS.

All prompts are interactive (Chinese).  Work is fanned out over a
multiprocessing.dummy pool, which is a *thread* pool — appropriate here
because every task is blocking network I/O.
"""
import csv
import socket
import time
from multiprocessing.dummy import Pool as ThreadPool

import requests
from bs4 import BeautifulSoup as bs
from requests.packages.urllib3.exceptions import InsecureRequestWarning

# win32com (pywin32) is Windows-only; degrade gracefully elsewhere instead of
# crashing at import time (original imported it unconditionally).
try:
    import win32com.client
except ImportError:
    win32com = None

# Internal hosts routinely use self-signed certs; we scan with verify=False,
# so silence the resulting warnings.
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)


def ip_get():
    """Populate the global IP list: either one IP typed in, or a file of IPs
    (one per line)."""
    ipq = input('请输入要扫描的IP --- 1:单个IP 2:IP文件 -->')
    while ipq not in ('1', '2'):
        ipq = input('请输入要扫描的IP --- 1:单个IP 2:IP文件 -->')
    if ipq == '1':
        IP.append(input('请输入IP:'))
    else:
        path = input('请输入IP文件:')
        # with-block closes the handle (the original leaked it)
        with open(path, 'r') as f:
            for line in f:
                line = line.strip()
                if line:  # tolerate blank lines in the file
                    IP.append(line)


def port_jj():
    """Populate the global PORTS list: all 1-65535, an explicit comma list,
    or an inclusive a-b range."""
    choice = input('请输入要扫描的端口 --- 1:所有端口 2:指定端口,多个使用,分隔 3:指定端口范围 -->')
    while choice not in ('1', '2', '3'):
        choice = input('请输入要扫描的端口 --- 1:所有端口 2:指定端口,多个端口使用,分隔 3:指定端口范围 -->')
    if choice == '1':
        PORTS.extend(range(1, 65536))
    elif choice == '2':
        for p in input('请输入要扫描的端口:').split(','):
            PORTS.append(int(p))
    else:
        # accept "a-b" (normalised to "a,b" exactly as the original did)
        parts = input('请输入要扫描的范围:').replace('-', ',').split(',')
        lo, hi = int(parts[0]), int(parts[1])
        PORTS.extend(range(lo, hi + 1))  # inclusive upper bound


def tcp_scan(post):
    """TCP connect()-scan: test port *post* against every target IP.

    Open ip:port pairs are appended to the shared IP_PORT list.  Runs inside
    the thread pool, one call per port.
    """
    for ip in IP:
        # AF_INET, SOCK_STREAM — the original's socket.socket(2, 1)
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(out_time)
        try:
            # connect_ex returns 0 on success instead of raising
            if s.connect_ex((ip, post)) == 0:
                print("%s:%sopen!" % (ip, post))
                IP_PORT.append({'IP': ip, 'PORT': str(post)})
        finally:
            s.close()  # original leaked the socket on closed ports


def web_scan(URL):
    """Probe *URL* (an http://ip:port candidate) for a web service, falling
    back to https when http fails or returns non-200.

    Responding URLs are recorded in the shared CODE list as ('URL', url).
    """
    myua = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36"
    headers = {'User-Agent': myua, 'Connection': 'close'}
    try:
        r = requests.get(url=URL, headers=headers, timeout=5, verify=False)
        soup = bs(r.text.encode('utf-8'), 'html.parser')
        # AttributeError (no <title>) falls through to the except fallback
        title = soup.find('title').text
        if r.status_code == 200:
            # NOTE: the original also branched on soup == '' — a
            # BeautifulSoup object never equals '', so that path was dead.
            print(URL + ' --- 发现web --- title:' + title)
            CODE.append(('URL', URL))
        else:
            # Non-200 over http: retry the same host/port over https.
            URL = URL.replace('http', 'https')
            r1 = requests.get(url=URL, headers=headers, verify=False, timeout=5)
            # BUG FIX: original parsed the stale http response (r.text)
            soup = bs(r1.text.encode('utf-8'), 'html.parser')
            title = soup.find('title').text
            if r1.status_code == 200:
                print(URL + ' --- 发现web --- title:' + title)
                CODE.append(('URL', URL))
            elif r1.status_code != 404:  # BUG FIX: original tested r, not r1
                print(URL + ' --- 发现web --- title: 无')
                CODE.append(('URL', URL))
    except Exception:
        # http attempt failed outright (timeout, reset, missing <title>…):
        # best-effort https fallback, then give up silently — this is a
        # deliberate best-effort probe, not an error path.
        try:
            URL = URL.replace('http', 'https')
            r = requests.get(url=URL, headers=headers, verify=False, timeout=5)
            soup = bs(r.text.encode('utf-8'), 'html.parser')
            title = soup.find('title').text
            if r.status_code != 404:
                print(URL + ' --- 发现web --- title: 无')
                CODE.append(('URL', URL))
        except Exception:
            pass


def wirte_file():
    """Write every recorded ('URL', url) pair to lemonlove7.csv.

    (Function name kept as in the original — 'wirte' [sic].)
    """
    with open('lemonlove7.csv', 'w', newline='') as f:
        csv.writer(f).writerows(CODE)


if __name__ == '__main__':
    CODE = []      # ('URL', url) rows destined for the CSV
    PORTS = []     # ports to probe
    URL = []       # candidate http URLs built from open ip:port hits
    IP_PORT = []   # {'IP': ..., 'PORT': ...} open-port hits
    IP = []        # target hosts
    out_time = 1   # per-connect timeout, seconds

    start_time = time.time()
    ip_get()
    port_jj()

    xc = int(input('请输入进程数:'))
    # One task per port; each task sweeps every IP (see tcp_scan).
    pool = ThreadPool(processes=xc)
    pool.map(tcp_scan, PORTS)
    pool.close()
    pool.join()

    for hit in IP_PORT:
        URL.append('http://' + hit['IP'] + ':' + hit['PORT'])

    scan = ThreadPool(processes=xc)
    scan.map(web_scan, URL)
    scan.close()
    scan.join()

    wirte_file()
    end_time = time.time()
    print("耗时:", end_time - start_time)

    # Voice notification via SAPI — only available on Windows with pywin32.
    if win32com is not None:
        speak = win32com.client.Dispatch('SAPI.SPVOICE')
        speak.Speak('存活的url已经保存到lemonlove7.csv文件中')
说明
使用 python 的 multiprocessing.dummy 线程池模块并发检测(任务以阻塞的网络 I/O 为主,线程池即可加快速度),要扫描的 IP 文件可以自定义。
可以扫描单个和多个ip任意端口,进行web检测,导入csv里,扔进AWVS就可以了
要扫描的 IP 以文本文件提供,一个 IP 占一行。
lemonlove7.csv文件为扫描后存在网站的url