Skip to content

Commit

Permalink
修改celery队列为RQ队列,修改扫描显示时区问题
Browse files Browse the repository at this point in the history
  • Loading branch information
Cl0udG0d committed Jul 9, 2020
1 parent ee930a2 commit 1bd0012
Show file tree
Hide file tree
Showing 9 changed files with 157 additions and 150 deletions.
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -12,5 +12,5 @@ RUN mv /etc/apt/sources.list /etc/apt/sources.list.bak && \
echo "deb-src http://mirrors.aliyun.com/debian-security stretch/updates main contrib non-free" >>/etc/apt/sources.list && \
apt-get update && \
apt-get install nmap -y && \
pip install -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple
pip install -r requirements.txt -i https://mirrors.cloud.tencent.com/pypi/simple/
CMD ["flask", "run"]
73 changes: 37 additions & 36 deletions ImportToRedis.py
Original file line number Diff line number Diff line change
@@ -1,44 +1,45 @@
import redis
from config import redisPool


'''
默认6379端口,第0个数据库
'''


def ToRedis():
    """Seed Redis with the scanner's dictionaries and payload lists.

    Loads each wordlist/payload file from disk into the Redis key the
    scanner consumes. Keys that already exist are skipped, so the import
    is idempotent across restarts. Uses DB 0 on the default pool (see
    ``config.redisPool``).
    """
    r = redis.Redis(connection_pool=redisPool)
    # r.flushdb()

    def _load_list(key, path, encoding=None):
        # Push each line of *path* (newline stripped) onto the Redis
        # list *key*. `with` guarantees the file handle is closed even
        # if reading fails midway -- the original manual close() leaked
        # the handle on exceptions.
        with open(path, 'r', encoding=encoding) as fp:
            for line in fp:
                r.lpush(key, line.rstrip('\n'))

    if not r.exists("SubScan"):
        # Subdomain brute-force candidates, one per line.
        _load_list("SubScan", r"dict/SUB_scan.txt", encoding='utf-8')
    if not r.exists("SenScan"):
        # Sensitive-path dictionary for directory scanning.
        _load_list("SenScan", r"dict/SEN_scan.txt", encoding='utf-8')
    if not r.exists("XSSpayloads"):
        # XSS payload templates.
        _load_list("XSSpayloads", 'XSSBug/normal_payload.txt')
    if not r.exists("bugtype"):
        # bugtype.txt lines look like "name:grade"; stored as hash fields.
        with open('dict/bugtype.txt', 'r', encoding='utf-8') as fp:
            for raw in fp:
                raw = raw.strip('\n')
                parts = raw.split(":")
                r.hset('bugtype', parts[0], parts[1])
    if not r.exists("useragents"):
        # Rotating User-Agent strings used when sending scan requests.
        _load_list("useragents", 'dict/useragents.txt', encoding='utf-8')


ToRedis()

# Module-level seeding: importing this module always leaves the scan
# dictionaries present in Redis. Each key is only loaded when absent,
# so repeated imports are cheap and idempotent.
r = redis.Redis(connection_pool=redisPool)

# r.flushdb()
if not r.exists("SubScan"):
    # Subdomain brute-force candidates, one per line. `with` closes the
    # file even if an exception interrupts the loop (the original manual
    # close() leaked the handle on errors); iterating the file directly
    # avoids materializing the whole list via readlines().
    with open(r"dict/SUB_scan.txt", "r", encoding='utf-8') as file1:
        for line1 in file1:
            r.lpush("SubScan", line1.replace("\n", ''))
if not r.exists("SenScan"):
    # Sensitive-path dictionary for directory scanning.
    with open(r"dict/SEN_scan.txt", "r", encoding='utf-8') as file2:
        for line2 in file2:
            r.lpush("SenScan", line2.replace("\n", ""))
if not r.exists("XSSpayloads"):
    # XSS payload templates.
    with open('XSSBug/normal_payload.txt', 'r') as file3:
        for line3 in file3:
            r.lpush("XSSpayloads", line3.replace("\n", ""))
if not r.exists("bugtype"):
    # bugtype.txt lines look like "name:grade"; stored as hash fields.
    with open('dict/bugtype.txt', 'r', encoding='utf-8') as file4:
        for line4 in file4:
            line4 = line4.strip('\n')
            name = line4.split(":")[0]
            grade = line4.split(":")[1]
            r.hset('bugtype', name, grade)
if not r.exists("useragents"):
    # Rotating User-Agent strings used when sending scan requests.
    with open('dict/useragents.txt', 'r', encoding='utf-8') as file5:
        for line5 in file5:
            line5 = line5.strip('\n')
            r.lpush('useragents', line5)



84 changes: 81 additions & 3 deletions SZheConsole.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,17 @@
import re
import requests
import core
from init import app
from exts import db
from models import BugList
from models import BaseInfo,IPInfo,DomainInfo,BugList
from BugScan import BugScan
import requests
import core
from init import redispool
from SpiderGetUrl2 import SpiderGetUrl2
from BaseMessage import GetBaseMessage
from IPMessage import IPMessage
from DomainMessage import DomainMessage


Bugs=["SQLBugScan","XSSBugScan","ComInScan","FileIncludeScan"]

requests.packages.urllib3.disable_warnings()
Expand Down Expand Up @@ -102,6 +109,77 @@ def inputfilter(url):
print("{}访问超时".format(attackurl))
return None,None,None

def SZheScan(url):
    """Scan entry point executed by the RQ worker for one target *url*.

    Pipeline: input filtering -> base-info collection -> IP/domain deep
    info -> database persistence -> crawling -> vulnerability scanning.
    The Redis hash "targetscan" tracks progress ("nowscan"/"waitcount")
    for the web console, and "runlog" accumulates a human-readable log.
    All exceptions are caught so one failing target cannot kill the
    worker process.
    """
    try:
        # Filter/normalize the raw input; drops invalid or slow targets.
        url, attackurl, rep = inputfilter(url)
        # inputfilter returns (None, None, None) when the URL is invalid
        # or the probe request timed out -- abort this scan.
        if not url:
            print("Not Allow This URL")
            return
        redispool.hset("targetscan", "nowscan", attackurl)
        with app.app_context():
            # Collect basic information (status, title, fingerprint, ...).
            baseinfo = GetBaseMessage(url, attackurl, rep)
            # Classify the target as bare IP (optionally with a port) or
            # domain, and build the matching deep-info collector.
            # Raw string avoids invalid-escape warnings for \d and \.
            pattern = re.compile(r'^\d+\.\d+\.\d+\.\d+(:(\d+))?$')
            # Strip an explicit port from an IP target before lookups.
            if pattern.findall(url) and ":" in url:
                infourl = url.split(":")[0]
            else:
                infourl = url
            if pattern.findall(url):
                boolcheck = True
                ipinfo = IPMessage(infourl)
            else:
                boolcheck = False
                domaininfo = DomainMessage(url)
            info = BaseInfo(url=url, boolcheck=boolcheck, status=baseinfo.GetStatus(), title=baseinfo.GetTitle(),
                            date=baseinfo.GetDate(), responseheader=baseinfo.GetResponseHeader(),
                            Server=baseinfo.GetFinger(), portserver=baseinfo.PortScan(), sendir=baseinfo.SenDir())
            db.session.add(info)
            # flush() assigns the autoincrement id before commit so the
            # dependent IPInfo/DomainInfo row can reference it.
            db.session.flush()
            infoid = info.id
            db.session.commit()
            baseinfo.WebLogicScan()
            baseinfo.AngelSwordMain()
            if boolcheck:
                # HyperLogLog counters of distinct scanned IPs/domains.
                redispool.pfadd("ip", infourl)
                ipinfo = IPInfo(baseinfoid=infoid, bindingdomain=ipinfo.GetBindingIP(), sitestation=ipinfo.GetSiteStation(),
                                CMessage=ipinfo.CScanConsole(),
                                ipaddr=ipinfo.FindIpAdd())
                db.session.add(ipinfo)
            else:
                redispool.pfadd("domain", infourl)
                domaininfo = DomainInfo(baseinfoid=infoid, subdomain=domaininfo.GetSubDomain(), whois=domaininfo.GetWhoisMessage(),
                                        bindingip=domaininfo.GetBindingIP(),
                                        sitestation=domaininfo.GetSiteStation(), recordinfo=domaininfo.GetRecordInfo(),
                                        domainaddr=domaininfo.FindDomainAdd())
                db.session.add(domaininfo)
            db.session.commit()
            # Crawl depth defaults to 2 to avoid link explosion on large
            # sites; configurable in the settings UI.
            redispool.append("runlog", "对{}页面进行深度爬取\n".format(attackurl))
            SpiderGetUrl2(attackurl, deepth=2)
            redispool.append("runlog", "对该网站{}爬取到的url进行常规漏扫 :D\n".format(attackurl))
            print("对该网站爬取到的url进行常规漏扫 :D")
            BugScanConsole(url)
            count = redispool.hget('targetscan', 'waitcount')
            # decode_responses=True means hget returns str, or None when
            # the field is missing; isinstance replaces the original
            # `'str' in str(type(count))` type-name check.
            if isinstance(count, str):
                waitcount = int(count) - 1
                redispool.hset("targetscan", "waitcount", str(waitcount))
            else:
                redispool.hset("targetscan", "waitcount", "0")
            redispool.hdel("targetscan", "nowscan")
            # If the most recent BugList row points back at this url,
            # the host was affected: bump the "havebugpc" HLL counter.
            firstbugurl = BugList.query.order_by(BugList.id.desc()).first().oldurl
            if firstbugurl == url:
                redispool.pfadd("havebugpc", url)
            redispool.append("runlog", "{} scan end !\n".format(url))
            print("{} scan end !".format(url))
            # print(redispool.get('runlog'))
    except Exception as e:
        # Worker-level boundary: log and swallow so a single failing
        # target does not crash the RQ worker.
        print(e)

if __name__=='__main__':
    # Manual smoke test: exercise the input filter against a known-good
    # URL and print the (url, attackurl, response) tuple it produces.
    print(inputfilter("https://www.cnblogs.com/"))
Expand Down
96 changes: 0 additions & 96 deletions celerytask.py

This file was deleted.

5 changes: 5 additions & 0 deletions config.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
import os
import redis
from rq import Queue
import rqsettings

'''
配置文件:
Expand All @@ -13,6 +15,7 @@
SECRET_KEY = os.urandom(24)



HOSTNAME='mysql'
# HOSTNAME='127.0.0.1'
PORT = '3306'
Expand All @@ -28,3 +31,5 @@
HOST = 'redis'
# HOST = '127.0.0.1'
redisPool = redis.ConnectionPool(host=HOST, port=6379, db=0, decode_responses=True)
db = redis.Redis(host=rqsettings.REDIS_HOST, port=rqsettings.REDIS_PORT, db=rqsettings.REDIS_DB, decode_responses=True)
queue = Queue(connection=db)
23 changes: 16 additions & 7 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,22 @@ services:
redis:
image: "redis"
restart: always
environment:
- TZ=Asia/Shanghai
mysql:
build: ./mysql
environment:
- TZ=Asia/Shanghai
- MYSQL_DATABASE=SZheScan
- MYSQL_ROOT_PASSWORD=root
ports:
- "3306:3306"
restart: always
command: --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
command: [
'--character-set-server=utf8mb4',
'--collation-server=utf8mb4_unicode_ci',
]

web:
build: .
ports:
Expand All @@ -22,12 +29,14 @@ services:
depends_on:
- mysql
restart: always
celery:
environment:
- TZ=Asia/Shanghai
rq:
build: .
command: "celery worker -A celerytask.celery -l INFO"
user: nobody
links:
- redis:redis
depends_on:
- redis
restart: always
- web
command: "rq worker -c rqsettings"
restart: always
environment:
- TZ=Asia/Shanghai
9 changes: 5 additions & 4 deletions index.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,10 @@
from models import User, Log, BaseInfo, InvitationCode, BugList, POC, IPInfo, DomainInfo, Profile
from exts import db
from init import app, redispool
from celerytask import SZheScan
import core
from decorators import login_required

from config import queue
from SZheConsole import SZheScan



Expand Down Expand Up @@ -215,7 +215,7 @@ def user():
return redirect(url_for('user'))


@app.route('/console', methods=['GET', 'POST'])
@app.route('/test_console', methods=['GET', 'POST'])
@login_required
def console():
bugbit, bugtype = core.GetBit()
Expand All @@ -227,7 +227,8 @@ def console():
urls=session['targetscan'].split()
redispool.hincrby('targetscan', 'waitcount', len(urls))
for url in urls:
SZheScan.delay(url)
queue.enqueue(SZheScan,url)
# SZheScan.delay(url)
session.pop('targetscan')
try:
lastscantime = BaseInfo.query.order_by(BaseInfo.id.desc()).first().date
Expand Down
Loading

0 comments on commit 1bd0012

Please sign in to comment.