diff --git a/LSpider/settings.py.bak b/LSpider/settings.py.bak
index 0c6e79e..a5f28bd 100644
--- a/LSpider/settings.py.bak
+++ b/LSpider/settings.py.bak
@@ -213,5 +213,11 @@ WECHAT_NOTICE_DEBUG = {
'agent_id': ' ',
}
+# for xray result
+VUL_LIST_PATH = os.path.join(BASE_DIR, 'vuls/')
+
+if os.path.isdir(VUL_LIST_PATH) is not True:
+ os.mkdir(VUL_LIST_PATH)
+
# for test
IS_TEST_ENVIRONMENT = False
diff --git a/README.md b/README.md
index c0a45f2..66d2055 100644
--- a/README.md
+++ b/README.md
@@ -58,9 +58,22 @@ LSpider从设计之初是为了配合像xray这种被动扫描器而诞生的,
python3 manage.py SpiderCoreBackendStart --test
```
+通过dockerfile安装(不推荐的安装模式)
+```
+cd ./docker
+
+docker-compose up -d
+```
+
+[dockerfile 安装&使用](./docker/readme.md)
+
+**使用dockerfile安装,推荐修改其中必要的配置信息以避免安全漏洞诞生。**
+
**值得注意的是,以下脚本可能会涉及到项目路径影响,使用前请修改相应的配置**
-启动LSpider webhook(默认端口2062)
+建议配合screen来挂起进程
+
+启动LSpider webhook 与漏洞展示页面(默认端口2062)
```
./lspider_webhook.sh
@@ -93,6 +106,10 @@ python3 manage.py SpiderCoreBackendStart --test
[如何配置扫描任务 以及 其他的配置相关](./docs/manage.md)
+扫描器结果输出到配置文件相同目录(默认为vuls/),则可以通过web界面访问。
+
+![](./docs/6.png)
+
# 使用内置的hackerone、bugcrowd爬虫获取目标
使用hackerone爬虫,你需要首先配置好hackerone账号
@@ -108,6 +125,12 @@ python3 manage.py SpiderCoreBackendStart --test
![](./docs/5.png)
+# Contributors
+
+感谢如下贡献者对本工具发展过程中的贡献:
+
+- [QGW](https://github.com/qboy0000)
+
# 404StarLink
![](https://github.com/knownsec/404StarLink-Project/raw/master/logo.png)
diff --git a/core/chromeheadless.py b/core/chromeheadless.py
index 1f903ab..d255b89 100644
--- a/core/chromeheadless.py
+++ b/core/chromeheadless.py
@@ -417,7 +417,7 @@ def finish_form(self):
def click_button(self):
try:
- submit_buttons = self.driver.find_element_by_xpath("//input[@type='submit']")
+ submit_buttons = self.driver.find_elements_by_xpath("//input[@type='submit']")
submit_buttons_len = len(submit_buttons)
diff --git a/docker/Dockerfile b/docker/Dockerfile
new file mode 100644
index 0000000..c15596a
--- /dev/null
+++ b/docker/Dockerfile
@@ -0,0 +1,59 @@
+#FROM ubuntu:16.04
+
+FROM python:3.7.9-alpine3.12
+
+LABEL author="qboy0000<2006qgw@163.com>"
+
+#COPY dist/floodlight /root/floodlight
+
+ENV XARY 1.7.0
+ENV PYTHON3 3.7.9
+ENV CHROMEDRIVER_VERSION 88.0.4324.96
+
+# RUN sed -i 's/http:\/\/archive.ubuntu.com/http:\/\/mirrors.huaweicloud.com/g' /etc/apt/sources.list && \
+# sed -i 's/http:\/\/security.ubuntu.com/http:\/\/mirrors.huaweicloud.com/g' /etc/apt/sources.list && \
+# apt-get update && \
+
+COPY ./requirement.txt /tmp/
+
+RUN set -x && sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories && \
+ apk update && \
+# GMT+8
+# set time zone
+ apk add --no-cache tzdata && \
+ echo "Asia/Shanghai" > /etc/timezone && \
+ cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && \
+ # dpkg-reconfigure -f noninteractive tzdata && \
+ apk add --no-cache musl-dev linux-headers git wget curl unzip mysql-client mariadb-dev make gcc chromium chromium-chromedriver && \
+
+# update python3 to 3.7
+ # wget https://www.python.org/ftp/python/$PYTHON3/Python-$PYTHON3.tar.xz -O /tmp/Python-$PYTHON3.tar.xz && \
+ # cd /tmp && tar -xvf Python-$PYTHON3.tar.xz && cd Python-$PYTHON3 && ./configure && make && make install && \
+
+ git clone --depth 1 https://github.com/knownsec/LSpider.git /opt/LSpider && \
+
+ cd /opt/LSpider/ && \
+ pip3 install -r /tmp/requirement.txt -i https://mirrors.aliyun.com/pypi/simple && \
+
+ wget https://download.xray.cool/xray/$XARY/xray_linux_amd64.zip -O /tmp/xray_linux_amd64.zip && \
+ mkdir -p /opt/xray && \
+ unzip /tmp/xray_linux_amd64.zip -d /opt/xray && \
+
+ # wget http://npm.taobao.org/mirrors/chromedriver/$CHROMEDRIVER_VERSION/chromedriver_linux64.zip -O /tmp/chromedriver_linux64.zip && \
+ # mkdir /opt/LSpider/bin && \
+ # unzip /tmp/chromedriver_linux64.zip -d /usr/bin && \
+ # mv /usr/bin/chromedriver /usr/bin/chromedriver_linux64 && \
+
+ rm -rf /tmp/*
+
+WORKDIR /opt/LSpider/
+COPY ./docker-entrypoint.sh /opt/LSpider/docker-entrypoint.sh
+COPY ./settings.py.docker.bak /opt/LSpider/LSpider/settings.py
+COPY ./xray.sh /opt/LSpider/xray.sh
+COPY ./chromeheadless.py /opt/LSpider/core/
+
+RUN chmod a+x /opt/LSpider/*.sh
+
+EXPOSE 2062
+
+CMD /opt/LSpider/docker-entrypoint.sh
\ No newline at end of file
diff --git a/docker/chromeheadless.py b/docker/chromeheadless.py
new file mode 100644
index 0000000..97ba9fc
--- /dev/null
+++ b/docker/chromeheadless.py
@@ -0,0 +1,599 @@
+#!/usr/bin/env python
+# encoding: utf-8
+'''
+@author: LoRexxar
+@contact: lorexxar@gmail.com
+@file: chromeheadless.py
+@time: 2020/3/17 15:17
+@desc:
+'''
+
+import time
+
+import selenium
+from selenium import webdriver
+from selenium.webdriver.support.ui import WebDriverWait
+from selenium.webdriver.support import expected_conditions as EC
+from selenium.webdriver.common.keys import Keys
+from selenium.common.exceptions import WebDriverException
+from selenium.webdriver.common.action_chains import ActionChains
+
+import os
+import traceback
+import random
+from urllib.parse import urlparse
+
+from LSpider.settings import CHROME_WEBDRIVER_PATH, CHROME_PROXY, IS_OPEN_CHROME_PROXY
+from LSpider.settings import CHROME_DOWNLOAD_PATH, IS_TEST_ENVIRONMENT
+from utils.log import logger
+from utils.base import random_string
+
+
+class ChromeDriver:
+ def __init__(self):
+ self.chromedriver_path = CHROME_WEBDRIVER_PATH
+ self.checkos()
+
+ try:
+ self.init_object()
+
+ except selenium.common.exceptions.SessionNotCreatedException:
+ logger.error("[Chrome Headless] ChromeDriver version wrong error.")
+ exit(0)
+
+ except selenium.common.exceptions.WebDriverException:
+ logger.error("[Chrome Headless] ChromeDriver load error.")
+ exit(0)
+
+ self.origin_url = ""
+
+ def checkos(self):
+
+ if os.name == 'nt':
+ self.chromedriver_path = os.path.join(self.chromedriver_path, "chromedriver_win32.exe")
+ elif os.name == 'posix':
+ self.chromedriver_path = os.path.join(self.chromedriver_path, "chromedriver")
+ else:
+ self.chromedriver_path = os.path.join(self.chromedriver_path, "chromedriver_mac64")
+
+ def init_object(self):
+
+ self.chrome_options = webdriver.ChromeOptions()
+ if not IS_TEST_ENVIRONMENT:
+ self.chrome_options.add_argument('--headless')
+ self.chrome_options.add_argument('--disable-gpu')
+ self.chrome_options.add_argument('--no-sandbox')
+ self.chrome_options.add_argument('--disable-images')
+ self.chrome_options.add_argument('--ignore-certificate-errors')
+ self.chrome_options.add_argument('--allow-running-insecure-content')
+ self.chrome_options.add_argument('blink-settings=imagesEnabled=false')
+ self.chrome_options.add_argument('--omnibox-popup-count="5"')
+ self.chrome_options.add_argument("--disable-popup-blocking")
+ self.chrome_options.add_argument("--disable-web-security")
+ self.chrome_options.add_argument("--disk-cache-size=1000")
+
+ # for download path
+ # try:
+ # if os.path.exists(CHROME_DOWNLOAD_PATH):
+ # os.mkdir(CHROME_DOWNLOAD_PATH)
+ #
+ # chrome_downloadfile_path = CHROME_DOWNLOAD_PATH
+ # except:
+ # chrome_downloadfile_path = "./tmp"
+
+ if os.name == 'nt':
+ chrome_downloadfile_path = "./tmp"
+ else:
+ chrome_downloadfile_path = '/dev/null'
+
+ prefs = {
+ 'download.prompt_for_download': True,
+ 'profile.default_content_settings.popups': 0,
+ 'download.default_directory': chrome_downloadfile_path
+ }
+
+ self.chrome_options.add_experimental_option('prefs', prefs)
+
+ # proxy
+ desired_capabilities = self.chrome_options.to_capabilities()
+ if IS_OPEN_CHROME_PROXY:
+ logger.info("[Chrome Headless] Proxy {} init".format(CHROME_PROXY))
+
+ desired_capabilities['acceptSslCerts'] = True
+ desired_capabilities['acceptInsecureCerts'] = True
+ desired_capabilities['proxy'] = {
+ "httpProxy": CHROME_PROXY,
+ "ftpProxy": CHROME_PROXY,
+ "sslProxy": CHROME_PROXY,
+ "noProxy": None,
+ "proxyType": "MANUAL",
+ "class": "org.openqa.selenium.Proxy",
+ "autodetect": False,
+ }
+ # self.chrome_options.add_argument('--proxy-server={}'.format(CHROME_PROXY))
+
+ self.chrome_options.add_argument(
+ 'user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36')
+
+ self.driver = webdriver.Chrome(chrome_options=self.chrome_options, executable_path=self.chromedriver_path,
+ desired_capabilities=desired_capabilities)
+
+ self.driver.set_page_load_timeout(15)
+ self.driver.set_script_timeout(5)
+
+ def get_resp(self, url, cookies=None, times=0, isclick=True):
+
+ try:
+ response_code = 1
+
+ self.origin_url = url
+ self.driver.implicitly_wait(5)
+ self.driver.get(url)
+
+ if cookies:
+ self.add_cookie(cookies)
+ self.driver.implicitly_wait(10)
+ self.driver.get(url)
+
+ # else:
+ # 检查是否是登录界面
+ if self.check_login():
+ logger.info("[ChromeHeadless] Page {} need login.".format(url))
+ response_code = 2
+ # return 2, True, ""
+
+ time.sleep(3)
+
+ if isclick:
+ if not self.click_page():
+ self.driver.implicitly_wait(10)
+ self.driver.get(url)
+
+ response_source = self.driver.page_source
+ response_title = self.driver.title
+
+ # return 1, self.driver.page_source, self.driver.title
+ return response_code, response_source, response_title
+
+ except selenium.common.exceptions.InvalidSessionIdException:
+ logger.warning("[ChromeHeadless]Chrome Headless quit unexpectedly..")
+
+ self.init_object()
+
+ logger.warning("[ChromeHeadless]retry once..{}".format(url))
+ self.get_resp(url, cookies, times + 1, isclick)
+ return -1, False, ""
+
+ except selenium.common.exceptions.TimeoutException:
+ logger.warning("[ChromeHeadless]Chrome Headless request timeout..{}".format(url))
+ if times > 0:
+ return -1, False, ""
+
+ logger.warning("[ChromeHeadless]retry once..{}".format(url))
+ self.get_resp(url, cookies, times + 1, isclick)
+ return -1, False, ""
+
+ except selenium.common.exceptions.InvalidCookieDomainException:
+ logger.warning("[ChromeHeadless]Chrome Headless request with cookie error..{}".format(url))
+
+ logger.warning("[ChromeHeadless]retry once..{}".format(url))
+ self.get_resp(url, None, times + 1, isclick)
+ return -1, False, ""
+
+ except selenium.common.exceptions.InvalidArgumentException:
+ logger.warning("[ChromeHeadless]Request error...{}".format(url))
+ logger.warning("[ChromeHeadless]{}".format(traceback.format_exc()))
+ return -1, False, ""
+
+ def add_cookie(self, cookies):
+
+ for cookie in cookies.split(';'):
+ key = cookie.split('=')[0].strip()
+ value = cookie.split('=')[1].strip()
+
+ if key and value:
+ try:
+ self.driver.add_cookie({'name': key, 'value': value})
+
+ except selenium.common.exceptions.UnableToSetCookieException:
+ logger.warning("[ChromeHeadless] Wrong Cookie {} set..".format(key))
+ continue
+
+ def click_page(self):
+
+ # self.click_link()
+ # 先把格子和表单填了
+ self.click_button()
+
+ # 链接要处理一下
+ self.click_link()
+
+ # onclick
+ self.click_onlick()
+
+ def check_back(self):
+ if self.check_host():
+ new_url = self.driver.current_url
+ # self.driver.back()
+ self.driver.implicitly_wait(5)
+ self.driver.get(self.origin_url)
+
+ return True
+ return False
+
+ def click_link(self):
+ """
+ 遇到一个问题,如果页面变化,那么获取到的标签hook会丢失,这里我们尝试用计数器来做
+ """
+
+ links = self.driver.find_elements_by_xpath('//a')
+ links_len = len(links)
+
+ for i in range(links_len):
+
+ try:
+ links = self.driver.find_elements_by_xpath('//a')
+ link = links[i]
+
+ href = link.get_attribute('href')
+ self.driver.execute_script(
+ "atags = document.getElementsByTagName('a');for(i=0;i<=atags.length;i++) { if(atags[i]){atags[i].setAttribute('target', '')}}")
+
+ if link.is_displayed() and link.is_enabled():
+ link.click()
+
+ self.check_back()
+
+ except selenium.common.exceptions.ElementNotInteractableException as e:
+ logger.warning("[ChromeHeadless][Click Page] error interact. {}".format(e))
+
+ self.check_back()
+ continue
+
+ except selenium.common.exceptions.StaleElementReferenceException:
+ logger.warning("[ChromeHeadless][Click Page] page reload or wrong back redirect")
+
+ self.check_back()
+ return
+
+ except IndexError:
+ logger.warning("[ChromeHeadless][Click Page] wrong index for link")
+ continue
+
+ except selenium.common.exceptions.NoSuchElementException:
+ logger.warning("[ChromeHeadless][Click Page] No Such Element")
+ return
+
+ def click_onlick(self):
+ """
+ 点包含onlick的按钮
+ :return:
+ """
+ divs = self.driver.find_elements_by_xpath('//*[@onclick]')
+ divs_len = len(divs)
+
+ for i in range(divs_len):
+
+ try:
+ divs = self.driver.find_elements_by_xpath('//*[@onclick]')
+ div = divs[i]
+
+ # href = div.get_attribute('href')
+
+ div.click()
+
+ self.check_back()
+
+ except selenium.common.exceptions.ElementNotInteractableException as e:
+ logger.warning("[ChromeHeadless][Click Page] error interact. {}".format(e))
+
+ self.check_back()
+ continue
+
+ except selenium.common.exceptions.StaleElementReferenceException:
+ logger.warning("[ChromeHeadless][Click Page] page reload or wrong back redirect")
+
+ self.check_back()
+ return
+
+ except IndexError:
+ logger.warning("[ChromeHeadless][Click Page] wrong index for link")
+ continue
+
+ except selenium.common.exceptions.NoSuchElementException:
+ logger.warning("[ChromeHeadless][Click Page] No Such Element")
+ return
+
+ def smart_input(self, input):
+ """
+ 简单的智能表单填充
+ :param input:
+ :return:
+ """
+
+ # user
+ for key in ['user', '用户名', 'name']:
+ if key in input.get_attribute('outerHTML'):
+ input.send_keys('admin')
+ return
+
+ # pass
+ for key in ['pass', 'pwd', '密码']:
+ if key in input.get_attribute('outerHTML'):
+ input.send_keys('123456')
+ return
+
+ # email
+ for key in ['email']:
+ if key in input.get_attribute('outerHTML'):
+ input.send_keys('{}@{}.com'.format(random_string(4), random_string(4)))
+ return
+
+ # phone
+ for key in ['phone']:
+ if key in input.get_attribute('outerHTML'):
+ input.send_keys('{}'.format(random.randint(13000000000, 14000000000)))
+ return
+
+ # address
+ for key in ['address', 'street']:
+ if key in input.get_attribute('outerHTML'):
+ input.send_keys('4492 Garfield Road')
+ return
+
+ # checkbox
+ if input.get_attribute('type') == 'checkbox':
+ input.click()
+
+ if input.get_attribute('type') == 'radio':
+ input.click()
+
+ input.send_keys(random_string())
+
+ return
+
+ def finish_form(self):
+ """
+ 填充表单
+ :return:
+ """
+ inputs = self.driver.find_elements_by_xpath("//input")
+ self.driver.execute_script(
+ "itags = document.getElementsByTagName('input');for(i=0;i<=itags.length;i++) { if(itags[i]){itags[i].removeAttribute('style')}}")
+
+ input_lens = len(inputs)
+
+ if not inputs:
+ return
+
+ for i in range(input_lens):
+ try:
+ input = inputs[i]
+
+ # 移动鼠标
+ # 如果标签没有隐藏,那么移动鼠标
+ if input.is_enabled() and input.is_displayed():
+
+ action = ActionChains(self.driver)
+ action.move_to_element(input).perform()
+
+ self.smart_input(input)
+ else:
+ tag_id = input.get_attribute('id')
+
+ if tag_id:
+ self.driver.execute_script(
+ "document.getElementById('{}').setAttribute('value', '{}')".format(tag_id,
+ random_string()))
+
+ except selenium.common.exceptions.ElementNotInteractableException as e:
+ logger.warning("[ChromeHeadless][Click button] error interact...{}".format(e))
+ tag_id = input.get_attribute('id')
+
+ if tag_id:
+ self.driver.execute_script(
+ "document.getElementById('{}').setAttribute('value', '{}')".format(tag_id, random_string()))
+
+ continue
+
+ except selenium.common.exceptions.JavascriptException:
+ tag_id = input.get_attribute('id')
+
+ if tag_id:
+ self.driver.execute_script(
+ "document.getElementById('{}').setAttribute('value', '{}')".format(tag_id, random_string()))
+
+ continue
+
+ except selenium.common.exceptions.StaleElementReferenceException:
+ logger.warning("[ChromeHeadless][Click button] page reload or wrong back redirect")
+
+ return
+
+ except IndexError:
+ logger.warning("[ChromeHeadless][Click button] wrong index for button")
+ continue
+
+ def click_button(self):
+
+ try:
+ submit_buttons = self.driver.find_elements_by_xpath("//input[@type='submit']")
+
+ submit_buttons_len = len(submit_buttons)
+
+ for i in range(submit_buttons_len):
+
+ try:
+ submit_buttons = self.driver.find_elements_by_xpath("//input[@type='submit']")
+ submit_button = submit_buttons[i]
+
+ # 完成表单
+ self.finish_form()
+
+ # 移动鼠标
+ if submit_button.is_displayed() and submit_button.is_enabled():
+ action = ActionChains(self.driver)
+ action.move_to_element(submit_button).perform()
+
+ submit_button.click()
+
+ self.check_back()
+ except selenium.common.exceptions.ElementNotInteractableException:
+ logger.warning("[ChromeHeadless][Click button] error interact")
+
+ self.check_back()
+ continue
+
+ except selenium.common.exceptions.StaleElementReferenceException:
+ logger.warning("[ChromeHeadless][Click button] page reload or wrong back redirect")
+
+ return
+
+ except IndexError:
+ logger.warning("[ChromeHeadless][Click button] wrong index for button")
+ continue
+ except selenium.common.exceptions.NoSuchElementException as e:
+ logger.warning("[ChromeHeadless][Click button] No Such Element.{}".format(e))
+
+ try:
+ buttons = self.driver.find_elements_by_tag_name('button')
+ buttons_len = len(buttons)
+
+ for i in range(buttons_len):
+
+ try:
+ buttons = self.driver.find_elements_by_tag_name('button')
+ button = buttons[i]
+
+ # 完成表单
+ self.finish_form()
+
+ if button.is_enabled() and button.is_displayed():
+ action = ActionChains(self.driver)
+ action.move_to_element(button).perform()
+ button.click()
+
+ self.check_back()
+
+ except selenium.common.exceptions.ElementNotInteractableException:
+ logger.warning("[ChromeHeadless][Click button] error interact")
+
+ self.check_back()
+ continue
+
+ except selenium.common.exceptions.StaleElementReferenceException:
+ logger.warning("[ChromeHeadless][Click button] page reload or wrong back redirect")
+
+ return
+
+ except IndexError:
+ logger.warning("[ChromeHeadless][Click button] wrong index for button")
+ continue
+
+ except selenium.common.exceptions.NoSuchElementException:
+ logger.warning("[ChromeHeadless][Click button] No Such Element.{}".format(traceback.format_exc()))
+ return
+
+ def check_login(self):
+ """
+ 检查当前页面是否有登录框
+ :return:
+ """
+ try:
+ is_has_login_form = False
+ is_has_login_button = False
+ is_has_login_input = False
+ is_has_login_a = False
+
+ forms = self.driver.find_elements_by_tag_name('form')
+ forms_len = len(forms)
+
+ if not forms:
+ is_has_login_form = False
+
+ for i in range(forms_len):
+ form = forms[i]
+
+ for key in ['login', '登录', 'sign', '用户名', 'user', 'pass', '用户名', 'pwd', 'phone', '注册']:
+ if key in form.text:
+ is_has_login_form = True
+
+ buttons = self.driver.find_elements_by_tag_name('button')
+ buttons_len = len(buttons)
+
+ if not buttons:
+ is_has_login_button = False
+
+ for i in range(buttons_len):
+ button = buttons[i]
+
+ if button.is_enabled() and button.is_displayed():
+
+ for key in ['login', 'sign', 'user', 'pass']:
+ if key in button.get_attribute('outerHTML'):
+ is_has_login_button = True
+ inputs = self.driver.find_elements_by_tag_name('input')
+ inputs_len = len(inputs)
+
+ if not inputs:
+ is_has_login_input = False
+
+ for i in range(inputs_len):
+ input = inputs[i]
+
+ if input.is_enabled() and input.is_displayed():
+
+ for key in ['login', 'sign', 'user', 'pass', 'account', 'phone', '手机']:
+ if key in input.get_attribute('outerHTML'):
+ is_has_login_input = True
+
+ atags = self.driver.find_elements_by_tag_name('a')
+ atags_len = len(atags)
+
+ for i in range(atags_len):
+ atag = atags[i]
+
+ if atag.is_enabled() and atag.is_displayed():
+
+ for key in ['login', 'sign', '登录', '登入']:
+ if key in atag.text:
+ is_has_login_a = True
+
+ if is_has_login_button or is_has_login_form or is_has_login_input or is_has_login_a:
+ return True
+ else:
+ return False
+
+ except selenium.common.exceptions.NoSuchElementException:
+ logger.warning("[ChromeHeadless][Click Page] No Such Element")
+ return
+
+ except:
+ logger.error("[ChromeHeadless] Bad check...{}".format(traceback.format_exc()))
+ return False
+
+ def check_host(self):
+ origin = urlparse(self.origin_url)
+ now = urlparse(self.driver.current_url)
+
+ if (origin.netloc != now.netloc) or (origin.path.replace('/', '') != now.path.replace('/', '')) or (
+ origin.params != now.params) or (origin.query != now.query):
+ return now.geturl()
+
+ return False
+
+ def close_driver(self):
+ self.driver.quit()
+ # self.driver.close()
+ time.sleep(1)
+
+ def __del__(self):
+ self.close_driver()
+
+
+if __name__ == "__main__":
+ Req = ChromeDriver()
+
+ Req.get_resp("http://baidu.com", isclick=False)
+
+ # print(Req.get_resp("https://cdn.jsdelivr.net/npm/jquery@3.3.1/dist/jquery.min.js"))
diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml
new file mode 100644
index 0000000..a399530
--- /dev/null
+++ b/docker/docker-compose.yaml
@@ -0,0 +1,56 @@
+version: "2.0"
+
+services:
+ mysql:
+ image: mysql:5.7
+ container_name: mysql
+ hostname: mysql
+ restart: always
+ environment:
+ - MYSQL_ROOT_PASSWORD=mysql@lspider
+ phpmyadmin:
+ image: phpmyadmin:5.0.4-apache
+ container_name: myadmin
+ hostname: myadmin
+ restart: always
+ environment:
+ - MYSQL_ROOT_PASSWORD=mysql@lspider
+ - PMA_HOST=mysql
+ - MYSQL_USER=root
+ - MYSQL_PASSWORD=phpmyadmin@123
+ ports:
+ - 2063:80
+ links:
+ - mysql
+ depends_on: ['mysql']
+ rabbitmq:
+ image: rabbitmq:3
+ container_name: rabbitmq
+ hostname: rabbitmq
+ restart: always
+ environment:
+ - RABBITMQ_DEFAULT_USER=user
+ - RABBITMQ_DEFAULT_PASS=rabbitmq@lspider
+ - RABBITMQ_DEFAULT_VHOST=lspider_vhost
+ lspider:
+ build: .
+ container_name: lspider
+ hostname: lspider
+ restart: always
+ environment:
+ - RABBITMQ_IP=rabbitmq
+ - RABBITMQ_PORT=5672
+ - RABBITMQ_USERNAME=user
+ - RABBITMQ_PASSWORD=rabbitmq@lspider
+ - RABBITMQ_VHOST=lspider_vhost
+ - MYSQL_USER=root
+ - MYSQL_PASSWORD=mysql@lspider
+ - MYSQL_HOST=mysql
+ - MYSQL_PORT=3306
+ - MYSQL_DBName=LSpider
+ ports:
+ - 2062:2062
+ links:
+ - mysql
+ - rabbitmq
+ depends_on: ['mysql','rabbitmq']
\ No newline at end of file
diff --git a/docker/docker-entrypoint.sh b/docker/docker-entrypoint.sh
new file mode 100644
index 0000000..6d66972
--- /dev/null
+++ b/docker/docker-entrypoint.sh
@@ -0,0 +1,16 @@
+#!/bin/sh
+
+#wait for mysql & rabbitmq startup
+sleep 20
+
+mysql -u$MYSQL_USER -p$MYSQL_PASSWORD -h$MYSQL_HOST -P $MYSQL_PORT -e "CREATE DATABASE $MYSQL_DBName;"
+
+python3 manage.py makemigrations
+python3 manage.py migrate
+
+sed -i 's/\/bin\/bash/\/bin\/sh/g' lspider_webhook.sh
+sed -i 's/\/bin\/bash/\/bin\/sh/g' lspider_start.sh
+
+nohup ./lspider_webhook.sh &
+nohup ./lspider_start.sh &
+./xray.sh
\ No newline at end of file
diff --git a/docker/readme.md b/docker/readme.md
new file mode 100644
index 0000000..76eafa9
--- /dev/null
+++ b/docker/readme.md
@@ -0,0 +1,17 @@
+# LSpider的docker环境
+> 为了简化构建环境,将该环境部署为一个docker
+
+## docker运行如下
+```
+docker-compose up -d
+```
+
+## 结果访问
+```
+http://127.0.0.1:2062/vuls/
+```
+
+## phpmyadmin访问
+```
+http://127.0.0.1:2063
+```
\ No newline at end of file
diff --git a/docker/requirement.txt b/docker/requirement.txt
new file mode 100644
index 0000000..c2be690
--- /dev/null
+++ b/docker/requirement.txt
@@ -0,0 +1,9 @@
+django==3.1.1
+mysqlclient==2.0.1
+pika==1.1.0
+bs4
+selenium==3.141.0
+requests==2.24.0
+wechatpy==1.8.14
+colorlog
+pycrypto
\ No newline at end of file
diff --git a/docker/settings.py.docker.bak b/docker/settings.py.docker.bak
new file mode 100644
index 0000000..acc4165
--- /dev/null
+++ b/docker/settings.py.docker.bak
@@ -0,0 +1,223 @@
+"""
+Django settings for LSpider project.
+
+Generated by 'django-admin startproject' using Django 1.11.29.
+
+For more information on this file, see
+https://docs.djangoproject.com/en/1.11/topics/settings/
+
+For the full list of settings and their values, see
+https://docs.djangoproject.com/en/1.11/ref/settings/
+"""
+
+import os
+
+# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+
+# Quick-start development settings - unsuitable for production
+# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
+
+# SECURITY WARNING: keep the secret key used in production secret!
+SECRET_KEY = 'this_is_a_secret_key'
+
+# SECURITY WARNING: don't run with debug turned on in production!
+DEBUG = True
+
+ALLOWED_HOSTS = ['*']
+
+
+# Application definition
+
+INSTALLED_APPS = [
+ 'django.contrib.admin',
+ 'django.contrib.auth',
+ 'django.contrib.contenttypes',
+ 'django.contrib.sessions',
+ 'django.contrib.messages',
+ 'django.contrib.staticfiles',
+ 'web.index',
+ 'web.spider',
+ 'web.vultargetspider',
+]
+
+MIDDLEWARE = [
+ 'django.middleware.security.SecurityMiddleware',
+ 'django.contrib.sessions.middleware.SessionMiddleware',
+ 'django.middleware.common.CommonMiddleware',
+ 'django.middleware.csrf.CsrfViewMiddleware',
+ 'django.contrib.auth.middleware.AuthenticationMiddleware',
+ 'django.contrib.messages.middleware.MessageMiddleware',
+ 'django.middleware.clickjacking.XFrameOptionsMiddleware',
+]
+
+ROOT_URLCONF = 'LSpider.urls'
+
+TEMPLATES = [
+ {
+ 'BACKEND': 'django.template.backends.django.DjangoTemplates',
+ 'DIRS': [os.path.join(BASE_DIR, 'templates')]
+ ,
+ 'APP_DIRS': True,
+ 'OPTIONS': {
+ 'context_processors': [
+ 'django.template.context_processors.debug',
+ 'django.template.context_processors.request',
+ 'django.contrib.auth.context_processors.auth',
+ 'django.contrib.messages.context_processors.messages',
+ ],
+ },
+ },
+]
+
+WSGI_APPLICATION = 'LSpider.wsgi.application'
+
+
+# Database
+# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
+
+DATABASES = {
+ 'default': {
+ 'ENGINE': 'django.db.backends.mysql',
+ 'NAME': 'LSpider',
+ 'USER': 'root',
+ 'PASSWORD': 'mysql@lspider',
+ 'HOST': 'mysql',
+ 'PORT': '3306',
+ 'OPTIONS': {
+ 'init_command': 'SET default_storage_engine=INNODB;SET NAMES utf8mb4',
+ 'charset': 'utf8mb4',
+ },
+ 'TEST': {
+ 'CHARSET': 'utf8',
+ 'COLLATION': 'utf8_general_ci',
+ },
+ }
+}
+
+
+# Password validation
+# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
+
+AUTH_PASSWORD_VALIDATORS = [
+ {
+ 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
+ },
+ {
+ 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
+ },
+ {
+ 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
+ },
+ {
+ 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
+ },
+]
+
+
+# cors
+CORS_ALLOW_CREDENTIALS = True
+CORS_ORIGIN_ALLOW_ALL = True
+CORS_ORIGIN_WHITELIST = [
+ '*'
+]
+
+CORS_ALLOW_METHODS = [
+ 'DELETE',
+ 'GET',
+ 'OPTIONS',
+ 'PATCH',
+ 'POST',
+ 'PUT',
+ 'VIEW',
+]
+
+CORS_ALLOW_HEADERS = [
+ 'XMLHttpRequest',
+ 'X_FILENAME',
+ 'accept-encoding',
+ 'authorization',
+ 'content-type',
+ 'dnt',
+ 'origin',
+ 'user-agent',
+ 'x-csrftoken',
+ 'x-requested-with',
+ 'Pragma',
+]
+
+CSRF_COOKIE_SAMESITE = None
+SESSION_COOKIE_SAMESITE = None
+
+# Internationalization
+# https://docs.djangoproject.com/en/1.11/topics/i18n/
+
+LANGUAGE_CODE = 'en-us'
+
+TIME_ZONE = 'UTC'
+
+USE_I18N = True
+
+USE_L10N = True
+
+USE_TZ = True
+
+
+# Static files (CSS, JavaScript, Images)
+# https://docs.djangoproject.com/en/1.11/howto/static-files/
+
+STATIC_URL = os.path.join(BASE_DIR, '/static/')
+
+CHROME_DOWNLOAD_PATH = '/tmp/lspider'
+
+# Chrome webdriver
+CHROME_WEBDRIVER_PATH = os.path.join(BASE_DIR, 'bin/')
+
+# setting for spider
+
+LIMIT_DEEP = 2
+THREADPOOL_MAX_THREAD_NUM = 5
+
+# rabbitmq
+RABBITMQ_IP = "rabbitmq"
+RABBITMQ_PORT = "5672"
+RABBITMQ_USERNAME = "user"
+RABBITMQ_PASSWORD = "rabbitmq@lspider"
+RABBITMQ_VHOST = "lspider_vhost"
+
+# whether to open rabbitmq
+IS_OPEN_RABBITMQ = True
+
+# proxy for chrome headless
+IS_OPEN_CHROME_PROXY = True
+CHROME_PROXY = '127.0.0.1:7777'
+
+# for hackerone
+HACKERONE_USERNAME = ""
+HACKERONE_PASSWORD = ""
+
+# loghander
+LOGHANDER_IS_OPEN_WEIXIN = False
+
+# for weixin
+WECHAT_NOTICE = {
+ 'corp_id': ' ',
+ 'secret': ' ',
+ 'agent_id': ' ',
+}
+
+WECHAT_NOTICE_DEBUG = {
+ 'corp_id': ' ',
+ 'secret': ' ',
+ 'agent_id': ' ',
+}
+
+# for xray result
+VUL_LIST_PATH = os.path.join(BASE_DIR, 'vuls/')
+
+if os.path.isdir(VUL_LIST_PATH) is not True:
+ os.mkdir(VUL_LIST_PATH)
+
+# for test
+IS_TEST_ENVIRONMENT = False
diff --git a/docker/xray.sh b/docker/xray.sh
new file mode 100644
index 0000000..450c9d0
--- /dev/null
+++ b/docker/xray.sh
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+while :
+do
+ if [ $(ps aux | grep xray_linux_amd64|grep -v grep|wc -l) -eq 0 ];then
+ echo "start"
+ /opt/xray/xray_linux_amd64 webscan --listen 127.0.0.1:7777 --html-output /opt/LSpider/vuls/r__datetime__.html
+ fi
+ sleep 10
+done
diff --git a/docs/6.png b/docs/6.png
new file mode 100644
index 0000000..be58ff3
Binary files /dev/null and b/docs/6.png differ
diff --git a/docs/changelog.md b/docs/changelog.md
new file mode 100644
index 0000000..fdaf301
--- /dev/null
+++ b/docs/changelog.md
@@ -0,0 +1,10 @@
+## 更新日志
+- 2021-1-26
+ - LSpider v1.0.0正式发布
+ - 更新大量相关文档
+- 2021-2-2
+ - LSpider v1.0.1
+ - 添加Web模式用来适配被动扫描器输出
+- 2021-2-22
+ - LSpider v1.0.2
+ - 添加docker环境以便于快速搭建环境。感谢@QGW
\ No newline at end of file
diff --git a/docs/config.md b/docs/config.md
index e4ef1cb..c37778c 100644
--- a/docs/config.md
+++ b/docs/config.md
@@ -80,6 +80,16 @@ WECHAT_NOTICE_DEBUG = {
}
```
+这个配置是关于被动扫描器的输出位置,这里默认为当前目录的vuls,其中对应的web界面也为相同路径。
+```
+# for xray result
+VUL_LIST_PATH = os.path.join(BASE_DIR, 'vuls/')
+
+if os.path.isdir(VUL_LIST_PATH) is not True:
+ os.mkdir(VUL_LIST_PATH)
+```
+
+
如果开启这个配置,Chrome webdriver 会以非headless的模式启动,便于调试环境
```
# for test
diff --git a/lspider_start.sh b/lspider_start.sh
index d72359f..16fd573 100644
--- a/lspider_start.sh
+++ b/lspider_start.sh
@@ -4,6 +4,7 @@ while :
do
if [ $(ps aux | grep SpiderCoreBackendStart|grep -v grep|wc -l) -eq 0 ];then
echo "start"
+ chmod 644 $(cd "$(dirname "$0")";pwd)/vuls/*
python3 $(cd "$(dirname "$0")";pwd)/manage.py SpiderCoreBackendStart
fi
sleep 100
diff --git a/lspider_stop.sh b/lspider_stop.sh
index 496f85e..33f5d11 100644
--- a/lspider_stop.sh
+++ b/lspider_stop.sh
@@ -4,6 +4,3 @@ kill -2 $(ps aux | grep SpiderCoreBackendStart|grep -v grep|awk '{print $2}')
sleep 3
kill -9 $(ps aux | grep SpiderCoreBackendStart|grep -v grep|awk '{print $2}')
kill -9 $(ps aux | grep chrome|grep -v grep|awk '{print $2}')
-
-
-chown www:www /home/wwwroot/default/xray/r*
\ No newline at end of file
diff --git a/templates/Vullist.html b/templates/Vullist.html
new file mode 100644
index 0000000..b421b59
--- /dev/null
+++ b/templates/Vullist.html
@@ -0,0 +1,9 @@
+{% block title %}Vulfile list:{% endblock %}
+
+{% block body %}
+
+ {% for file in filelist %}
+ <a href="{{ file }}">{{ file }}</a><br>
+ {% endfor %}
+
+{% endblock %}
\ No newline at end of file
diff --git a/web/index/urls.py b/web/index/urls.py
index 79e705f..5ff0272 100644
--- a/web/index/urls.py
+++ b/web/index/urls.py
@@ -21,5 +21,7 @@
urlpatterns = [
path("", views.index),
+ path("vuls/", views.VulFileListView.as_view(), name="vullist"),
+ path("vuls/<path:filepath>", views.VulFileListView.as_view(), name="vulpath"),
path("webhook", csrf_exempt(views.WebhookView.as_view()), name="webhook"),
]
diff --git a/web/index/views.py b/web/index/views.py
index 516fe8a..fdd40f8 100644
--- a/web/index/views.py
+++ b/web/index/views.py
@@ -1,8 +1,10 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
+import os
import json
import time
+import codecs
from django.views import View
from django.shortcuts import render
@@ -10,11 +12,51 @@
from utils.wechathandler import ReMess
+from LSpider.settings import VUL_LIST_PATH
+
def index(req):
return HttpResponse("Hello Lspider.")
+class VulFileListView(View):
+ """
+ 扫描器结果展示列表
+ """
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ def get(self, request, filepath=""):
+
+ if filepath and ('./' in filepath or '..' in filepath):
+ return HttpResponse("Go back. Hacker~")
+
+ now_vul_path = os.path.join(VUL_LIST_PATH, filepath)
+
+ if os.path.isfile(now_vul_path):
+
+ content = codecs.open(now_vul_path, 'r', encoding='utf-8', errors='ignore').read()
+ return HttpResponse(content)
+
+ if not os.path.isdir(now_vul_path):
+ return HttpResponse("Bad Request. VUL_LIST_PATH needs to be configured or current path Error.")
+
+ self.file_list = []
+
+ for filename in os.listdir(now_vul_path):
+ if os.path.isdir(os.path.join(now_vul_path, filename)):
+ self.file_list.append("{}/".format(filename))
+
+ else:
+ self.file_list.append(filename)
+
+ self.file_list.sort(reverse=True)
+ data = {'filelist': self.file_list}
+
+ return render(request, 'Vullist.html', data)
+
+
class WebhookView(View):
"""
授权模块
diff --git a/xray.sh b/xray.sh
index 69efd43..1aac02e 100644
--- a/xray.sh
+++ b/xray.sh
@@ -4,7 +4,7 @@ while :
do
if [ $(ps aux | grep xray_linux_amd64|grep -v grep|wc -l) -eq 0 ];then
echo "start"
- /home/ubuntu/lorexxar/xray/xray/xray_linux_amd64 webscan --listen 127.0.0.1:7777 --html-output /home/wwwroot/default/xray/r__datetime__.html
+ ../xray/xray_linux_amd64 webscan --listen 127.0.0.1:7777 --html-output $(cd "$(dirname "$0")";pwd)/vuls/r__datetime__.html
fi
sleep 10
done