mirror of https://github.com/OWASP/Nettacker.git
Migration to sqlalchemy (#140)
* Initial setup to migrate to sqlalchemy
* Add support for MySQL and Sqlite
* Codacy fixes
* Add sqlalchemy to requirements
* Fixes Travis Issue
* Initial creation of the database
* remove unnecessary new lines
* add standard header and remove unnecessary NL
* add standard header and remove unnecessary NL
* Integrating the sqlalchemy-migration into framework
* Update readme.md for the database module
* replace type with isinstance - codacy fixation
* declaring an unused variable https://stackoverflow.com/questions/818828/is-it-possible-to-implement-a-python-for-range-loop-without-an-iterator-variable/
* declaring an unused variable
* declaring an unused variable
* fix exception type - codacy
* fix exception type codacy
* fix exception type - codacy
* Incorporate the suggested changes
* PEP8 fix
This commit is contained in:
parent
5080402339
commit
865fe3e508
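The diff below swaps the hand-written sqlite3 SQL in `api/__database.py` for SQLAlchemy sessions backed by the declarative models in `database/models.py`. As a rough orientation (illustrative only, not part of the commit; the file path and field values are placeholders), the pattern the new `database/db.py` follows looks like this:

```python
# Illustrative sketch of the pattern this commit adopts; values are placeholders.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from database.models import Report  # declarative model added by this commit

engine = create_engine("sqlite:////tmp/nettacker.db")  # URL normally built by db_inputs()
session = sessionmaker(bind=engine)()

# submit_report_to_db() now builds an ORM object and commits it instead of formatting SQL
session.add(Report(date="2018-04-28", scan_id="abc123", report_filename="report.html",
                   events_num=1, verbose=1, api_flag=0, report_type="HTML", graph_flag="",
                   category="scan", profile="", scan_method="port_scan", language="en",
                   scan_cmd="nettacker -i 127.0.0.1", ports="80"))
session.commit()
```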
@@ -32,12 +32,12 @@ from core.config_builder import _builder
from api.api_core import __remove_non_api_keys
from api.api_core import __rules
from api.api_core import __api_key_check
from api.__database import __select_results
from api.__database import __get_result
from api.__database import __last_host_logs
from api.__database import __logs_to_report_json
from api.__database import __search_logs
from api.__database import __logs_to_report_html
from database.db import __select_results
from database.db import __get_result
from database.db import __last_host_logs
from database.db import __logs_to_report_json
from database.db import __search_logs
from database.db import __logs_to_report_html
from api.__start_scan import __scan
from core._time import now
@@ -67,7 +67,32 @@ def _api_config():
"enabled": False,
"filename": "nettacker_api_access.log"
},
"api_db_name": _paths()["home_path"] + "/database.sqlite3"
}


def _database_config():
"""
Database Config (could be modified by user)
For sqlite database:
fill the name of the DB as sqlite,
DATABASE as the name of the db user wants
other details can be left empty
For mysql users:
fill the name of the DB as mysql
DATABASE as the name of the database you want to create
USERNAME, PASSWORD, HOST and the PORT of the MySQL server need to be filled respectively

Returns:
a JSON with Database configuration
"""
return {
"DB": "sqlite",
# "DB":"mysql",
"DATABASE": _paths()["home_path"] + "/nettacker.db", # Name of the database
"USERNAME": "",
"PASSWORD": "",
"HOST": "",
"PORT": ""
}

@@ -120,7 +145,12 @@ def _core_config():
"api_client_white_list_ips": _api_config()["api_client_white_list"]["ips"],
"api_access_log": _api_config()["api_access_log"]["enabled"],
"api_access_log_filename": _api_config()["api_access_log"]["filename"],
"api_db_name": _api_config()["api_db_name"],
"database_type": _database_config()["DB"],
"database_name": _database_config()["DATABASE"],
"database_username": _database_config()["USERNAME"],
"database_password": _database_config()["PASSWORD"],
"database_host": _database_config()["HOST"],
"database_port": _database_config()["PORT"],
"home_path": _paths()["home_path"],
"tmp_path": _paths()["tmp_path"],
"results_path": _paths()["results_path"]
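Per the `_database_config()` docstring above, a filled-in MySQL variant would look roughly like the sketch below; the host, port and credentials are placeholders, not values from the commit:

```python
# Hypothetical example of _database_config() set up for MySQL, as the docstring describes.
def _database_config():
    return {
        "DB": "mysql",
        "DATABASE": "nettacker",       # database to create/use on the MySQL server
        "USERNAME": "nettacker_user",  # placeholder credentials
        "PASSWORD": "changeme",
        "HOST": "127.0.0.1",
        "PORT": "3306"
    }
```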
@@ -58,7 +58,23 @@ def _api_default_config():
"enabled": False,
"filename": "nettacker_api_access.log"
},
"api_db_name": default_paths()["home_path"] + "/database.sqlite3"
}


def _database_default_config():
"""
Default database Config

Returns:
a JSON with Database configuration
"""
return {
"DB": "sqlite",
"DATABASE": default_paths()["home_path"] + "/nettacker.db",
"USERNAME": "",
"PASSWORD": "",
"HOST": "",
"PORT": ""
}

@@ -112,7 +128,12 @@ def _core_default_config():
"api_client_white_list_ips": _api_default_config()["api_client_white_list"]["ips"],
"api_access_log": _api_default_config()["api_access_log"]["enabled"],
"api_access_log_filename": _api_default_config()["api_access_log"]["filename"],
"api_db_name": _api_default_config()["api_db_name"],
"database_type": _database_default_config()["DB"],
"database_name": _database_default_config()["DATABASE"],
"database_username": _database_default_config()["USERNAME"],
"database_password": _database_default_config()["PASSWORD"],
"database_host": _database_default_config()["HOST"],
"database_port": _database_default_config()["PORT"],
"home_path": default_paths()["home_path"],
"tmp_path": default_paths()["tmp_path"],
"results_path": default_paths()["results_path"]
@@ -106,7 +106,7 @@ def __check_external_modules():
True if success otherwise None
"""
external_modules = ["argparse", "netaddr", "requests", "paramiko", "texttable", "socks", "win_inet_pton",
"flask", "sqlite3"]
"flask", "sqlalchemy"]
for module in external_modules:
try:
__import__(module)

@@ -136,16 +136,25 @@ def __check_external_modules():
except:
__die_failure("cannot access the directory {0}".format(
default_config["results_path"]))
if not os.path.isfile(default_config["api_db_name"]):
if default_config["database_type"] == "sqlite":
try:
copyfile(os.path.dirname(inspect.getfile(api)) +
'/database.sqlite3', default_config["api_db_name"])
if not os.path.isfile(default_config["api_db_name"]):
__die_failure("cannot access the directory {0}".format(
default_config["api_db_name"]))
if os.path.isfile(default_config["home_path"]+"/"+default_config["database_name"]):
pass
else:
from database.sqlite_create import sqlite_create_tables
sqlite_create_tables()
except:
__die_failure("cannot access the directory {0}".format(
default_config["api_db_name"]))
default_config["home_path"]))
elif default_config["database_type"] == "mysql":
try:
from database.mysql_create import mysql_create_tables, mysql_create_database
mysql_create_database()
mysql_create_tables()
except:
__die_failure(messages("en", "database_connection_failed"))
else:
__die_failure(messages("en", "invalid_database"))
return True
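The bootstrap above creates the schema once, keyed on `database_type`. The same helpers introduced by this commit can also be called directly; a minimal sketch, assuming the configuration already points at the intended database:

```python
# Manual schema bootstrap using the helpers added in this commit.
from core.config import _database_config
from database.sqlite_create import sqlite_create_tables
from database.mysql_create import mysql_create_database, mysql_create_tables

if _database_config()["DB"] == "sqlite":
    sqlite_create_tables()
elif _database_config()["DB"] == "mysql":
    mysql_create_database()   # CREATE DATABASE if it does not exist yet
    mysql_create_tables()     # then create the reports/hosts_log tables
```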
@@ -11,10 +11,10 @@ from core.alert import error
from core import compatible
from core._time import now
from core._die import __die_failure
from api.__database import submit_report_to_db
from api.__database import submit_logs_to_db
from api.__database import remove_old_logs
from api.__database import __logs_by_scan_id
from database.db import submit_report_to_db
from database.db import submit_logs_to_db
from database.db import remove_old_logs
from database.db import __logs_by_scan_id
from core.config_builder import default_paths
from core.config import _paths
from core.config_builder import _builder

@@ -0,0 +1,3 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
pass
@@ -1,39 +1,64 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import sqlite3
import os
import json
import time
from core.config import _core_config
from core.config_builder import _core_default_config
from core.config_builder import _builder
from flask import jsonify
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database.models import HostsLog, Report
from core.alert import warn
from core.alert import info
from core.alert import messages
from api.api_core import __structure
from flask import jsonify
from core.compatible import version
from core._time import now
from core import compatible
from api.api_core import __structure
from core.config import _database_config


DB = _database_config()["DB"]
USER = _database_config()["USERNAME"]
PASSWORD = _database_config()["PASSWORD"]
HOST = _database_config()["HOST"]
PORT = _database_config()["PORT"]
DATABASE = _database_config()["DATABASE"]


def db_inputs(connection_type):
"""
a function to determine the type of database the user wants to work with and
selects the corresponding connection to the db

Args:
connection_type: type of db we are working with

Returns:
corresponding command to connect to the db
"""
return {
"mysql": 'mysql://{0}:{1}@{2}:{3}/{4}'.format(USER, PASSWORD, HOST, PORT, DATABASE),
"sqlite": 'sqlite:///{0}'.format(DATABASE)
}[connection_type]


def create_connection(language):
"""
a function to create sqlite3 connections to db, it retries 100 times if connection returned an error
a function to create connections to db, it retries 100 times if connection returned an error

Args:
language: language

Returns:
sqlite3 connection if success otherwise False
connection if success otherwise False
"""
try:
# retries
for i in range(0, 100):
try:
return sqlite3.connect(os.path.join(os.path.dirname(os.path.dirname(__file__)),
_builder(_core_config(), _core_default_config())["api_db_name"]))
db_engine = create_engine(db_inputs(DB))
Session = sessionmaker(bind=db_engine)
session = Session()
return session
except:
time.sleep(0.01)
except:
@@ -41,59 +66,26 @@ def create_connection(language):
return False


def send_submit_query(query, language):
def send_submit_query(session, language):
"""
a function to send submit based queries to db (such as insert and update or delete), it retries 100 times if
connection returned an error.

Args:
query: query to execute
session: session to commit
language: language

Returns:
True if submitted success otherwise False
"""
conn = create_connection(language)
if not conn:
return False
try:
for i in range(1, 100):
for _ in range(1, 100):
try:
c = conn.cursor()
c.execute(query)
conn.commit()
conn.close()
session.commit()
return True
except:
except Exception as _:
time.sleep(0.01)
except:
warn(messages(language, "database_connect_fail"))
return False
return False


def send_read_query(query, language):
"""
a function to send read based queries to db (such as select), it retries 100 times if connection returned an error.

Args:
query: query to execute
language: language

Returns:
return executed query otherwise False
"""
conn = create_connection(language)
if not conn:
return False
try:
for i in range(1, 100):
try:
c = conn.cursor()
return c.execute(query)
except:
time.sleep(0.01)
except:
except Exception as _:
warn(messages(language, "database_connect_fail"))
return False
return False
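`db_inputs()` above simply maps the configured backend to an SQLAlchemy connection URL, and `create_connection()` wraps it in a retried `sessionmaker`. With placeholder (not real) credentials and paths, the two URLs come out as:

```python
# What db_inputs() evaluates to for each backend, using placeholder values.
USER, PASSWORD, HOST, PORT, DATABASE = "nettacker_user", "changeme", "127.0.0.1", "3306", "nettacker"

mysql_url = 'mysql://{0}:{1}@{2}:{3}/{4}'.format(USER, PASSWORD, HOST, PORT, DATABASE)
# -> 'mysql://nettacker_user:changeme@127.0.0.1:3306/nettacker'

sqlite_url = 'sqlite:///{0}'.format("/tmp/nettacker.db")  # DATABASE is a file path for sqlite
# -> 'sqlite:////tmp/nettacker.db'
```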
@@ -124,20 +116,13 @@ def submit_report_to_db(date, scan_id, report_filename, events_num, verbose, api
return True if submitted otherwise False
"""
info(messages(language, "inserting_report_db"))
return send_submit_query("""
INSERT INTO reports (
date, scan_id, report_filename, events_num, verbose,
api_flag, report_type, graph_flag, category, profile,
scan_method, language, scan_cmd, ports
)
VALUES (
"{0}", "{1}", "{2}", "{3}", "{4}",
"{5}", "{6}", "{7}", "{8}", "{9}",
"{10}", "{11}", "{12}", "{13}"
);
""".format(date, scan_id, report_filename, events_num, verbose,
api_flag, report_type, graph_flag, category, profile,
scan_method, language, scan_cmd, ports), language)
session = create_connection(language)
session.add(Report(
date=date, scan_id=scan_id, report_filename=report_filename, events_num=events_num, verbose=verbose,
api_flag=api_flag, report_type=report_type, graph_flag=graph_flag, category=category, profile=profile,
scan_method=scan_method, language=language, scan_cmd=scan_cmd, ports=ports
))
return send_submit_query(session, language)


def remove_old_logs(host, type, scan_id, language):

@@ -153,8 +138,10 @@ def remove_old_logs(host, type, scan_id, language):
Returns:
True if success otherwise False
"""
return send_submit_query("""delete from hosts_log where host="{0}" and type="{1}" and scan_id!="{2}" """
.format(host, type, scan_id), language)
session = create_connection(language)
old_logs = session.query(HostsLog).filter(HostsLog.host == host, HostsLog.type == type, HostsLog.scan_id != scan_id)
old_logs.delete(synchronize_session=False)
return send_submit_query(session, language)


def submit_logs_to_db(language, log):

@@ -168,22 +155,16 @@ def submit_logs_to_db(language, log):
Returns:
True if success otherwise False
"""
if type(log) == str:
if isinstance(log, str):
log = json.loads(log)
return send_submit_query("""
INSERT INTO hosts_log (
host, date, port, type, category,
description, username, password, scan_id, scan_cmd
)
VALUES (
"{0}", "{1}", "{2}", "{3}", "{4}",
"{5}", "{6}", "{7}", "{8}", "{9}"
);
""".format(log["HOST"], log["TIME"], log["PORT"], log["TYPE"], log["CATEGORY"],
log["DESCRIPTION"].encode('utf8') if version() is 2 else log[
"DESCRIPTION"],
log["USERNAME"], log["PASSWORD"], log["SCAN_ID"], log["SCAN_CMD"]),
language)

session = create_connection(language)
session.add(HostsLog(
host=log["HOST"], date=log["TIME"], port=log["PORT"], type=log["TYPE"], category=log["CATEGORY"],
description=log["DESCRIPTION"].encode('utf8') if version() is 2 else log["DESCRIPTION"],
username=log["USERNAME"], password=log["PASSWORD"], scan_id=log["SCAN_ID"], scan_cmd=log["SCAN_CMD"]
))
return send_submit_query(session, language)


def __select_results(language, page):
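`submit_logs_to_db()` accepts either a JSON string or a dict carrying the keys shown in the diff above; a hypothetical event payload (all values made up for illustration) would look like:

```python
# Example event payload for submit_logs_to_db(); keys match the diff, values are placeholders.
sample_log = {
    "HOST": "192.168.1.10",
    "TIME": "2018-04-28 12:00:00",
    "PORT": 22,
    "TYPE": "ssh_brute",
    "CATEGORY": "brute",
    "DESCRIPTION": "weak credentials found",
    "USERNAME": "root",
    "PASSWORD": "toor",
    "SCAN_ID": "0f1e2d3c",
    "SCAN_CMD": "nettacker -i 192.168.1.10 -m ssh_brute"
}
# submit_logs_to_db("en", sample_log) wraps this in a HostsLog row and commits it.
```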
@@ -200,28 +181,29 @@ def __select_results(language, page):
"""
page = int(page * 10 if page > 0 else page * -10) - 10
selected = []
session = create_connection(language)
try:
for data in send_read_query("""select * from reports where 1 order by id desc limit {0},10""".format(page),
language):
search_data = session.query(Report).order_by(Report.id.desc())[page:page + 11]
for data in search_data:
tmp = { # fix later, junks
"id": data[0],
"date": data[1],
"scan_id": data[2],
"report_filename": data[3],
"events_num": data[4],
"verbose": data[5],
"api_flag": data[6],
"report_type": data[7],
"graph_flag": data[8],
"category": data[9],
"profile": data[10],
"scan_method": data[11],
"language": data[12],
"scan_cmd": data[13],
"ports": data[14]
"id": data.id,
"date": data.date,
"scan_id": data.scan_id,
"report_filename": data.report_filename,
"events_num": data.events_num,
"verbose": data.verbose,
"api_flag": data.api_flag,
"report_type": data.report_type,
"graph_flag": data.graph_flag,
"category": data.category,
"profile": data.profile,
"scan_method": data.scan_method,
"language": data.language,
"scan_cmd": data.scan_cmd,
"ports": data.ports
}
selected.append(tmp)
except:
except Exception as _:
return __structure(status="error", msg="database error!")
return selected
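The `page` arithmetic above converts a 1-based page number into a row offset (negative page numbers are treated as positive); a quick worked example using the same formula:

```python
# page -> offset used for slicing session.query(Report)[offset:offset + 11]
def page_to_offset(page):
    return int(page * 10 if page > 0 else page * -10) - 10

assert page_to_offset(1) == 0    # first page starts at row 0
assert page_to_offset(2) == 10   # second page starts at row 10
assert page_to_offset(-3) == 20  # negative pages are normalised to positive
```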
@@ -237,14 +219,15 @@ def __get_result(language, id):
Returns:
result file content (TEXT, HTML, JSON) if success otherwise and error in JSON type.
"""
session = create_connection(language)
try:
try:
filename = send_read_query("""select report_filename from reports where id=\"{0}\";""".format(id),
language).fetchone()[0]
file_obj = session.query(Report).filter_by(id=id).first()
filename = file_obj.report_filename
return open(filename, 'rb').read(), 200
except:
except Exception as _:
return jsonify(__structure(status="error", msg="cannot find the file!")), 400
except:
except Exception as _:
return jsonify(__structure(status="error", msg="database error!")), 200

@@ -259,6 +242,7 @@ def __last_host_logs(language, page):
Returns:
an array of events in JSON type if success otherwise an error in JSON type
"""
session = create_connection(language)
page = int(page * 10 if page > 0 else page * -10) - 10
data_structure = {
"host": "",

@@ -271,22 +255,19 @@
}
selected = []
try:
for host in send_read_query(
"""select host from hosts_log where 1 group by host order by id desc limit {0},10""".format(
page),
language):
for data in send_read_query(
"""select host,port,type,category,description from hosts_log where host="{0}" group by type,port,username,"""
"""password,description order by id desc""".format(host[0]), language):
for host in session.query(HostsLog).group_by(HostsLog.host).order_by(HostsLog.id.desc())[page:page+11]:
for data in session.query(HostsLog).filter(HostsLog.host==host).group_by(HostsLog.type, HostsLog.port,
HostsLog.username, HostsLog.password, HostsLog.description).order_by(
HostsLog.id.desc()):
n = 0
capture = None
for selected_data in selected:
if selected_data["host"] == host[0]:
if selected_data["host"] == host.host:
capture = n
n += 1
if capture is None:
tmp = { # fix later, junks
"host": data[0],
"host": data.host,
"info": {
"open_ports": [],
"scan_methods": [],

@@ -297,21 +278,21 @@
selected.append(tmp)
n = 0
for selected_data in selected:
if selected_data["host"] == host[0]:
if selected_data["host"] == host.host:
capture = n
n += 1
if data[0] == selected[capture]["host"]:
if data[1] not in selected[capture]["info"]["open_ports"] and type(data[1]) is int:
selected[capture]["info"]["open_ports"].append(data[1])
if data[2] not in selected[capture]["info"]["scan_methods"]:
if data.host == selected[capture]["host"]:
if data.port not in selected[capture]["info"]["open_ports"] and isinstance(data.port, int):
selected[capture]["info"]["open_ports"].append(data.port)
if data.type not in selected[capture]["info"]["scan_methods"]:
selected[capture]["info"][
"scan_methods"].append(data[2])
if data[3] not in selected[capture]["info"]["category"]:
selected[capture]["info"]["category"].append(data[3])
if data[4] not in selected[capture]["info"]["descriptions"]:
"scan_methods"].append(data.type)
if data.category not in selected[capture]["info"]["category"]:
selected[capture]["info"]["category"].append(data.category)
if data.description not in selected[capture]["info"]["descriptions"]:
selected[capture]["info"][
"descriptions"].append(data[4])
except:
"descriptions"].append(data.description)
except Exception as _:
return __structure(status="error", msg="database error!")
if len(selected) == 0:
return __structure(status="finished", msg="No more search results")
@@ -329,25 +310,25 @@ def __logs_by_scan_id(scan_id, language):
Returns:
an array with JSON events or an empty array
"""
try:
logs = []
for log in send_read_query(
"select host,username,password,port,type,date,description from hosts_log where scan_id=\"{0}\"".format(
scan_id), language):
data = {
"SCAN_ID": scan_id,
"HOST": log[0],
"USERNAME": log[1],
"PASSWORD": log[2],
"PORT": log[3],
"TYPE": log[4],
"TIME": log[5],
"DESCRIPTION": log[6]
}
logs.append(data)
return logs
except:
return []
session = create_connection(language)
# try:
return_logs = []
logs = session.query(HostsLog).filter(HostsLog.scan_id==scan_id).all()
for log in logs:
data = {
"SCAN_ID": scan_id,
"HOST": log.host,
"USERNAME": log.username,
"PASSWORD": log.password,
"PORT": log.port,
"TYPE": log.type,
"TIME": log.date,
"DESCRIPTION": log.description
}
return_logs.append(data)
return return_logs
# except:
#     return []


def __logs_to_report_json(host, language):
@@ -362,23 +343,23 @@ def __logs_to_report_json(host, language):
an array with JSON events or an empty array
"""
try:
logs = []
for log in send_read_query(
"select scan_id,username,password,port,type,date,description from hosts_log where host=\"{0}\"".format(
host), language):
session = create_connection(language)
return_logs = []
logs = session.query(HostsLog).filter(HostsLog.host == host)
for log in logs:
data = {
"SCAN_ID": log[0],
"SCAN_ID": log.scan_id,
"HOST": host,
"USERNAME": log[1],
"PASSWORD": log[2],
"PORT": log[3],
"TYPE": log[4],
"TIME": log[5],
"DESCRIPTION": log[6]
"USERNAME": log.username,
"PASSWORD": log.password,
"PORT": log.port,
"TYPE": log.type,
"TIME": log.date,
"DESCRIPTION": log.description
}
logs.append(data)
return logs
except:
return_logs.append(data)
return return_logs
except Exception as _:
return []
@@ -393,20 +374,20 @@ def __logs_to_report_html(host, language):
Returns:
HTML report
"""
session = create_connection(language)
try:
logs = []
for log in send_read_query(
"select host,username,password,port,type,date,description from hosts_log where host=\"{0}\"".format(
host), language):
logs_data = session.query(HostsLog).filter(HostsLog.host == host).all()
for log in logs_data:
data = {
"SCAN_ID": host,
"HOST": log[0],
"USERNAME": log[1],
"PASSWORD": log[2],
"PORT": log[3],
"TYPE": log[4],
"TIME": log[5],
"DESCRIPTION": log[6]
"SCAN_ID": log.scan_id,
"HOST": host,
"USERNAME": log.username,
"PASSWORD": log.password,
"PORT": log.port,
"TYPE": log.type,
"TIME": log.date,
"DESCRIPTION": log.description
}
logs.append(data)
from core.log import build_graph

@@ -421,13 +402,11 @@
'DESCRIPTION', 'TIME')
for value in logs:
_table += _log_data.table_items.format(value['HOST'], value['USERNAME'], value['PASSWORD'],
value['PORT'], value[
'TYPE'], value['DESCRIPTION'],
value['TIME'])
value['PORT'], value['TYPE'], value['DESCRIPTION'], value['TIME'])
_table += _log_data.table_end + '<p class="footer">' + messages("en", "nettacker_report") \
.format(compatible.__version__, compatible.__code_name__, now()) + '</p>'
return _table
except:
except Exception as _:
return ""

@@ -443,6 +422,7 @@ def __search_logs(language, page, query):
Returns:
an array with JSON structure of founded events or an empty array
"""
session = create_connection(language)
page = int(page * 10 if page > 0 else page * -10) - 10
data_structure = {
"host": "",

@@ -455,24 +435,30 @@
}
selected = []
try:
for host in send_read_query(
"""select host from hosts_log where host like \"%%{0}%%\" or date like \"%%{0}%%\" or
port like \"%%{0}%%\" or type like \"%%{0}%%\" or category like \"%%{0}%%\"
or description like \"%%{0}%%\" or username like \"%%{0}%%\" or password
like \"%%{0}%%\" or scan_id like \"%%{0}%%\" or scan_cmd like \"%%{0}%%\"
group by host order by id desc limit {1},10""".format(query, page), language):
for data in send_read_query(
"""select host,port,type,category,description from hosts_log where host="{0}" group by type,port,username,"""
"""password,description order by id desc""".format(host[0]), language):
for host in session.query(HostsLog).filter(
(HostsLog.host.like("%"+str(query)+"%"))
| (HostsLog.date.like("%"+str(query)+"%"))
| (HostsLog.port.like("%"+str(query)+"%"))
| (HostsLog.type.like("%"+str(query)+"%"))
| (HostsLog.category.like("%"+str(query)+"%"))
| (HostsLog.description.like("%"+str(query)+"%"))
| (HostsLog.username.like("%"+str(query)+"%"))
| (HostsLog.password.like("%" + str(query) + "%"))
| (HostsLog.scan_id.like("%" + str(query) + "%"))
| (HostsLog.scan_cmd.like("%" + str(query) + "%"))
).group_by(HostsLog.host).order_by(HostsLog.id.desc())[page:page+11]:
for data in session.query(HostsLog).filter(HostsLog.host==str(host.host)).group_by(HostsLog.type, HostsLog.port,
HostsLog.username, HostsLog.password, HostsLog.description).order_by(
HostsLog.id.desc()).all():
n = 0
capture = None
for selected_data in selected:
if selected_data["host"] == host[0]:
if selected_data["host"] == host.host:
capture = n
n += 1
if capture is None:
tmp = { # fix later, junks
"host": data[0],
"host": data.host,
"info": {
"open_ports": [],
"scan_methods": [],

@@ -483,21 +469,21 @@
selected.append(tmp)
n = 0
for selected_data in selected:
if selected_data["host"] == host[0]:
if selected_data["host"] == host.host:
capture = n
n += 1
if data[0] == selected[capture]["host"]:
if data[1] not in selected[capture]["info"]["open_ports"] and type(data[1]) is int:
selected[capture]["info"]["open_ports"].append(data[1])
if data[2] not in selected[capture]["info"]["scan_methods"]:
if data.host == selected[capture]["host"]:
if data.port not in selected[capture]["info"]["open_ports"] and isinstance(data.port, int):
selected[capture]["info"]["open_ports"].append(data.port)
if data.type not in selected[capture]["info"]["scan_methods"]:
selected[capture]["info"][
"scan_methods"].append(data[2])
if data[3] not in selected[capture]["info"]["category"]:
selected[capture]["info"]["category"].append(data[3])
if data[4] not in selected[capture]["info"]["descriptions"]:
"scan_methods"].append(data.type)
if data.category not in selected[capture]["info"]["category"]:
selected[capture]["info"]["category"].append(data.category)
if data.description not in selected[capture]["info"]["descriptions"]:
selected[capture]["info"][
"descriptions"].append(data[4])
except:
"descriptions"].append(data.description)
except Exception as _:
return __structure(status="error", msg="database error!")
if len(selected) == 0:
return __structure(status="finished", msg="No more search results")
@@ -0,0 +1,61 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, Text

Base = declarative_base()


class Report(Base):
"""
This class defines the table schema of the reports table. Any changes to the reports table need to be done here.
"""
__tablename__ = 'reports'

id = Column(Integer, primary_key=True, autoincrement=True)
date = Column(Text)
scan_id = Column(Text)
report_filename = Column(Text)
events_num = Column(Integer)
verbose = Column(Integer)
api_flag = Column(Integer)
report_type = Column(Text)
graph_flag = Column(Text)
category = Column(Text)
profile = Column(Text)
scan_method = Column(Text)
language = Column(Text)
scan_cmd = Column(Text)
ports = Column(Text)

def __repr__(self):
"""
returns a printable representation of the object of the class Report
"""
return "<Report(id={0}, scan_id={1}, date={2})>".format(self.id, self.scan_id, self.date)


class HostsLog(Base):
"""
This class defines the table schema of the hosts_log table. Any changes to the hosts_log table need to be done here.
"""
__tablename__ = 'hosts_log'

id = Column(Integer, primary_key=True, autoincrement=True)
host = Column(Text)
date = Column(Text)
scan_id = Column(Text)
scan_cmd = Column(Text)
username = Column(Text)
password = Column(Text)
description = Column(Text)
port = Column(Text)
category = Column(Text)
type = Column(Text)

def __repr__(self):
"""
returns a printable representation of the object of the class HostsLog
"""
return "<HostsLog(id={0}, host={1}, date={2})>".format(self.id, self.host, self.date)
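For context, the declarative models above can be exercised on their own against an in-memory SQLite database; a minimal sketch (illustrative only, not part of the commit):

```python
# Stand-alone check of the models added in database/models.py; values are placeholders.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from database.models import Base, HostsLog

engine = create_engine("sqlite://")   # in-memory SQLite, for illustration only
Base.metadata.create_all(engine)      # creates the reports and hosts_log tables
session = sessionmaker(bind=engine)()

session.add(HostsLog(host="127.0.0.1", date="2018-04-28", scan_id="test",
                     scan_cmd="nettacker -i 127.0.0.1", username="", password="",
                     description="demo row", port="80", category="scan", type="port_scan"))
session.commit()
print(session.query(HostsLog).filter(HostsLog.host == "127.0.0.1").all())
```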
@@ -0,0 +1,56 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from sqlalchemy import create_engine

from core.config import _database_config
from database.models import Base


USER = _database_config()["USERNAME"]
PASSWORD = _database_config()["PASSWORD"]
HOST = _database_config()["HOST"]
PORT = _database_config()["PORT"]
DATABASE = _database_config()["DATABASE"]


def mysql_create_database():
"""
when using mysql database, this is the function that is used to create the database for the first time when you run
the nettacker module.

Args:
None

Returns:
True if success otherwise False
"""
try:
engine = create_engine('mysql://{0}:{1}@{2}:{3}'.format(USER, PASSWORD, HOST, PORT))
existing_databases = engine.execute("SHOW DATABASES;")
existing_databases = [d[0] for d in existing_databases]
if DATABASE not in existing_databases:
engine.execute("CREATE DATABASE {0} ".format(DATABASE))
return True
except Exception as _:
return False


def mysql_create_tables():
"""
when using mysql database, this is the function that is used to create the tables in the database for the first
time when you run the nettacker module.

Args:
None

Returns:
True if success otherwise False
"""
try:
db_engine = create_engine('mysql://{0}:{1}@{2}:{3}/{4}'.format(USER, PASSWORD, HOST, PORT, DATABASE))
Base.metadata.create_all(db_engine)
return True
except Exception as _:
return False
@@ -0,0 +1,8 @@
OWASP Nettacker Database Files
=======================
This folder mainly contains all the files which handle the database transactions for the OWASP Nettacker.

* `db.py` contains the database transaction functions
* `models.py` contains the database structure layout
* `mysql_create.py` contains functions to create the db structure mentioned in `models.py` into a MySQL database
* `sqlite_create.py` contains functions to create the db structure mentioned in `models.py` into a SQLite database
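A short usage sketch of how these modules fit together (paths and values are placeholders, not taken from the commit):

```python
# Create the schema once, then write and read data through database/db.py.
from database.sqlite_create import sqlite_create_tables
from database.db import submit_report_to_db, __logs_to_report_json

sqlite_create_tables()  # builds reports/hosts_log per models.py, using the configured DATABASE path

submit_report_to_db(date="2018-04-28", scan_id="demo", report_filename="/tmp/report.html",
                    events_num=0, verbose=1, api_flag=0, report_type="HTML", graph_flag="",
                    category="scan", profile="", scan_method="port_scan", language="en",
                    scan_cmd="nettacker -i 127.0.0.1", ports="")

print(__logs_to_report_json("127.0.0.1", "en"))  # list of event dicts recorded for that host
```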
@@ -0,0 +1,30 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from sqlalchemy import create_engine

from database.models import Base
from core.config import _database_config


DATABASE = _database_config()["DATABASE"]


def sqlite_create_tables():
"""
when using sqlite database, this is the function that is used to create the database schema for the first time when
you run the nettacker module.

Args:
None

Returns:
True if success otherwise False
"""
try:
db_engine = create_engine('sqlite:///{0}'.format(DATABASE))
Base.metadata.create_all(db_engine)
return True
except Exception as _:
return False

@@ -211,5 +211,7 @@ def all_messages():
"no_response": "cannot get response from target",
"category_framework": "category: {0}, frameworks: {1} found!",
"nothing_found": "nothing found on {0} in {1}!",
"no_auth": "No auth found on {0}:{1}"
"no_auth": "No auth found on {0}:{1}",
"invalid_database": "Please select from mysql or sqlite in the configuration file",
"database_connection_failed": "Connection to the selected db failed"
}
@@ -11,6 +11,7 @@ lockfile==0.12.2
bs4==0.0.1
pycurl==7.43.0.1
pyspf==2.0.12t
sqlalchemy==1.2.6
py3DNS; python_version > '3'
scapy-python3; python_version > '3'
pyDNS; python_version < '3'