with open(output_file, 'w') as f:
    nbf.write(nb, f)
return 0
intro = """# How to run the """ + input_file[:-3] + """ model."""
nb['cells'].append(nbf.v4.new_markdown_cell(intro))
line = fp.readline()
# Skip comments
if '"""' in line:
line = skip_comments(fp, line)
nb['cells'].append(nbf.v4.new_markdown_cell(
"First we import all necessary files."))
# Import block
line, import_code = import_block(fp, line)
parts = input_file.split('/')
import_code += "from models." + parts[-1][:REMOVE_EXT] + " import set_up"
nb['cells'].append(nbf.v4.new_code_cell(import_code))
nb['cells'].append(nbf.v4.new_markdown_cell(
"We then initialize global variables."))
# Globals block
line, global_code = globals_block(fp, line)
nb['cells'].append(nbf.v4.new_code_cell(global_code))
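# The fragment above relies on helpers (skip_comments, import_block,
# globals_block) and an `nb` object defined elsewhere in that project. A
# minimal, self-contained sketch of the same nbformat flow, with illustrative
# file name and cell contents:
import nbformat as nbf

nb = nbf.v4.new_notebook()
nb['cells'].append(nbf.v4.new_markdown_cell("# How to run the example model."))
nb['cells'].append(nbf.v4.new_code_cell("import numpy as np"))
with open('example.ipynb', 'w') as f:
    nbf.write(nb, f)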
def cell_marker(text):
    return nbformat.v4.new_markdown_cell(source=_cell_marker_format(text))
def add_markdown_cell(self, markdown):
    """
    Add the given markdown as a new markdown cell.

    :param markdown: markdown source for the new cell
    """
    cell = nbformat.v4.new_markdown_cell(markdown)
    self.notebook["cells"].append(cell)
def gmd(c): return nbf.v4.new_markdown_cell(c)
# Collect .mdl Data
try:
    with open(model_path, 'r') as json_file:
        json_data = json.loads(json_file.read())
        json_data['name'] = name
except FileNotFoundError as e:
    raise ModelNotFoundError('Could not read the file: ' + str(e))
except JSONDecodeError as e:
    raise ModelNotJSONFormatError('The data is not JSON decodable: ' + str(e))
is_ode = (json_data['defaultMode'] == "continuous" if settings is None
          else settings['simulationSettings']['algorithm'] == "ODE")
gillespy2_model = ModelFactory(json_data, is_ode).model
# Create new notebook
cells = []
# Create Markdown Cell with name
cells.append(nbf.new_markdown_cell('# {0}'.format(name)))
try:
    # Create imports cell
    cells.append(nbf.new_code_cell(
        generate_imports_cell(json_data, gillespy2_model,
                              interactive_backend=True)))
    # Create Model Cell
    cells.append(nbf.new_code_cell(generate_model_cell(json_data, name)))
    # Instantiate Model Cell
    cells.append(nbf.new_code_cell('model = {0}()'.format(name)))
    # Sciope Wrapper Cell
    cells.append(nbf.new_code_cell(
        generate_sciope_wrapper_cell(json_data, gillespy2_model)))
    # Sciope lhc Cell
    cells.append(nbf.new_code_cell(generate_sciope_lhc_cell()))
    # Sciope stochmet Cell
    cells.append(nbf.new_code_cell(generate_sciope_stochmet_cell()))
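# The try block above is cut off in this fragment. Following the same pattern,
# the collected cells would be attached to a notebook and serialized; a minimal
# sketch of that tail, assuming `nbf` is nbformat.v4 (file name illustrative):
import nbformat

nb = nbf.new_notebook(cells=cells)
with open('{}.ipynb'.format(name), 'w') as f:
    nbformat.write(nb, f)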
nb['cells'].append(nbf.v4.new_code_cell(
    """spark = SparkSession.builder.getOrCreate()
spark.conf.set("spark.sql.caseSensitive", "true")"""
))
# **** DECOMPRESS HOST DATASETS ****
nb['cells'].append(nbf.v4.new_markdown_cell("## Decompress Dataset"))
nb['cells'].append(nbf.v4.new_code_cell("!wget https://github.com/hunters-forge/mordor/raw/master/datasets/large/apt29/day1/apt29_evals_day1_manual.zip"))
nb['cells'].append(nbf.v4.new_code_cell("!unzip apt29_evals_day1_manual.zip"))
# **** IMPORT HOST DATASETS ****
nb['cells'].append(nbf.v4.new_markdown_cell("## Import Datasets"))
nb['cells'].append(nbf.v4.new_code_cell("df_day1_host = spark.read.json('apt29_evals_day1_manual_2020-05-01225525.json')"))
# **** CREATE TEMPORARY SQL VIEW ****
nb['cells'].append(nbf.v4.new_markdown_cell("## Create Temporary SQL View"))
nb['cells'].append(nbf.v4.new_code_cell("df_day1_host.createTempView('apt29Host')"))
# **** ADVERSARY - DETECTION STEPS ****
nb['cells'].append(nbf.v4.new_markdown_cell("## Adversary - Detection Steps"))
for yaml in yaml_loaded:
    print(" [>] Processing Step {}..".format(yaml['step']))
    # **** MAIN STEPS ****
    nb['cells'].append(nbf.v4.new_markdown_cell("""## {}. {}
**Procedure:** {}
\n**Criteria:** {}
""".format(yaml['step'], yaml['technique']['name'], yaml['procedure'], yaml['criteria'])))
    # **** DETECTIONS ****
    for detection in yaml['detections']:
        nb['cells'].append(nbf.v4.new_markdown_cell(
            "### Detection Type: {} ({})".format(detection['main_type'], detection['modifier_type'])))
        if detection['queries']:
            # **** AVAILABLE QUERIES ****
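# The loop above is truncated after the queries check. A self-contained sketch
# of the recurring pattern in this snippet: render a Spark SQL query into a new
# code cell (query text and table name illustrative):
import nbformat as nbf

nb = nbf.v4.new_notebook()
query = "SELECT Hostname, Channel FROM apt29Host LIMIT 10"
nb['cells'].append(nbf.v4.new_code_cell(
    "df = spark.sql('''{}''')\ndf.show(10, False)".format(query)))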
            'test_suite': components[2],
            'test': components[3].strip(),
            'exercise': exercises[key].strip(),
        }
        contents_1 = UNITTEST_TEMPLATE_1.format(**template_dict)
        contents_2 = UNITTEST_TEMPLATE_2.format(**template_dict)
    else:
        hints = ':'.join(components[1:]).strip()
        template_dict = {
            'num': key[8:],
            'exercise': exercises[key],
            'hints': hints,
        }
        contents_1 = PRACTICE_TEMPLATE_1.format(**template_dict)
        contents_2 = PRACTICE_TEMPLATE_2.format(**template_dict)
    cells.append(nbformat.v4.new_markdown_cell(contents_1))
    cells.append(nbformat.v4.new_code_cell(contents_2))
elif raw_cell.startswith('example'):
    key = raw_cell.strip()
    cells.append(nbformat.v4.new_code_cell(examples[key].strip()))
else:
    raise RuntimeError
nbformat.write(notebook, '{}/Chapter{}.ipynb'.format(path, chapter))
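# Note that the fragment collects cells in a plain list while nbformat.write()
# takes a notebook node; a minimal sketch of the wiring in between (names and
# file path illustrative):
import nbformat

notebook = nbformat.v4.new_notebook()
notebook['cells'] = [nbformat.v4.new_markdown_cell('# Chapter 1')]
nbformat.write(notebook, 'Chapter1.ipynb')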
""".format(analytic['id'], analytic['author'], analytic['creation_date'], analytic['platform'], analytic_playbook_link)
))
# **** TECHNICAL DESCRIPTION ****
nb['cells'].append(nbf.v4.new_markdown_cell("""## Technical Description
{}""".format(analytic['description'])))
# **** HYPOTHESIS ****
nb['cells'].append(nbf.v4.new_markdown_cell("""## Hypothesis
{}""".format(analytic['hypothesis'])))
# **** ANALYTICS ****
nb['cells'].append(nbf.v4.new_markdown_cell("## Analytics"))
nb['cells'].append(nbf.v4.new_markdown_cell("### Initialize Analytics Engine"))
nb['cells'].append(nbf.v4.new_code_cell(
    """from openhunt.mordorutils import *
spark = get_spark()"""
))
nb['cells'].append(nbf.v4.new_markdown_cell("### Download & Process Mordor File"))
nb['cells'].append(nbf.v4.new_code_cell(
    """mordor_file = "{}"
registerMordorSQLTable(spark, mordor_file, "mordorTable")""".format(analytic['validation_dataset'][0]['url'])
))
for a in analytic['analytics']:
    nb['cells'].append(nbf.v4.new_markdown_cell("### {}".format(a['name'])))
    nb['cells'].append(nbf.v4.new_markdown_cell(
        """
| FP Rate | Log Channel | Description |
| :--------| :-----------| :-------------|
| {} | {} | {} |
""".format(a['false_positives'], a['data_sources'], a['description'])
    ))
    nb['cells'].append(nbf.v4.new_code_cell(
        """df = spark.sql(
'''
    cell starting with a level 1 heading. It inserts a label with the notebook
    name just underneath this heading.
    """
    assert cell.cell_type == 'markdown', cell.cell_type
    lines = cell.source.splitlines()
    if lines[0].startswith('# '):
        header_lines = 1
    elif len(lines) > 1 and lines[1].startswith('==='):
        header_lines = 2
    else:
        raise NoHeader
    header = '\n'.join(lines[:header_lines])
    intro_remainder = '\n'.join(lines[header_lines:]).strip()
    res = [
        new_markdown_cell(header),
        new_latex_cell(r'\label{sec:%s}' % nbname),
    ]
    res[0].metadata = cell.metadata
    if intro_remainder:
        res.append(new_markdown_cell(intro_remainder))
    return res
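# The header detection above handles both markdown heading styles; a quick
# standalone check of the two branches (cell sources illustrative):
atx = "# Title\nIntro text."
setext = "Title\n===\nIntro text."
for src in (atx, setext):
    lines = src.splitlines()
    header_lines = 1 if lines[0].startswith('# ') else 2
    print('\n'.join(lines[:header_lines]))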
print("No results returned from the query.")
"""
if __name__ == "__main__":
nb = nbf.v4.new_notebook()
connection_code = db_connection_template.format(os.getenv('NEO4J_HOST'), os.getenv('NEO4J_USERNAME'), os.getenv('NEO4J_PASSWORD'))
nb['cells'] = [nbf.v4.new_code_cell(connection_code)]
query_path = "queries/"
queries = get_all_queries(query_path)
for query in queries:
with open(query, "r") as stream:
q = yaml.safe_load(stream)
nb["cells"].append(nbf.v4.new_markdown_cell("## " + q["name"]))
nb["cells"].append(nbf.v4.new_markdown_cell(q["description"]))
if q["type"] == "graph":
nb['cells'].append(nbf.v4.new_code_cell(graph_query_template.format(q["query"])))
if q["type"] == "table":
nb['cells'].append(nbf.v4.new_code_cell(table_query_template.format(q["query"])))
nbf.write(nb, 'result.ipynb')