# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def run_notebook(self, notebook_filename):
    """Execute *notebook_filename* with the sandbox as its working directory.

    The notebook file is first copied into ``self.sandboxdir`` so any files
    the notebook produces land in the sandbox; the original file is then
    read and executed with ``self.sandboxdir`` as the kernel's path.
    """
    _, nb_name = os.path.split(notebook_filename)
    sandbox_copy = os.path.join(self.sandboxdir, nb_name)
    shutil.copy2(notebook_filename, sandbox_copy)
    with open(notebook_filename) as fh:
        notebook = nbformat.read(fh, as_version=4)
    executor = ExecutePreprocessor(timeout=600, kernel_name='python3')
    # Crank kernel application logging all the way up for diagnostics.
    executor.extra_arguments = ['--Application.log_level=0']
    print("Executing notebook %s in %s" % (notebook_filename, self.sandboxdir))
    executor.preprocess(notebook, {'metadata': {'path': self.sandboxdir}})
def run_notebook(path):
    """Execute the notebook at *path* and report whether it ran cleanly.

    Execution uses ``allow_errors=True`` so all cells run even when some
    fail; error outputs are collected afterwards and a success message is
    printed only when none were found.

    :param path: filesystem path of the ``.ipynb`` file to execute.
    """
    dirname = os.path.dirname(path)
    with open(path) as f:
        nb = nbformat.read(f, as_version=4)
    print("Start ", path)
    sys.stdout.flush()
    proc = ExecutePreprocessor(timeout=600, kernel_name='python3')
    proc.allow_errors = True
    proc.preprocess(nb, {'metadata': {'path': dirname}})
    # Collect every 'error' output across all code cells.
    errors = [output
              for cell in nb.cells if 'outputs' in cell
              for output in cell['outputs']
              if output.output_type == 'error']
    if not errors:
        print(" " + path + " test successfully completed.")
Raises
------
RuntimeError if there is no kernel spec matching the one specified in
the notebook or forced via configuration.
Returns
-------
object
Notebook object from nbformat
"""
# NOTE(review): the `def` line and the start of this docstring are outside
# this view; presumably a method taking a `uri` argument — confirm upstream.
parts = urlparse(uri)
if parts.netloc == '' or parts.netloc == 'file':
# Local file: read directly from the path component of the URI.
with open(parts.path) as nb_fh:
notebook = nbformat.read(nb_fh, 4)
else:
# Remote file: fetch over HTTP(S); raise_for_status surfaces HTTP errors.
import requests
resp = requests.get(uri)
resp.raise_for_status()
notebook = nbformat.reads(resp.text, 4)
# Error if no kernel spec can handle the language requested:
# get_kernel_spec raises when the named kernel is not installed.
kernel_name = self.force_kernel_name if self.force_kernel_name \
else notebook['metadata']['kernelspec']['name']
self.kernel_spec_manager.get_kernel_spec(kernel_name)
return notebook
def format_file_in_place(
src: Path,
line_length: int,
write_back: black.WriteBack,
mode: black.FileMode,
clear_output: bool,
sub_report: "SubReport",
) -> "SubReport":
"""
Format file under `src` path. Return True if changed.
If `write_back` is YES, write reformatted code to the file.
"""
# Load the notebook without converting between nbformat versions.
with src.open() as fp:
src_contents = nbformat.read(fp, as_version=nbformat.NO_CONVERT)
dst_cells: List[Dict] = []
# Run black over each code cell; tally each outcome on the sub-report
# (changed / unchanged / unparseable source).
for cell in src_contents["cells"]:
if cell["cell_type"] == "code":
try:
cell["source"] = format_cell_source(
cell["source"], line_length=line_length, mode=mode
)
sub_report.done(black.Changed.YES)
except black.NothingChanged:
sub_report.done(black.Changed.NO)
except black.InvalidInput:
sub_report.failed()
if clear_output:
try:
# NOTE(review): the block is truncated mid-statement here in this view.
cell["outputs"], cell[
def get_pipeline_metrics(request, source_notebook_path):
    """Get the pipeline metrics tagged in the notebook.

    :param request: RPC request object; ``request.log`` is used when present,
        otherwise the module-level ``logger``.
    :param source_notebook_path: path of the notebook to inspect.
    :return: dict of metrics parsed from the `pipeline-metrics` cell.
    :raises RPCInternalError: when no `pipeline-metrics` cell is tagged or
        its source cannot be parsed.
    """
    # read notebook
    log = request.log if hasattr(request, "log") else logger
    try:
        notebook = nbformat.read(source_notebook_path,
                                 as_version=nbformat.NO_CONVERT)
        metrics_source = parser.get_pipeline_metrics_source(notebook)
        if not metrics_source:
            raise ValueError("No pipeline metrics found. Please tag a cell"
                             " of the notebook with the `pipeline-metrics`"
                             " tag.")
        # get a dict from the 'pipeline parameters' cell source code
        # (`ast` here is a project helper module, not the stdlib `ast`)
        metrics = ast.parse_metrics_print_statements(metrics_source)
    except ValueError as e:
        log.exception("Failed to parse pipeline metrics")
        raise RPCInternalError(details=str(e), trans_id=request.trans_id)
    # Lazy %-style args avoid formatting when INFO is disabled.
    log.info("Pipeline metrics: %s", metrics)
    return metrics
def run_notebook(path):
    """Execute notebook `path` and show any exceptions. Useful for testing."""
    # Use a context manager: the original `open(path)` was never closed,
    # leaking the file handle (and warning under CPython's ResourceWarning).
    with open(path) as f:
        nb = nbformat.read(f, as_version=nbformat.NO_CONVERT)
    ExecutePreprocessor(timeout=600).preprocess(nb, {})
    print('done')
write dataframes to files
:param nb_path: path to a notebook.
:param pkg_dir: Directory to which dataframes are materialized
:param dataframes: List of names of dataframes to materialize
:return: a Notebook object
"""
# NOTE(review): the `def` line and the start of this docstring precede this
# view, and the error-handling branch below is truncated.
import nbformat
from metapack.jupyter.preprocessors import AddEpilog
from metapack.jupyter.exporters import ExecutePreprocessor, Config
from os.path import dirname, join, splitext, basename
from nbconvert.preprocessors.execute import CellExecutionError
# Read the notebook as nbformat version 4.
with open(nb_path) as f:
nb = nbformat.read(f, as_version=4)
root, ext = splitext(basename(nb_path))
c = Config()
# Append the epilog cells that materialize the requested dataframes.
nb, resources = AddEpilog(config=c, pkg_dir=pkg_dir,
dataframes=dataframes
).preprocess(nb, {})
try:
ep = ExecutePreprocessor(config=c)
# Execute with the notebook's own directory as the working path.
nb, _ = ep.preprocess(nb, {'metadata': {'path': dirname(nb_path)}})
except CellExecutionError as e:
# On failure the notebook is saved next to the original with an
# '-errors' suffix; the write itself is cut off in this view.
err_nb_path = join(dirname(nb_path), root + '-errors' + ext)
with open(err_nb_path, 'wt') as f:
def notebook_to_markdown(path: Union[Path, str]) -> str:
    """
    Convert jupyter notebook to hugo-formatted markdown string

    Args:
        path: path to notebook

    Returns: hugo-formatted markdown

    Raises:
        ValueError: if the notebook metadata has no 'front-matter' field.
    """
    # first, update the notebook's metadata
    update_notebook_metadata(path)
    with open(Path(path)) as fp:
        notebook = nbformat.read(fp, as_version=4)
    # Validate explicitly: `assert` is stripped under `python -O`, which
    # would let a notebook without front-matter slip through.
    if 'front-matter' not in notebook['metadata']:
        raise ValueError(
            "You must have a front-matter field in the notebook's metadata")
    front_matter_dict = dict(notebook['metadata']['front-matter'])
    front_matter = json.dumps(front_matter_dict, indent=2)
    c = Config()
    c.MarkdownExporter.preprocessors = [CustomPreprocessor]
    markdown_exporter = MarkdownExporter(config=c)
    markdown, _ = markdown_exporter.from_notebook_node(notebook)
    doctored_md = doctor(markdown)
    # added comment to prevent summary creation
    output = '\n'.join(('---', front_matter, '---', '', doctored_md))
    return output
def notebook_content(self):
    """Read and parse this entry's notebook file.

    :return: the parsed notebook (nbformat v4), or ``None`` when the file
        cannot be read or parsed — deliberate best-effort behaviour, so
        callers can treat ``None`` as "no content".
    """
    filepath = os.path.join(self.path, self.name)
    with io.open(filepath, 'r', encoding='utf-8') as f:
        try:
            nb = nbformat.read(f, as_version=4)
        except Exception:
            # Broad on purpose: any read/parse failure maps to None.
            # (Dropped the unused `as e` binding from the original.)
            nb = None
    return nb