def _execute_notebook(self, filename, qe_token=None, qe_url=None):
    # Create the preprocessor.
    execute_preprocessor = ExecutePreprocessor(timeout=TIMEOUT,
                                               kernel_name=JUPYTER_KERNEL)
    # Read the notebook.
    with open(filename) as file_:
        notebook = nbformat.read(file_, as_version=4)
    if qe_token and qe_url:
        top_str = "from qiskit import IBMQ\n"
        top_str += "IBMQ.enable_account('{token}', '{url}')".format(token=qe_token,
                                                                    url=qe_url)
        top = nbformat.notebooknode.NotebookNode({'cell_type': 'code',
                                                  'execution_count': 0,
                                                  'metadata': {},
                                                  'outputs': [],
                                                  'source': top_str})
        notebook.cells = [top] + notebook.cells
    # Run the notebook in the folder containing the `qiskit/` module.
    execute_preprocessor.preprocess(
        notebook, {'metadata': {'path': self.execution_path}})
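The same injection pattern works as a standalone sketch. The example below assumes a hypothetical example.ipynb in the working directory and uses nbformat.v4.new_code_cell, which builds the same NotebookNode structure as the hand-rolled dict above but fills in the remaining cell fields with defaults.

import nbformat
from nbconvert.preprocessors import ExecutePreprocessor

# Read the notebook (example.ipynb is a hypothetical input file).
with open('example.ipynb') as file_:
    notebook = nbformat.read(file_, as_version=4)

# new_code_cell returns a NotebookNode with metadata/outputs/execution_count
# already populated, so the injected cell validates as a v4 code cell.
setup_cell = nbformat.v4.new_code_cell(source="print('injected setup cell')")
notebook.cells = [setup_cell] + notebook.cells

# Execute in the current directory.
execute_preprocessor = ExecutePreprocessor(timeout=60, kernel_name='python3')
execute_preprocessor.preprocess(notebook, {'metadata': {'path': '.'}})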
def edit_notebook(self, nb):
    """
    Inject the code needed to set up and shut down the spark and sc magic variables.
    """
    from nbformat.notebooknode import NotebookNode
    from textwrap import dedent

    preamble_node = NotebookNode(cell_type="code", source=dedent("""
        from pyspark.sql import SparkSession
        spark = SparkSession.builder.appName("NotebookTestSuite").master("local[*]").getOrCreate()
        globals()["spark"] = spark
        globals()["sc"] = spark.sparkContext
        """))
    epilogue_node = NotebookNode(cell_type="code", source=dedent("""
        try:
            spark.stop()
        except Exception:
            pass
        """))
    nb.cells.insert(0, preamble_node)
    nb.cells.append(epilogue_node)
    return nb
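Note that NotebookNode(cell_type="code", source=...) only sets the keys it is given, while executors generally also expect metadata, outputs, and execution_count on code cells. A minimal sketch of the same preamble built with nbformat.v4.new_code_cell, which populates those fields:

import nbformat
from textwrap import dedent

# The same Spark preamble as a fully-populated v4 code cell.
preamble_node = nbformat.v4.new_code_cell(source=dedent("""\
    from pyspark.sql import SparkSession
    spark = SparkSession.builder.appName("NotebookTestSuite").master("local[*]").getOrCreate()
    globals()["spark"] = spark
    globals()["sc"] = spark.sparkContext
    """))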
def test_fl_with_trainconfig(isolated_filesystem, start_remote_server_worker_only, hook):
    os.chdir("advanced/Federated Learning with TrainConfig/")
    notebook = "Introduction to TrainConfig.ipynb"
    p_name = Path("examples/tutorials/advanced/Federated Learning with TrainConfig/")
    not_excluded_notebooks.remove(p_name / notebook)
    hook.local_worker.remove_worker_from_registry("alice")
    kwargs = {"id": "alice", "host": "localhost", "port": 8777, "hook": hook}
    data = torch.tensor([[0.0, 1.0], [1.0, 0.0], [1.0, 1.0], [0.0, 0.0]], requires_grad=True)
    target = torch.tensor([[1.0], [1.0], [0.0], [0.0]], requires_grad=False)
    dataset = sy.BaseDataset(data, target)
    process_remote_worker = start_remote_server_worker_only(dataset=(dataset, "xor"), **kwargs)
    res = pm.execute_notebook(notebook, "/dev/null", timeout=300)
    assert isinstance(res, nbformat.notebooknode.NotebookNode)
    process_remote_worker.terminate()
    sy.VirtualWorker(id="alice", hook=hook, is_client_worker=False)
for notebook in notebooks:
    list_name = Path("examples/tutorials/") / notebook
    if list_name in not_excluded_notebooks:
        not_excluded_notebooks.remove(list_name)
        res = pm.execute_notebook(
            notebook,
            "/dev/null",
            parameters={
                "epochs": 1,
                "n_test_batches": 5,
                "n_train_items": 64,
                "n_test_items": 64,
            },
            timeout=300,
        )
        assert isinstance(res, nbformat.notebooknode.NotebookNode)
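Outside the test harness, the same call reduces to a short papermill sketch. The notebook name and parameter names below are hypothetical; writing to /dev/null discards the executed copy while the NotebookNode is still returned.

import papermill as pm
import nbformat

res = pm.execute_notebook(
    'tutorial.ipynb',           # hypothetical input notebook
    '/dev/null',                # discard the executed output file
    parameters={'epochs': 1},   # injected into the notebook's parameters cell
)
assert isinstance(res, nbformat.notebooknode.NotebookNode)
print(len(res.cells), 'cells executed')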
tags = knowledge_repo_data.get('tags')
tldr = knowledge_repo_data.get('tldr')
private = knowledge_repo_data.get('private', False)
updated = datetime.now()
path = nb_path
# if we do not already have a record for this notebook
if 'knowledge' not in nb.metadata or not exists(nb.metadata.knowledge.post_path):
    # new knowledge post: generate an id and a path for the KR copy
    id = str(abs(_gen_hash(nb)))
    created = datetime.now()
    post_folder = join('knowledgerepos', id)
    post_path = join(post_folder, 'post.ipynb')
    nb.metadata.knowledge = nbformat.notebooknode.NotebookNode()
    nb.cells.insert(0, nbformat.notebooknode.NotebookNode())
    nb.cells[0].cell_type = 'raw'
    nb.cells[0].metadata = {}
    nb.cells[0].source = metadata_to_header(title, authors, tags, tldr, created, updated, private)
    # make the post directory
    if not exists(post_folder):
        makedirs(post_folder)
else:
    # existing knowledge post: fetch data from the notebook metadata
    created = datetime.strptime(nb.metadata.knowledge.created, '%Y-%m-%d %H:%M:%S')
    id = nb.metadata.knowledge.id
    post_path = nb.metadata.knowledge.post_path
nb.metadata.knowledge.title = title
nb.metadata.knowledge.authors = authors
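The metadata round-trip this relies on is plain nbformat behavior: NotebookNode supports attribute access, and custom metadata keys survive serialization. A minimal sketch (the knowledge key and path below are illustrative, not this codebase's data):

import nbformat
from datetime import datetime

nb = nbformat.v4.new_notebook()
nb.metadata.knowledge = nbformat.notebooknode.NotebookNode()
nb.metadata.knowledge.created = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
nb.metadata.knowledge.post_path = 'knowledgerepos/123/post.ipynb'  # hypothetical

# Custom metadata keys survive a write/read cycle.
text = nbformat.writes(nb)
nb2 = nbformat.reads(text, as_version=4)
assert nb2.metadata.knowledge.post_path == nb.metadata.knowledge.post_path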
try:
    self.executePreprocessor.preprocess(notebook, {'metadata': {'path': '.'}})
    if _debugging:
        logging.info('Result notebook: ' + nbformat.v4.writes_json(notebook))
    if len(notebook.cells) < 1 or len(notebook.cells[0].outputs) < 1:
        return None
    return self.postprocess_output(notebook.cells[0].outputs)
except Exception:
    exc_type, exc_obj, exc_tb = sys.exc_info()
    if _debugging:
        msg = '\n'.join(traceback.format_exception_only(exc_type, exc_obj) +
                        traceback.format_tb(exc_tb))
    else:
        msg = '\n'.join(traceback.format_exception_only(exc_type, exc_obj))
    out = NotebookNode(output_type='error', html=RClansiconv(msg + '\n'))
    return [out]
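The html key on the error node above is specific to this codebase's R-facing post-processing. For comparison, a standard v4 error output built with nbformat's own factory looks like the sketch below (the exception name and message are illustrative):

import nbformat

error_output = nbformat.v4.new_output(
    output_type='error',
    ename='RuntimeError',        # illustrative exception name
    evalue='example failure',    # illustrative message
    traceback=['RuntimeError: example failure'],
)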
def postprocess_output(self, outputs):
    """
    Postprocess output and map MIME types to ones accepted by R.
    """
    res = []
    for output in outputs:
        msg_type = output.output_type
        content = output
        out = NotebookNode(output_type=msg_type)
        if msg_type in ('display_data', 'execute_result'):
            for mime, data in content['data'].items():
                try:
                    attr = self.MIME_MAP[mime]
                    tmpval = RClansiconv(data) if attr == 'text' else data
                    setattr(out, attr, tmpval)
                except KeyError:
                    raise NotImplementedError('unhandled mime type: %s' % mime)
        elif msg_type == 'stream':
            out.text = RClansiconv(content['text'])
        elif msg_type == 'error':
            out.html = RClansiconv('\n'.join(content['traceback']) + '\n')
        else:
            raise NotImplementedError('unhandled output type: %s' % msg_type)
        res.append(out)
    return res
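MIME_MAP is defined elsewhere in the class; a plausible shape, shown here only as an assumption about what the mapping looks like, is notebook MIME types keyed to the attribute names R consumes:

# Assumed shape of MIME_MAP (not the codebase's actual table).
MIME_MAP = {
    'text/plain': 'text',
    'text/html': 'html',
    'image/png': 'png',
    'image/jpeg': 'jpeg',
}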
def add_code_cell(self, source, *outputs, **kwargs):
    if isinstance(source, list):
        source = '\n'.join(line.rstrip() for line in source)
    self.nb['cells'].append(nbf.v4.new_code_cell(
        source,
        execution_count=self.exec_count,
        outputs=[
            encode_output(output, self.exec_count)
            if not isinstance(output, nbf.notebooknode.NotebookNode)
            else output
            for output in outputs
            if output is not None
        ],
        **kwargs
    ))
    self.exec_count += 1
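A hedged usage sketch of what add_code_cell appends, assuming nbf is the usual import nbformat as nbf alias: build an output with nbf.v4.new_output, attach it to a new code cell, and write the notebook out.

import nbformat as nbf

nb = nbf.v4.new_notebook()

# An execute_result output carrying a plain-text repr.
output = nbf.v4.new_output('execute_result',
                           data={'text/plain': '4'},
                           execution_count=1)
cell = nbf.v4.new_code_cell('2 + 2', execution_count=1, outputs=[output])
nb.cells.append(cell)

nbf.write(nb, 'built.ipynb')  # hypothetical output path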