Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
from rich.progress import track
import time

# Demo: drive a rich progress bar over a trivial range.
for i in track(range(1)):
    # do_step(i)
    if i == 50:  # early-exit hook (never reached: range(1) only yields 0)
        exit(1)
    time.sleep(10)
)
# Third camera preset: explicit view parameters handed to buildcam.
cam3_settings = {
    "position": [1862.135, -4020.792, -36292.348],
    "focal": [6587.835, 3849.085, 5688.164],
    "viewup": [0.185, -0.97, 0.161],
    "distance": 42972.44,
    "clipping": [29629.503, 59872.10],
}
cam3 = buildcam(cam3_settings)
# ------------------------------- Create frames ------------------------------ #
# Create frames
prev_neurons = []
for step in track(
np.arange(N_FRAMES), total=N_FRAMES, description="Generating frames..."
):
if step % N_frames_for_change == 0: # change neurons every N framse
# reset neurons from previous set of neurons
for neuron in prev_neurons:
for component, actor in neuron.items():
actor.alpha(minalpha)
actor.color(darkcolor)
prev_neurons = []
# highlight new neurons
neurons = choices(neurons_actors, k=N_neurons_in_frame)
for n, neuron in enumerate(neurons):
color = colorMap(
n, "Greens_r", vmin=-2, vmax=N_neurons_in_frame + 3
def main(self):
    """Generate a pytest module of encryption tests.

    Writes ``test_main_generated.py``: first the import line the generated
    tests need, then one generated test per randomly encrypted sentence
    produced by ``self.enCiphey_obj``.
    """
    with open("test_main_generated.py", "w") as f:
        f.write("from ciphey.__main__ import main, make_default_config")
        print("Opened file")  # fix: message previously read "Opened fild"
        # range() stops before HOW_MANY_TESTS, so HOW_MANY_TESTS - 1 tests
        # are generated; the index itself is unused.
        for _ in track(range(1, self.HOW_MANY_TESTS)):
            print("In the for loop")
            # Draw a fresh randomly encrypted sentence to build a test from.
            x = self.enCiphey_obj.getRandomEncryptedSentence()
            print(x)
            # if x["CipherUsed"] == "MorseCode":
            #     self.make_test_lc_true_template(cipher=x)
            to_append = self.make_test_lc_true_template(cipher=x)
            print(f"Adding {to_append}")
            f.write(to_append)
:param savedir: str, folder in which to save the image
:param imagesids: list of int with images IDs
:param downsample: downsample factor, to reduce the image size and resolution (Default value = 0)
:param annotated: if True the images are overlaid with annotations (Default value = True)
:param snames: optional list of structure/section names, one per image, appended to the saved file names (Default value = None)
:param atlas_svg: if True fetches the images as SVG, otherwise as PNG (Default value = True)
"""
savedir = Path(savedir)
savedir.mkdir(exist_ok=True)
curdir = Path.cwd()
chdir(savedir)
for i, imgid in track(
enumerate(imagesids),
total=len(imagesids),
description="downloading iamges...",
):
if not atlas_svg and not annotated:
savename = str(imgid) + ".jpg"
elif not atlas_svg and annotated:
savename = str(imgid) + "_annotated.jpg"
else:
savename = str(imgid) + ".svg"
if snames is not None:
sname, ext = savename.split(".")
savename = (
sname + "_sect{}_img{}.".format(snames[i], i + 1) + ext
)
]
# We use gather to create a single task from a set of tasks
# which download CVEs for each version of curl. Otherwise
# the progress bar would show that we are closer to
# completion than we think, because lots of curl CVEs (for
# each version) have been downloaded
tasks.append(
asyncio.gather(
*[
self.download_curl_version(self.session, version)
for version in curl_metadata
],
)
)
total_tasks = len(nvd_metadata) + 1
for task in track(
asyncio.as_completed(tasks),
description="Downloading CVEs...",
total=total_tasks,
):
await task
self.was_updated = True
await self.session.close()
self.session = None
def download_streamlines(eids, streamlines_folder=None): # pragma: no cover
"""
Given a list of experimental IDs, it downloads the streamline data from the https://neuroinformatics.nl cache and saves them as
json files.
:param eids: list of integers with experiments IDs
:param streamlines_folder: str path to the folder where the JSON files should be saved, if None the default is used (Default value = None)
"""
streamlines_folder = Path(streamlines_folder)
if not isinstance(eids, (list, np.ndarray, tuple)):
eids = [eids]
filepaths, data = [], []
for eid in track(eids, total=len(eids), description="downloading"):
url = make_url_given_id(eid)
jsonpath = streamlines_folder / f"{eid}.json"
filepaths.append(str(jsonpath))
if not jsonpath.exists():
response = request(url)
# Write the response content as a temporary compressed file
temp_path = streamlines_folder / "temp.gz"
with open(str(temp_path), "wb") as temp:
temp.write(response.content)
# Open in pandas and delete temp
url_data = pd.read_json(
str(temp_path), lines=True, compression="gzip"
)
if isinstance(color[0], (float, int)): # it's an rgb color
color = [color for i in sl_file]
elif len(color) != len(sl_file):
raise ValueError(
"Wrong number of colors, should be one per streamline or 1"
)
else:
color = [color for i in sl_file]
else:
color = ["salmon" for i in sl_file]
actors = []
if isinstance(
sl_file[0], (str, pd.DataFrame)
): # we have a list of files to add
for slf, col in track(
zip(sl_file, color),
total=len(sl_file),
description="parsing streamlines",
):
if isinstance(slf, str):
streamlines = parse_streamline(
color=col, filepath=slf, *args, **kwargs
)
else:
streamlines = parse_streamline(
color=col, data=slf, *args, **kwargs
)
actors.extend(streamlines)
else:
raise ValueError(
"unrecognized argument sl_file: {}".format(sl_file)
position=[1862.135, -4020.792, -36292.348],
focal=[6587.835, 3849.085, 5688.164],
viewup=[0.185, -0.97, 0.161],
distance=42972.44,
clipping=[29629.503, 59872.10],
)
)
# Iniziale camera position
startcam = scene.plotter.moveCamera(cam1, cam2, frac[0])
# ------------------------------- Create frames ------------------------------ #
# Create frames
prev_neurons = []
for step in track(
np.arange(N_FRAMES), total=N_FRAMES, description="Generating frames..."
):
if step % N_frames_for_change == 0: # change neurons every N framse
# reset neurons from previous set of neurons
for neuron in prev_neurons:
for component, actor in neuron.items():
actor.alpha(minalpha)
actor.color(darkcolor)
prev_neurons = []
# highlight new neurons
neurons = choices(scene.actors["neurons"], k=N_neurons_in_frame)
for n, neuron in enumerate(neurons):
color = colorMap(
n, "Greens_r", vmin=-2, vmax=N_neurons_in_frame + 3
position=[1862.135, -4020.792, -36292.348],
focal=[6587.835, 3849.085, 5688.164],
viewup=[0.185, -0.97, 0.161],
distance=42972.44,
clipping=[29629.503, 59872.10],
)
)
# Iniziale camera position
startcam = scene.plotter.moveCamera(cam1, cam2, frac[0])
# ------------------------------- Create frames ------------------------------ #
# Create frames
prev_streamlines = []
for step in track(
np.arange(N_FRAMES), total=N_FRAMES, description="Generating frames..."
):
if step % N_frames_for_change == 0: # change neurons every N framse
# reset neurons from previous set of neurons
for mesh in prev_streamlines:
mesh.alpha(minalpha)
mesh.color(darkcolor)
prev_streamlines = []
# highlight new neurons
streamlines = choices(scene.actors["tracts"], k=N_streamlines_in_frame)
for n, mesh in enumerate(streamlines):
# color = colorMap(n, 'Reds', vmin=-2, vmax=N_streamlines_in_frame+3)
mesh.alpha(0.7)
mesh.color("orangered")