import copy

import numpy as np
import pydicom
from scipy import signal
from scipy.interpolate import splev, splprep
from scipy.signal import find_peaks
from tqdm import tqdm


def calculate_coordinates_shell_3d(distance, distance_step_size):
    """Create points along the surface of a sphere (a shell) where no gap
    between points is larger than the defined distance_step_size."""
    number_of_rows = np.ceil(np.pi * distance / distance_step_size).astype(int) + 1

    elevation = np.linspace(0, np.pi, number_of_rows)
    row_radii = distance * np.sin(elevation)
    row_circumference = 2 * np.pi * row_radii
    amount_in_row = np.ceil(row_circumference / distance_step_size).astype(int) + 1

    x_coords = []
    y_coords = []
    z_coords = []
    for i, phi in enumerate(elevation):
        # drop the final azimuth sample so 0 and 2*pi are not duplicated
        azimuth = np.linspace(0, 2 * np.pi, amount_in_row[i] + 1)[:-1]
        x_coords.append(distance * np.sin(phi) * np.cos(azimuth))
        y_coords.append(distance * np.sin(phi) * np.sin(azimuth))
        z_coords.append(distance * np.cos(phi) * np.ones_like(azimuth))

    return (np.hstack(x_coords), np.hstack(y_coords), np.hstack(z_coords))
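# Usage sketch (illustrative values, not from the original source): sample a
# shell of radius 10 with at most 2 units between neighbouring points, then
# confirm that every generated point lies on the requested sphere.
shell_x, shell_y, shell_z = calculate_coordinates_shell_3d(distance=10, distance_step_size=2)
assert np.allclose(np.sqrt(shell_x**2 + shell_y**2 + shell_z**2), 10)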
def create_control_point_sequence(
    beam, beam_collimation, rotation_direction, dose_rate, gantry, coll, nominal_energy
):
    num_cps = len(gantry)
    meterset_weight = np.linspace(0, 1, num_cps)
    # `bounding_control_points` is a helper defined elsewhere in the source;
    # it supplies template "first" and "mid" control points.
    cp_bounds = bounding_control_points(
        beam, beam_collimation, rotation_direction, dose_rate
    )

    control_point_sequence = pydicom.Sequence([copy.deepcopy(cp_bounds["first"])])
    control_point_sequence[0].GantryAngle = str(gantry[0])
    control_point_sequence[0].BeamLimitingDeviceAngle = str(coll[0])
    control_point_sequence[0].NominalBeamEnergy = nominal_energy

    for i in range(1, num_cps - 1):
        cp = copy.deepcopy(cp_bounds["mid"])
        cp.GantryAngle = str(gantry[i])
        cp.BeamLimitingDeviceAngle = str(coll[i])
        # The source snippet is truncated here; the lines below are a hedged
        # completion: each mid control point presumably records its cumulative
        # meterset weight (the otherwise unused meterset_weight array), is
        # appended, and the built sequence is returned.
        cp.CumulativeMetersetWeight = str(meterset_weight[i])
        control_point_sequence.append(cp)

    return control_point_sequence
# Fragment from the field-junction analysis; the enclosing j/k peak-matching
# loops appear further below.
if (
    abs(peak2 - peak1) < 2500
):  # if the two peaks are close together, proceed to the analysis
    cumsum_prev = 1e7
    if peak2 < peak1:  # this guarantees that we always slide the overlay
        amp_base_res = amplitude[:, k]
        amp_overlay_res = amplitude[:, j]
    else:
        amp_base_res = amplitude[:, j]
        amp_overlay_res = amplitude[:, k]

    if peak_type[j] == 0:
        inc = -1
    else:
        inc = 1

    for i in range(0, inc * 80, inc):
        # definition of the distance axis
        x = np.linspace(
            0, 0 + (len(amp_base_res) * dx), len(amplitude), endpoint=False
        )
        amp_overlay_res_roll = np.roll(amp_overlay_res, i)

        # combined profile in a window of +-1000 samples around the peak
        amp_tot = (
            amp_base_res[peaks[j] - 1000 : peaks[j] + 1000]
            + amp_overlay_res_roll[peaks[j] - 1000 : peaks[j] + 1000]
        )
        xsel = x[peaks[j] - 1000 : peaks[j] + 1000]

        amp_filt = running_mean(amp_tot, 281)
        cumsum = np.sum(np.abs(amp_tot - amp_filt))
        if cumsum > cumsum_prev:  # then we went too far
            break  # assumption: the source is truncated; stopping here is a guess
        cumsum_prev = cumsum  # assumption: track the mismatch of the previous shift

# Isolated line from the plotting section of the same source:
ax = fig.add_subplot(amplitude.shape[1] - 1, 1, kk)
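# `running_mean` is not defined in these snippets.  A minimal sketch of a
# plausible implementation, assuming it is a plain centred moving average
# (an assumption -- the original helper may differ, e.g. in edge handling):
def running_mean(values, window):
    """Smooth `values` with a moving average of length `window`."""
    kernel = np.ones(window) / window
    return np.convolve(values, kernel, mode="same")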
def calculate_coordinates_shell_2d(distance, distance_step_size):
    """Create points along the circumference of a circle. The spacing
    between points is not larger than the defined distance_step_size.
    """
    amount_to_check = (
        np.ceil(2 * np.pi * distance / distance_step_size).astype(int) + 1
    )
    # drop the final sample so 0 and 2*pi are not duplicated
    theta = np.linspace(0, 2 * np.pi, amount_to_check + 1)[:-1]
    x_coords = distance * np.cos(theta)
    y_coords = distance * np.sin(theta)

    return (x_coords, y_coords)
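# Usage sketch (illustrative values, not from the original source): points on
# a circle of radius 10 spaced no more than 2 units apart.
circle_x, circle_y = calculate_coordinates_shell_2d(distance=10, distance_step_size=2)
assert np.allclose(np.hypot(circle_x, circle_y), 10)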
def define_penumbra_points_at_origin(edge_lengths, penumbra):
    penumbra_range = np.linspace(-penumbra / 2, penumbra / 2, 11)

    def _each_edge(current_edge_length, orthogonal_edge_length):
        half_field_range = np.linspace(
            -orthogonal_edge_length / 4, orthogonal_edge_length / 4, 51
        )
        a_side_lookup = -current_edge_length / 2 + penumbra_range
        b_side_lookup = current_edge_length / 2 + penumbra_range
        current_axis_lookup = np.concatenate([a_side_lookup, b_side_lookup])

        return current_axis_lookup, half_field_range

    edge_points_left_right = _each_edge(edge_lengths[0], edge_lengths[1])
    edge_points_top_bot = _each_edge(edge_lengths[1], edge_lengths[0])

    xx_left_right, yy_left_right = np.meshgrid(*edge_points_left_right)
    # The source snippet is truncated here.  Presumably the top/bottom edges
    # receive the same treatment and all four grids are returned (an assumption):
    xx_top_bot, yy_top_bot = np.meshgrid(*edge_points_top_bot)

    return xx_left_right, yy_left_right, xx_top_bot, yy_top_bot
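# Usage sketch, relying on the hedged completion above: penumbra sample points
# for a 20 x 26 field with a 3-unit penumbra.
xx_lr, yy_lr, xx_tb, yy_tb = define_penumbra_points_at_origin((20, 26), 3)
assert xx_lr.shape == (51, 22)  # 51 half-field samples x (11 + 11) lookup points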
def merge_view_vert(volume, dx, dy):
    junctions = []

    # accumulator for the merged image
    merge_vol = np.zeros((volume.shape[0], volume.shape[1]))

    # one amplitude profile per image, taken along the columns of the centre row
    amplitude = np.zeros((volume.shape[1], volume.shape[2]))

    # definition of the distance axis
    x = np.linspace(0, 0 + (volume.shape[1] * dx), volume.shape[1], endpoint=False)

    # amplitude profiles resampled by a factor of 10
    ampl_resamp = np.zeros(((volume.shape[1]) * 10, volume.shape[2]))

    for item in tqdm(range(0, volume.shape[2])):
        merge_vol = merge_vol + volume[:, :, item]
        amplitude[:, item] = volume[int(volume.shape[0] / 2), :, item]
        # resampling the amplitude vector
        ampl_resamp[:, item] = signal.resample(
            amplitude[:, item], int(len(amplitude)) * 10
        )
    # (the function continues in the source; the snippet is truncated here)
def define_rotation_field_points_at_origin(edge_lengths, penumbra):
    x_half_range = edge_lengths[0] / 2 + penumbra / 2
    y_half_range = edge_lengths[1] / 2 + penumbra / 2

    num_x = np.ceil(x_half_range * 2 * 8) + 1
    num_y = np.ceil(y_half_range * 2 * 8) + 1

    x = np.linspace(-x_half_range, x_half_range, int(num_x))
    y = np.linspace(-y_half_range, y_half_range, int(num_y))

    xx, yy = np.meshgrid(x, y)
    xx_flat = np.ravel(xx)
    yy_flat = np.ravel(yy)

    # keep only the points that fall outside the field-plus-penumbra rectangle
    inside = np.logical_and(
        (np.abs(xx_flat) < x_half_range), (np.abs(yy_flat) < y_half_range)
    )
    xx_flat = xx_flat[np.invert(inside)]
    yy_flat = yy_flat[np.invert(inside)]

    return xx_flat, yy_flat
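# Usage sketch (illustrative values): a ring of points just outside a 20 x 30
# field padded by a 2-unit penumbra, i.e. outside |x| < 11 and |y| < 16.
ring_x, ring_y = define_rotation_field_points_at_origin((20, 30), 2)
assert np.all((np.abs(ring_x) >= 11) | (np.abs(ring_y) >= 16))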
# Fragment from a horizontal/vertical merge routine.
    volume_resort[:, :, np.argmax(diag_stack)] = volume[:, :, i]  # (inside the image re-sorting loop in the source)

    # accumulator for the merged image
    merge_vol = np.zeros((volume_resort.shape[0], volume_resort.shape[1]))

    # one horizontal and one vertical profile per image
    amplitude_horz = np.zeros((volume_resort.shape[1], volume_resort.shape[2]))
    amplitude_vert = np.zeros((volume_resort.shape[0], volume_resort.shape[2]))

    # definition of the distance axes
    y = np.linspace(
        0, 0 + (volume_resort.shape[0] * dy), volume_resort.shape[0], endpoint=False
    )
    x = np.linspace(
        0, 0 + (volume_resort.shape[1] * dy), volume_resort.shape[1], endpoint=False
    )  # note: the source uses dy here as well; dx may be intended for the x axis

    # resampled profiles, one half of the images in each direction
    ampl_resamp_y1 = np.zeros(
        ((volume_resort.shape[0]) * 10, int(volume_resort.shape[2] / 2))
    )
    ampl_resamp_y2 = np.zeros(
        ((volume_resort.shape[0]) * 10, int(volume_resort.shape[2] / 2))
    )
    ampl_resamp_x1 = np.zeros(
        ((volume_resort.shape[1]) * 10, int(volume_resort.shape[2] / 2))
    )
    ampl_resamp_x2 = np.zeros(
        ((volume_resort.shape[1]) * 10, int(volume_resort.shape[2] / 2))
    )
def resample_contour(contour, n=51):
    # fit a linear spline through the 3-D contour and evaluate it at n points
    tck, _ = splprep([contour[0], contour[1], contour[2]], s=0, k=1)
    new_points = splev(np.linspace(0, 1, n), tck)
    return new_points
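# Usage sketch (not from the original source): resample a coarse 3-D polyline,
# here a helix, to 51 evenly parameterised points.
t = np.linspace(0, 4 * np.pi, 20)
resampled = resample_contour([np.cos(t), np.sin(t), t / (4 * np.pi)], n=51)
assert len(resampled) == 3 and len(resampled[0]) == 51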
# Fragment continuing the junction analysis over the resampled profiles.
peaks = []
peak_type = []
for j in range(0, ampl_resamp.shape[1] - 1):
    amp_base_res = signal.savgol_filter(ampl_resamp[:, j], 1501, 1)

    for k in range(j + 1, ampl_resamp.shape[1]):
        amp_overlay_res = signal.savgol_filter(ampl_resamp[:, k], 1501, 1)

        # assumes a single prominent peak per profile
        peak1, _ = find_peaks(amp_base_res, prominence=0.5)
        peak2, _ = find_peaks(amp_overlay_res, prominence=0.5)

        if (
            abs(peak2 - peak1) <= 4000
        ):  # if the peaks are farther apart than this, the two fields are not adjacent
            amp_peak = (ampl_resamp[:, j] + ampl_resamp[:, k]) / 2
            # definition of the distance axis
            x = np.linspace(
                0, 0 + (len(amp_peak) * dx / 10), len(amp_peak), endpoint=False
            )

            peak_pos, _ = find_peaks(
                signal.savgol_filter(
                    amp_peak[min(peak1[0], peak2[0]) : max(peak1[0], peak2[0])],
                    201,
                    3,
                ),
                prominence=0.010,
            )
            peak_neg, _ = find_peaks(
                signal.savgol_filter(
                    -amp_peak[min(peak1[0], peak2[0]) : max(peak1[0], peak2[0])],
                    201,
                    3,
                ),
                prominence=0.010,  # assumption: mirrored from peak_pos; the source snippet is truncated here
            )