def run(self, command, override_src_dir=None):
    src_dir = self.src_dir if override_src_dir is None else override_src_dir
    io.ensure_dir(self.log_dir)
    log_file = os.path.join(self.log_dir, "build.log")
    # we capture all output to a dedicated build log file
    build_cmd = "export JAVA_HOME={}; cd {}; {} > {} 2>&1".format(self.java_home, src_dir, command, log_file)
    self.logger.info("Running build command [%s]", build_cmd)
    if process.run_subprocess(build_cmd):
        msg = "Executing '{}' failed. The last 20 lines in the build log file are:\n".format(command)
        msg += "=========================================================================================================\n"
        with open(log_file, "r", encoding="utf-8") as f:
            msg += "\t"
            msg += "\t".join(f.readlines()[-20:])
        msg += "=========================================================================================================\n"
        msg += "The full build log is available at [{}].".format(log_file)
        raise BuildError(msg)
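
The same capture-and-tail pattern can be reproduced standalone with only the standard library; the sketch below is an illustrative stand-in (the function name and the use of RuntimeError are assumptions, not part of the project code above):

import os
import subprocess

def run_logged(command, log_dir):
    # hypothetical helper: redirect stdout and stderr into a dedicated build log
    os.makedirs(log_dir, exist_ok=True)
    log_file = os.path.join(log_dir, "build.log")
    with open(log_file, "w", encoding="utf-8") as f:
        result = subprocess.run(command, shell=True, stdout=f, stderr=subprocess.STDOUT)
    if result.returncode != 0:
        # surface only the tail of the log in the error message
        with open(log_file, "r", encoding="utf-8") as f:
            tail = "".join(f.readlines()[-20:])
        raise RuntimeError("'{}' failed. Last 20 log lines:\n{}".format(command, tail))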
def download(self, base_url, target_path, size_in_bytes, detail_on_missing_root_url):
    file_name = os.path.basename(target_path)

    if not base_url:
        raise exceptions.DataError("%s and it cannot be downloaded because no base URL is provided."
                                   % detail_on_missing_root_url)
    if self.offline:
        raise exceptions.SystemSetupError("Cannot find %s. Please disable offline mode and retry." % target_path)

    data_url = "%s/%s" % (base_url, file_name)
    try:
        io.ensure_dir(os.path.dirname(target_path))
        if size_in_bytes:
            size_in_mb = round(convert.bytes_to_mb(size_in_bytes))
            self.logger.info("Downloading data from [%s] (%s MB) to [%s].", data_url, size_in_mb, target_path)
        else:
            self.logger.info("Downloading data from [%s] to [%s].", data_url, target_path)
        # we want to have a bit more accurate download progress as these files are typically very large
        progress = net.Progress("[INFO] Downloading data for track %s" % self.track_name, accuracy=1)
        net.download(data_url, target_path, size_in_bytes, progress_indicator=progress)
        progress.finish()
        self.logger.info("Downloaded data from [%s] to [%s].", data_url, target_path)
    except urllib.error.HTTPError as e:
        if e.code == 404 and self.test_mode:
            raise exceptions.DataError("Track [%s] does not support test mode. Please ask the track author to add it or "
                                       "disable test mode and retry." % self.track_name)
        else:
            # handling for other HTTP errors is truncated in the original snippet; propagate as a fallback
            raise
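
Stripped of the project-specific helpers (io, net, convert), the download flow reduces to roughly the following; everything here is an illustrative sketch, not the project's actual net.download:

import os
import urllib.request

def download_file(base_url, target_path):
    # hypothetical minimal downloader mirroring the flow above:
    # create the parent directory, derive the URL from the file name, fetch
    os.makedirs(os.path.dirname(target_path), exist_ok=True)
    data_url = "%s/%s" % (base_url, os.path.basename(target_path))
    urllib.request.urlretrieve(data_url, target_path)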
def write_single_report(report_file, report_format, cwd, headers, data_plain, data_rich,
                        write_header=True, show_also_in_console=True):
    if report_format == "markdown":
        formatter = format_as_markdown
    elif report_format == "csv":
        formatter = format_as_csv
    else:
        raise exceptions.SystemSetupError("Unknown report format '%s'" % report_format)

    if show_also_in_console:
        print_internal(formatter(headers, data_rich))
    if len(report_file) > 0:
        normalized_report_file = rio.normalize_path(report_file, cwd)
        logger.info("Writing report to [%s] (user specified: [%s]) in format [%s]" %
                    (normalized_report_file, report_file, report_format))
        # ensure that the parent folder already exists when we try to write the file...
        rio.ensure_dir(rio.dirname(normalized_report_file))
        with open(normalized_report_file, mode="a+", encoding="UTF-8") as f:
            f.writelines(formatter(headers, data_plain, write_header))
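
write_single_report expects each formatter to accept headers, rows, and a write_header flag, and to return lines suitable for f.writelines. A minimal CSV-flavoured sketch of that contract (the project's real format_as_csv may well differ):

def format_as_csv(headers, data, write_header=True):
    # sketch of the formatter contract only; assumes simple, comma-free cell values
    lines = []
    if write_header:
        lines.append(",".join(str(h) for h in headers) + "\n")
    for row in data:
        lines.append(",".join(str(cell) for cell in row) + "\n")
    return lines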
def install(self, binary):
    self.logger.info("Preparing candidate locally in [%s].", self.install_dir)
    io.ensure_dir(self.install_dir)
    io.ensure_dir(self.node_log_dir)
    io.ensure_dir(self.heap_dump_dir)
    self.logger.info("Unzipping %s to %s", binary, self.install_dir)
    io.decompress(binary, self.install_dir)
    self.es_home_path = glob.glob(os.path.join(self.install_dir, "elasticsearch*"))[0]
    self.data_paths = self._data_paths()

    # the call that captures previous_umask is missing from the original snippet;
    # os.umask(0) is a placeholder that makes the directories below world-writable
    previous_umask = os.umask(0)
    try:
        io.ensure_dir(self.install_dir)
        io.ensure_dir(self.node_log_dir)
        io.ensure_dir(self.data_paths[0])
    finally:
        os.umask(previous_umask)

    mounts = {}
    for car_config_path in self.car.config_paths:
        for root, dirs, files in os.walk(car_config_path):
            env = jinja2.Environment(loader=jinja2.FileSystemLoader(root))

            relative_root = root[len(car_config_path) + 1:]
            absolute_target_root = os.path.join(self.install_dir, relative_root)
            io.ensure_dir(absolute_target_root)

            for name in files:
                source_file = os.path.join(root, name)
                target_file = os.path.join(absolute_target_root, name)
                mounts[target_file] = os.path.join("/usr/share/elasticsearch", relative_root, name)
                if plain_text(source_file):
                    logger.info("Reading config template file [%s] and writing to [%s]." % (source_file, target_file))
                    with open(target_file, mode="a", encoding="utf-8") as f:
                        f.write(_render_template(env, self.config_vars, source_file))
                else:
                    logger.info("Treating [%s] as binary and copying as is to [%s]." % (source_file, target_file))
                    shutil.copy(source_file, target_file)

    docker_cfg = self._render_template_from_file(self.docker_vars(mounts))
    logger.info("Starting Docker container with configuration:\n%s" % docker_cfg)
    It also ensures that the default log path has been created so log files
    can be successfully opened in that directory.
    """
    log_config = log_config_path()
    if not io.exists(log_config):
        io.ensure_dir(io.dirname(log_config))
        source_path = io.normalize_path(os.path.join(os.path.dirname(__file__), "resources", "logging.json"))
        with open(log_config, "w", encoding="UTF-8") as target:
            with open(source_path, "r", encoding="UTF-8") as src:
                # ensure we have a trailing path separator as after LOG_PATH there will only be the file name
                log_path = os.path.join(paths.logs(), "")
                # the logging path might contain backslashes that we need to escape
                log_path = io.escape_path(log_path)
                contents = src.read().replace("${LOG_PATH}", log_path)
                target.write(contents)
    io.ensure_dir(paths.logs())
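
The os.path.join(paths.logs(), "") call is the detail that guarantees a trailing path separator, so substituting ${LOG_PATH} in front of a bare file name yields a valid path. A quick illustration (POSIX paths assumed; the directory shown is made up):

import os

log_path = os.path.join("/home/user/.rally/logs", "")   # -> "/home/user/.rally/logs/"
print(log_path + "rally.log")                           # -> "/home/user/.rally/logs/rally.log"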
def _store(self, doc):
    import json
    io.ensure_dir(self.race_path)
    # if the user has overridden the effective start date, we guarantee a unique file name
    # but do not allow such races to be used for tournaments.
    with open(self._output_file_name(doc), mode="wt", encoding="utf-8") as f:
        f.write(json.dumps(doc, indent=True, ensure_ascii=False))
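
_output_file_name is referenced but not shown; a hypothetical sketch that ties uniqueness to a timestamp field in the document (the field name "race-timestamp" is an assumption, not taken from the snippet):

import os

def _output_file_name(self, doc):
    # hypothetical: a per-race timestamp in the document keeps file names unique
    # even when the user overrides the effective start date
    return os.path.join(self.race_path, "race-%s.json" % doc["race-timestamp"])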
def run(self, command, override_src_dir=None):
    src_dir = self.src_dir if override_src_dir is None else override_src_dir
    logger.info("Building from sources in [%s].", src_dir)
    logger.info("Executing %s...", command)
    io.ensure_dir(self.log_dir)
    log_file = os.path.join(self.log_dir, "build.log")
    # we capture all output to a dedicated build log file
    build_cmd = "export JAVA_HOME={}; cd {}; {} >> {} 2>&1".format(self.java_home, src_dir, command, log_file)
    logger.info("Running build command [%s]", build_cmd)
    if process.run_subprocess(build_cmd):
        msg = "Executing '{}' failed. The last 20 lines in the build log file are:\n".format(command)
        msg += "=========================================================================================================\n"
        with open(log_file, "r", encoding="utf-8") as f:
            msg += "\t"
            msg += "\t".join(f.readlines()[-20:])
        msg += "=========================================================================================================\n"
        msg += "The full build log is available at [{}].".format(log_file)
        raise BuildError(msg)
def _apply_config(source_root_path, target_root_path, config_vars):
    logger = logging.getLogger(__name__)
    for root, dirs, files in os.walk(source_root_path):
        env = jinja2.Environment(loader=jinja2.FileSystemLoader(root))

        relative_root = root[len(source_root_path) + 1:]
        absolute_target_root = os.path.join(target_root_path, relative_root)
        io.ensure_dir(absolute_target_root)

        for name in files:
            source_file = os.path.join(root, name)
            target_file = os.path.join(absolute_target_root, name)
            if plain_text(source_file):
                logger.info("Reading config template file [%s] and writing to [%s].", source_file, target_file)
                # automatically merge config snippets from plugins (e.g. if they want to add config to elasticsearch.yml)
                with open(target_file, mode="a", encoding="utf-8") as f:
                    f.write(_render_template(env, config_vars, source_file))
            else:
                logger.info("Treating [%s] as binary and copying as is to [%s].", source_file, target_file)
                shutil.copy(source_file, target_file)
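
plain_text is used by both copies of the config-walking code but is not shown; a minimal heuristic sketch based on MIME type guessing (the project's real helper may inspect file contents instead):

import mimetypes

def plain_text(file_name):
    # hypothetical heuristic: treat files with a text/* MIME type as templates,
    # everything else as binary
    guessed_type, _ = mimetypes.guess_type(file_name)
    return guessed_type is not None and guessed_type.startswith("text/")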