Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): truncated fragment — the task_data/payload dicts are never
# closed and there is no return statement; fuller variants of create_tc_task
# appear later in this file. Kept byte-identical; comments only.
def create_tc_task(event, task, required_task_ids):
# Shell command derived from the triggering GitHub event + task config.
command = build_full_command(event, task)
# The upstream wpt repository gets the dedicated docker worker; any other
# repository (forks) falls back to the generic GitHub worker.
worker_type = ("wpt-docker-worker"
if event["repository"]["full_name"] == 'web-platform-tests/wpt'
else "github-worker")
# Fresh TaskCluster slug used as this task's id.
task_id = taskcluster.slugId()
task_data = {
"taskGroupId": "", # TODO
"created": taskcluster.fromNowJSON(""),
"deadline": taskcluster.fromNowJSON(task["deadline"]),
"provisionerId": task["provisionerId"],
"workerType": worker_type,
"metadata": {
"name": task["name"],
"description": task.get("description", ""),
# Synthesised noreply address for the user who triggered the event.
"owner": "%s@users.noreply.github.com" % event["sender"]["login"],
"source": event["repository"]["url"]
},
"payload": {
"artifacts": task.get("artifacts"),
"command": command,
"image": task.get("image"),
def build_task_graph(event, all_tasks, tasks):
    """Expand *tasks* into concrete TaskCluster task definitions.

    Tasks named in a task's "depends-on" list are created first
    (recursively), so every definition can reference the taskIds of its
    dependencies.

    Returns an OrderedDict mapping task name -> (task_id, task_data),
    ordered such that each task appears after the tasks it depends on.
    """
    graph = OrderedDict()
    # Reuse the ambient TASK_ID as the group id when running inside
    # TaskCluster; otherwise mint a fresh slug.
    group_id = os.environ.get("TASK_ID", taskcluster.slugId())

    def ensure_created(name, definition):
        dep_ids = []
        if "depends-on" in definition:
            for dep_name in definition["depends-on"]:
                # Materialise the dependency first so its id is available.
                if dep_name not in graph:
                    ensure_created(dep_name, all_tasks[dep_name])
                dep_ids.append(graph[dep_name][0])
        tid, tdata = create_tc_task(event, definition, group_id, dep_ids)
        graph[name] = (tid, tdata)

    for name, definition in iteritems(tasks):
        ensure_created(name, definition)
    return graph
# NOTE(review): truncated fragment — cut off after the "env" entry; the
# payload and task_data dicts are never closed and nothing is returned.
# Kept byte-identical; comments only.
def create_tc_task(event, task, taskgroup_id, depends_on_ids):
# Shell command derived from the triggering GitHub event + task config.
command = build_full_command(event, task)
# Fresh TaskCluster slug used as this task's id.
task_id = taskcluster.slugId()
task_data = {
"taskGroupId": taskgroup_id,
"created": taskcluster.fromNowJSON(""),
"deadline": taskcluster.fromNowJSON(task["deadline"]),
"provisionerId": task["provisionerId"],
"schedulerId": task["schedulerId"],
"workerType": task["workerType"],
"metadata": {
"name": task["name"],
"description": task.get("description", ""),
# get_owner is defined elsewhere; presumably derives an email from the
# event sender — confirm against its definition.
"owner": get_owner(event),
"source": event["repository"]["clone_url"]
},
"payload": {
"artifacts": task.get("artifacts"),
"command": command,
"image": task.get("image"),
"maxRunTime": task.get("maxRunTime"),
"env": task.get("env", {}),
# NOTE(review): duplicate of the preceding create_tc_task fragment, truncated
# one line later (the payload dict is closed but task_data is not, and there
# is no return). Kept byte-identical; comments only.
def create_tc_task(event, task, taskgroup_id, depends_on_ids):
# Shell command derived from the triggering GitHub event + task config.
command = build_full_command(event, task)
# Fresh TaskCluster slug used as this task's id.
task_id = taskcluster.slugId()
task_data = {
"taskGroupId": taskgroup_id,
"created": taskcluster.fromNowJSON(""),
"deadline": taskcluster.fromNowJSON(task["deadline"]),
"provisionerId": task["provisionerId"],
"schedulerId": task["schedulerId"],
"workerType": task["workerType"],
"metadata": {
"name": task["name"],
"description": task.get("description", ""),
"owner": get_owner(event),
"source": event["repository"]["clone_url"]
},
"payload": {
"artifacts": task.get("artifacts"),
"command": command,
"image": task.get("image"),
"maxRunTime": task.get("maxRunTime"),
"env": task.get("env", {}),
},
# NOTE(review): truncated fragment (payload closed, task_data not; no
# return). Also note: "env" defaults to a list [] here but to a dict {} in
# the taskgroup_id variants above — likely a bug in one of the source
# snippets, but unfixable in a truncated fragment. Comments only.
def create_tc_task(event, task, required_task_ids):
# Shell command derived from the triggering GitHub event + task config.
command = build_full_command(event, task)
# Upstream wpt repo gets the dedicated docker worker; forks use the
# generic GitHub worker.
worker_type = ("wpt-docker-worker"
if event["repository"]["full_name"] == 'web-platform-tests/wpt'
else "github-worker")
task_id = taskcluster.slugId()
task_data = {
"taskGroupId": "", # TODO
"created": taskcluster.fromNowJSON(""),
"deadline": taskcluster.fromNowJSON(task["deadline"]),
"provisionerId": task["provisionerId"],
"workerType": worker_type,
"metadata": {
"name": task["name"],
"description": task.get("description", ""),
# Synthesised noreply address for the user who triggered the event.
"owner": "%s@users.noreply.github.com" % event["sender"]["login"],
"source": event["repository"]["url"]
},
"payload": {
"artifacts": task.get("artifacts"),
"command": command,
"image": task.get("image"),
"maxRunTime": task.get("maxRunTime"),
"env": task.get("env", []),
},
# NOTE(review): duplicate of the preceding fragment, truncated one line later
# at a dangling '"extras": {' — the dict and function are incomplete.
# Kept byte-identical; comments only.
def create_tc_task(event, task, required_task_ids):
# Shell command derived from the triggering GitHub event + task config.
command = build_full_command(event, task)
# Upstream wpt repo gets the dedicated docker worker; forks use the
# generic GitHub worker.
worker_type = ("wpt-docker-worker"
if event["repository"]["full_name"] == 'web-platform-tests/wpt'
else "github-worker")
task_id = taskcluster.slugId()
task_data = {
"taskGroupId": "", # TODO
"created": taskcluster.fromNowJSON(""),
"deadline": taskcluster.fromNowJSON(task["deadline"]),
"provisionerId": task["provisionerId"],
"workerType": worker_type,
"metadata": {
"name": task["name"],
"description": task.get("description", ""),
"owner": "%s@users.noreply.github.com" % event["sender"]["login"],
"source": event["repository"]["url"]
},
"payload": {
"artifacts": task.get("artifacts"),
"command": command,
"image": task.get("image"),
"maxRunTime": task.get("maxRunTime"),
"env": task.get("env", []),
},
"extras": {
# NOTE(review): fragment of a release-runner CLI script. It starts mid-
# function ('parser' and several add_argument calls precede this view) and
# is cut off after the 'revision' assignment. Kept byte-identical.
parser.add_argument("--partner-build-num", type=int, default=1,
help="Specify the partner build number")
parser.add_argument("--partner-subset", type=str,
help="Specify a comma-delimited subset of partners to repack")
parser.add_argument("--force", action="store_true", default=False,
help="Submit action task without asking")
args = parser.parse_args()
# Release-runner YAML supplies the TaskCluster credentials.
release_runner_config = yaml.safe_load(args.release_runner_config)
tc_config = {
"credentials": {
"clientId": release_runner_config["taskcluster"].get("client_id"),
"accessToken": release_runner_config["taskcluster"].get("access_token"),
},
"maxRetries": 12,
}
queue = taskcluster.Queue(tc_config)
# Copy the previous action task's input so it can be re-submitted/modified.
prev_action_task = get_task(args.action_task_id)
action_task_input = copy.deepcopy(prev_action_task["extra"]["action"]["context"]["input"])
decision_task_id = args.decision_task_id
# parameters.yml comes from the decision task when given, otherwise from
# the previous action task itself.
if decision_task_id:
params_yaml = get_artifact_text(queue, decision_task_id, 'public/parameters.yml')
else:
params_yaml = get_artifact_text(queue, args.action_task_id, 'public/parameters.yml')
# Flavors look like "<something>_<product>..."; take the product segment.
product = args.action_flavor.split('_')[1]
repo_param_prefix = get_repo_param_prefix(release_runner_config['releases'], product)
parameters = yaml.safe_load(params_yaml)
project = parameters["project"]
revision = parameters['{}head_rev'.format(repo_param_prefix)]
# NOTE(review): fragment of a TaskCluster signed-URL builder. It starts in
# the middle of a mohawk Resource(...) constructor call (the opening of that
# call and the enclosing method signature are outside this view).
# Kept byte-identical; comments only.
},
method='GET',
# Hawk 'ext' carries taskcluster-specific authorization data.
ext=utils.toStr(self.makeHawkExt()),
url=requestUrl,
timestamp=expiration,
nonce='',
# content='',
# content_type='',
)
bewit = mohawk.bewit.get_bewit(resource)
# Strip base64 '=' padding so the bewit is URL-safe without encoding.
return bewit.rstrip('=')
bewit = genBewit()
if not bewit:
raise exceptions.TaskclusterFailure('Did not receive a bewit')
# Append the bewit to the existing query string and rebuild the URL.
u = urllib.parse.urlparse(requestUrl)
qs = u.query
if qs:
qs += '&'
qs += 'bewit=%s' % bewit
return urllib.parse.urlunparse((
u.scheme,
u.netloc,
u.path,
u.params,
qs,
u.fragment,
))
# NOTE(review): truncated fragment — the while loop body is cut off after
# the first statement, so the namespace traversal is incomplete here.
# Kept byte-identical; comments only.
def get_task_metrics_from_date(model, date, output_directory):
options = get_taskcluster_options()
index = taskcluster.Index(options)
# Fail fast if the index service is unreachable.
index.ping()
# Split the date
# Presumably the date is dot-separated (e.g. "2020.06.01") — confirm
# against callers.
from_date = date.split(".")
namespaces = []
# Start at the root level
# We need an empty list in order to append namespaces part to it
namespaces.append([])
# Recursively list all namespaces greater or equals than the given date
# Stack-based traversal: pop a namespace path, expand its children.
while namespaces:
current_ns = namespaces.pop()
# NOTE(review): Python 2 fragment (print statement at the '-h' branch).
# It starts mid-script — the sys.exit(2) belongs to an error handler whose
# try/except is outside this view. Kept byte-identical; comments only.
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
# '<data name="">' looks like HTML-mangled usage text — confirm against
# the original source.
print name + '-s -o -n <data name=""> -d (decode base64)'
sys.exit()
# NOTE(review): ("-s") is a plain string, not a tuple, so 'in' does
# substring matching here; it only works because getopt yields exactly
# "-s" etc. Should be ("-s",) — unfixable in a doc-only pass.
elif opt in ("-s"):
secret_path = arg
elif opt in ("-o"):
output_file = arg
elif opt in ("-n"):
data_name = arg
elif opt in ("-d"):
decode = True
# Default the secret key name to the last path component.
if data_name == '':
data_name = os.path.basename(secret_path)
# Fetch the secret through the taskcluster proxy and extract one entry.
secrets = taskcluster.Secrets({'rootUrl': os.environ['TASKCLUSTER_PROXY_URL']})
data = secrets.get(secret_path)
data = data['secret'][data_name]
if decode:
data = base64.b64decode(data)
# Text-mode write of possibly-binary decoded data — fine on Python 2,
# would need 'wb' under Python 3.
with open(output_file, 'w') as output:
output.write(data)
</data>