Here are the examples of the python api django.conf.settings.TASK taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.
3 Examples
View Complete Implementation : tasks.py
Copyright Apache License 2.0
Author : SubstraFoundation
def try_remove_local_folder(subtuple, compute_plan_id):
    """Remove the compute plan's local folder once its final subtuple is done.

    Only acts when execution-environment cleanup is enabled, the subtuple
    belongs to a compute plan, and its rank is -1 (the plan's last tuple).
    """
    if not settings.TASK['CLEAN_EXECUTION_ENVIRONMENT']:
        return
    if compute_plan_id is None:
        return
    # rank -1 marks the last subtuple of the compute plan
    if int(subtuple['rank']) == -1:
        remove_local_folder(compute_plan_id)
0
View Complete Implementation : tasks.py
Copyright Apache License 2.0
Author : SubstraFoundation
@app.task(bind=True, ignore_result=False, base=ComputeTask)
def compute_task(self, tuple_type, subtuple, compute_plan_id):
    """Celery entry point: prepare materials, execute the subtuple, clean up.

    Args:
        tuple_type: kind of tuple to execute (traintuple, testtuple, ...).
        subtuple: tuple description dict as stored in the ledger.
        compute_plan_id: id of the enclosing compute plan, or None.

    Returns:
        dict with 'worker', 'queue', 'computePlanID' and, on success,
        the task output under 'result'.

    Raises:
        celery.exceptions.Retry: on any execution failure, to re-schedule
        the task with the configured countdown and retry limit.
    """
    try:
        # Celery hostname looks like "celery@<worker-name>"; routing_key
        # identifies the queue this task was consumed from.
        worker = self.request.hostname.split('@')[1]
        queue = self.request.delivery_info['routing_key']
    except Exception:
        # Fallback when request metadata is unavailable (e.g. eager mode).
        worker = f"{settings.LEDGER['name']}.worker"
        queue = f"{settings.LEDGER['name']}"

    result = {'worker': worker, 'queue': queue, 'computePlanID': compute_plan_id}

    try:
        prepare_materials(subtuple, tuple_type)
        res = do_task(subtuple, tuple_type)
        result['result'] = res
    except Exception as e:
        # self.retry raises celery.exceptions.Retry, aborting this run.
        # Direct attribute access replaces the previous redundant
        # getattr(settings, '...') calls (identical semantics, including
        # AttributeError when the setting is missing).
        raise self.retry(
            exc=e,
            countdown=int(settings.CELERY_TASK_RETRY_DELAY_SECONDS),
            max_retries=int(settings.CELERY_TASK_MAX_RETRIES))
    finally:
        # Best-effort sandbox cleanup; runs even when a retry is raised.
        if settings.TASK['CLEAN_EXECUTION_ENVIRONMENT']:
            try:
                subtuple_directory = get_subtuple_directory(subtuple)
                remove_subtuple_materials(subtuple_directory)
            except Exception as e:
                logging.exception(e)
    return result
0
View Complete Implementation : tasks.py
Copyright Apache License 2.0
Author : SubstraFoundation
def _do_task(client, subtuple_directory, tuple_type, subtuple, compute_plan_id, rank, org_name):
    """Run a subtuple's algo (and, for testtuples, its metrics) in Docker.

    Builds the host->container volume mapping and the algo command line for
    the given tuple type, executes the algo image via ``compute_docker``,
    saves the produced model(s) to the database, and — for testtuples only —
    runs the metrics image and loads the resulting performance.

    NOTE(review): ``org_name`` is accepted but unused in this body — confirm
    whether callers still need to pass it.

    Returns:
        dict with the saved model file(s)/hash(es) for train-like tuples, or
        ``{'global_perf': ...}`` content for testtuples.
    """
    # Container-side model dir vs. host-side material paths.
    model_folder = '/sandbox/model'
    model_path = path.join(subtuple_directory, 'model')
    data_path = path.join(subtuple_directory, 'data')
    pred_path = path.join(subtuple_directory, 'pred')
    opener_file = path.join(subtuple_directory, 'opener/opener.py')
    algo_path = path.join(subtuple_directory)  # Dockerfile lives at the subtuple root
    algo_docker = f'substra/algo_{subtuple["key"][0:8]}'.lower()  # tag must be lowercase for docker
    algo_docker_name = f'{tuple_type}_{subtuple["key"][0:8]}'
    output_head_model_filename = 'head_model'
    output_trunk_model_filename = 'trunk_model'
    # Keep the image when the compute plan is still running (rank != -1) or
    # when image caching is enabled in settings; otherwise remove it.
    remove_image = not((compute_plan_id is not None and rank != -1) or settings.TASK['CACHE_DOCKER_IMAGES'])

    # VOLUMES
    # Data subfolders may be symlinks; also mount each link's real target
    # (read-only) so the symlinks resolve inside the container.
    symlinks_volume = {}
    for subfolder in os.listdir(data_path):
        real_path = os.path.realpath(os.path.join(data_path, subfolder))
        symlinks_volume[real_path] = {'bind': f'{real_path}', 'mode': 'ro'}

    volumes = {
        data_path: {'bind': '/sandbox/data', 'mode': 'ro'},
        pred_path: {'bind': '/sandbox/pred', 'mode': 'rw'},
        opener_file: {'bind': '/sandbox/opener/__init__.py', 'mode': 'ro'}
    }
    model_volume = {
        model_path: {'bind': model_folder, 'mode': 'rw'}
    }

    # local volume for train like tuples in compute plan: a named Docker
    # volume shared by all tuples of the plan, created on first use.
    if compute_plan_id is not None and tuple_type != TESTTUPLE_TYPE:
        volume_id = get_volume_id(compute_plan_id)
        try:
            client.volumes.get(volume_id=volume_id)
        except docker.errors.NotFound:
            client.volumes.create(name=volume_id)
        model_volume[volume_id] = {'bind': '/sandbox/local', 'mode': 'rw'}

    # generate command — one branch per tuple type; each also suffixes the
    # container name with the sub-command to keep names unique per phase.
    if tuple_type == TRAINTUPLE_TYPE:
        command = 'train'
        algo_docker_name = f'{algo_docker_name}_{command}'
        if subtuple['inModels'] is not None:
            # Positional args: keys of the input models to train from.
            in_traintuple_keys = [subtuple_model["traintupleKey"] for subtuple_model in subtuple['inModels']]
            command = f"{command} {' '.join(in_traintuple_keys)}"
        if rank is not None:
            command = f"{command} --rank {rank}"

    elif tuple_type == TESTTUPLE_TYPE:
        command = 'predict'
        algo_docker_name = f'{algo_docker_name}_{command}'
        if COMPOSITE_TRAINTUPLE_TYPE == subtuple['traintupleType']:
            # Composite parent: predict from separate head/trunk model files.
            composite_traintuple_key = subtuple['traintupleKey']
            command = f"{command} --input-models-path {model_folder}"
            command = f"{command} --input-head-model-filename {PREFIX_HEAD_FILENAME}{composite_traintuple_key}"
            command = f"{command} --input-trunk-model-filename {PREFIX_TRUNK_FILENAME}{composite_traintuple_key}"
        else:
            # Plain parent: single input model passed positionally.
            in_model = subtuple["traintupleKey"]
            command = f'{command} {in_model}'

    elif tuple_type == COMPOSITE_TRAINTUPLE_TYPE:
        command = 'train'
        algo_docker_name = f'{algo_docker_name}_{command}'
        command = f"{command} --output-models-path {model_folder}"
        command = f"{command} --output-head-model-filename {output_head_model_filename}"
        command = f"{command} --output-trunk-model-filename {output_trunk_model_filename}"
        # Input models are optional; both head and trunk must be present.
        if subtuple['inHeadModel'] and subtuple['inTrunkModel']:
            command = f"{command} --input-models-path {model_folder}"
            in_head_model = subtuple['inHeadModel']
            in_head_model_key = in_head_model.get('traintupleKey')
            command = f"{command} --input-head-model-filename {PREFIX_HEAD_FILENAME}{in_head_model_key}"
            in_trunk_model = subtuple['inTrunkModel']
            in_trunk_model_key = in_trunk_model.get('traintupleKey')
            command = f"{command} --input-trunk-model-filename {PREFIX_TRUNK_FILENAME}{in_trunk_model_key}"
        if rank is not None:
            command = f"{command} --rank {rank}"

    elif tuple_type == AGGREGATETUPLE_TYPE:
        command = 'aggregate'
        algo_docker_name = f'{algo_docker_name}_{command}'
        if subtuple['inModels'] is not None:
            in_aggregatetuple_keys = [subtuple_model["traintupleKey"] for subtuple_model in subtuple['inModels']]
            command = f"{command} {' '.join(in_aggregatetuple_keys)}"
        if rank is not None:
            command = f"{command} --rank {rank}"

    # Build and run the algo image with all volumes mounted.
    compute_docker(
        client=client,
        resources_manager=resources_manager,
        dockerfile_path=algo_path,
        image_name=algo_docker,
        container_name=algo_docker_name,
        volumes={**volumes, **model_volume, **symlinks_volume},
        command=command,
        remove_image=remove_image,
        remove_container=settings.TASK['CLEAN_EXECUTION_ENVIRONMENT'],
        capture_logs=settings.TASK['CAPTURE_LOGS']
    )

    # save model in database
    if tuple_type in [TRAINTUPLE_TYPE, AGGREGATETUPLE_TYPE]:
        end_model_file, end_model_file_hash = save_model(subtuple_directory, subtuple['key'])
    elif tuple_type == COMPOSITE_TRAINTUPLE_TYPE:
        # Composite tuples produce two models: head and trunk.
        end_head_model_file, end_head_model_file_hash = save_model(
            subtuple_directory,
            subtuple['key'],
            filename=output_head_model_filename,
        )
        end_trunk_model_file, end_trunk_model_file_hash = save_model(
            subtuple_directory,
            subtuple['key'],
            filename=output_trunk_model_filename,
        )

    # create result
    result = {}
    if tuple_type in (TRAINTUPLE_TYPE, AGGREGATETUPLE_TYPE):
        result['end_model_file_hash'] = end_model_file_hash
        result['end_model_file'] = end_model_file
    elif tuple_type == COMPOSITE_TRAINTUPLE_TYPE:
        result['end_head_model_file_hash'] = end_head_model_file_hash
        result['end_head_model_file'] = end_head_model_file
        result['end_trunk_model_file_hash'] = end_trunk_model_file_hash
        result['end_trunk_model_file'] = end_trunk_model_file

    # evaluation
    if tuple_type != TESTTUPLE_TYPE:  # skip evaluation
        return result

    # Testtuple only: build and run the metrics image against the prediction.
    metrics_path = f'{subtuple_directory}/metrics'
    eval_docker = f'substra/metrics_{subtuple["key"][0:8]}'.lower()  # tag must be lowercase for docker
    eval_docker_name = f'{tuple_type}_{subtuple["key"][0:8]}_eval'
    compute_docker(
        client=client,
        resources_manager=resources_manager,
        dockerfile_path=metrics_path,
        image_name=eval_docker,
        container_name=eval_docker_name,
        volumes={**volumes, **symlinks_volume},
        command=None,
        remove_image=remove_image,
        remove_container=settings.TASK['CLEAN_EXECUTION_ENVIRONMENT'],
        capture_logs=settings.TASK['CAPTURE_LOGS']
    )

    # load performance written by the metrics container into pred/perf.json
    with open(path.join(pred_path, 'perf.json'), 'r') as perf_file:
        perf = json.load(perf_file)
    result['global_perf'] = perf['all']
    return result