Checkout to a specific commit. If commit is None then checkout to master. def checkout_commit(repo_path: str, commit: Any = None) -> None: # pylint:disable=redefined-outer-name """Checkout to a specific commit. If commit is None then checkout to master. """ commit = commit or...
Create a tar file based on the list of files passed def create_tarfile(files: List[str], tar_path: str) -> None: """Create a tar file based on the list of files passed""" with tarfile.open(tar_path, "w:gz") as tar: for f in files: tar.add(f)
Open and close the broker channel. def check() -> Result: """Open and close the broker channel.""" try: # Context to release connection with Connection(conf.get('CELERY_BROKER_URL')) as conn: conn.connect() except ConnectionRefusedError: retur...
Return a list of suggestions based on grid search. Params: matrix: `dict` representing the {hyperparam: hyperparam matrix config}. n_suggestions: number of suggestions to make. def get_suggestions(self, iteration_config=None): """Return a list of suggestions based on grid searc...
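The grid-search body above is truncated; as a rough sketch of the idea, suggestions can be drawn from the cartesian product of the matrix values. The `{hyperparam: [values]}` dict shape and the `grid_suggestions` helper below are assumptions for illustration, not the project's actual config objects.

```python
# Hypothetical sketch of grid-search suggestions: the matrix is assumed to be a
# plain {hyperparam_name: [candidate values]} dict rather than a validated config.
import itertools
from typing import Dict, List

def grid_suggestions(matrix: Dict[str, list], n_suggestions: int) -> List[Dict]:
    """Return up to `n_suggestions` hyperparam dicts from the cartesian product."""
    keys = sorted(matrix)
    product = itertools.product(*(matrix[key] for key in keys))
    return [dict(zip(keys, values)) for values in itertools.islice(product, n_suggestions)]

# Example: a 2 x 2 grid, keeping only the first 3 combinations.
print(grid_suggestions({'lr': [0.01, 0.1], 'dropout': [0.2, 0.5]}, 3))
```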
Find argmax of the acquisition function. def _maximize(self): """ Find argmax of the acquisition function.""" if not self.space.is_observations_valid(): return None y_max = self.space.y.max() self.utility_function.gaussian_process.fit(self.space.x, self.space.y) retu...
Construct the DAG of this pipeline based on its operations and their downstream. def dag(self) -> Tuple[Dict, Dict]: """Construct the DAG of this pipeline based on its operations and their downstream.""" from pipelines import dags operations = self.operations.all().prefetch_related('do...
Calculate the countdown for a celery task retry. def get_countdown(self, retries) -> int: """Calculate the countdown for a celery task retry.""" retry_delay = self.retry_delay if self.retry_exponential_backoff: return min( max(2 ** retries, retry_delay), # Exp. back...
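The backoff expression above is cut off after the exponential term; a minimal sketch of the usual capped exponential backoff, assuming a hypothetical `max_retry_delay` ceiling, is:

```python
# Sketch only: `max_retry_delay` is an assumed cap, since the original line is truncated.
def get_countdown(retries: int,
                  retry_delay: int = 60,
                  max_retry_delay: int = 600,
                  retry_exponential_backoff: bool = True) -> int:
    """Calculate the countdown for a retry, in seconds."""
    if retry_exponential_backoff:
        # Exponential backoff, never below the base delay and never above the cap.
        return min(max(2 ** retries, retry_delay), max_retry_delay)
    return retry_delay

# 2**10 = 1024 exceeds the assumed cap, so the countdown is clamped to 600 seconds.
assert get_countdown(retries=10) == 600
```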
Return the params to run the celery task. def get_run_params(self) -> Dict: """Return the params to run the celery task.""" params = {} if self.celery_queue: params['queue'] = self.celery_queue if self.timeout: params['soft_time_limit'] = self.timeout ...
Check whether the current instance can transition from `status_from` to `status_to`. Returns: boolean: whether the transition is allowed. def can_transition(self, status_from: str, status_to: str) -> bool: """Check whether the current instance can transition from `status_from` to `status_to`. Returns: boolean: whether the transition is allowed. """ ...
Construct the DAG of this pipeline run based on its operation runs and their downstream. def dag(self) -> Tuple[Dict, Dict]: """Construct the DAG of this pipeline run based on its operation runs and their downstream. """ from pipelines import dags operation_runs...
Checks the concurrency of the operation run. Checks the concurrency of the operation run to validate if we can start a new operation run. Returns: boolean: Whether to start a new operation run or not. def check_concurrency(self) -> bool: """Checks the concurrency of the op...
Checks the upstream and the trigger rule. def check_upstream_trigger(self) -> bool: """Checks the upstream and the trigger rule.""" if self.operation.trigger_policy == TriggerPolicy.ONE_DONE: return self.upstream_runs.filter( status__status__in=self.STATUSES.DONE_STATUS).exi...
Schedule the task: check first if the task can start: 1. we check that the task is still in the CREATED state. 2. we check that the upstream dependency is met. 3. we check that pipeline can start a new task; i.e. we check the concurrency of the pipeline. 4. ...
Start the celery task of this operation. def start(self) -> None: """Start the celery task of this operation.""" kwargs = self.celery_task_context # Update the operation run id kwargs['operation_run_id'] = self.id # pylint:disable=unsupported-assignment-operation async_resu...
Return the image name and image tag for an experiment def get_experiment_image_info(experiment: 'Experiment') -> Tuple[str, str]: """Return the image name and image tag for an experiment""" project_name = experiment.project.name repo_name = project_name image_name = '{}/{}'.format(conf.get('REGISTRY_U...
Return the image name and image tag for a job def get_job_image_info(project: 'Project', job: Any) -> Tuple[str, str]: """Return the image name and image tag for a job""" project_name = project.name repo_name = project_name image_name = '{}/{}'.format(conf.get('REGISTRY_URI'), repo_name) try: ...
Return the image name and image tag for a notebook job def get_notebook_image_info(project: 'Project', job: Any) -> Tuple[str, str]: """Return the image name and image tag for a notebook job""" image_name, _ = get_job_image_info(project, job) return image_name, LATEST_IMAGE_TAG
Create an iteration for the experiment group. def create_iteration(self, num_suggestions): """Create an iteration for the experiment group.""" from db.models.experiment_groups import ExperimentGroupIteration iteration_config = self.experiment_group.iteration_config if iteration_config...
Update the last experiment group's iteration with experiment performance. def update_iteration(self): """Update the last experiment group's iteration with experiment performance.""" iteration_config = self.get_iteration_config() if not iteration_config: return experiments_me...
Return the number of configs to keep and resume. def get_n_config_to_keep(self, n_suggestions, bracket_iteration): """Return the number of configs to keep and resume.""" n_configs = n_suggestions * (self.eta ** -bracket_iteration) return int(n_configs / self.eta)
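A quick worked example of this rule, assuming a reduction factor `eta = 3` and 27 initial suggestions (integer arithmetic is used here for clarity; the method itself uses the float form shown above):

```python
eta = 3
n_suggestions = 27
for bracket_iteration in range(3):
    # 27 / 3**0 = 27, 27 / 3**1 = 9, 27 / 3**2 = 3 configs enter the bracket iteration...
    n_configs = n_suggestions // (eta ** bracket_iteration)
    # ...and one further division by eta gives how many to keep: 9, 3, 1.
    print(bracket_iteration, n_configs // eta)
```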
Return the number of configs to keep for an iteration and iteration bracket. This is just a util function around `get_n_config_to_keep` def get_n_config_to_keep_for_iteration(self, iteration, bracket_iteration): """Return the number of configs to keep for an iteration and iteration bracket. Thi...
Return the number of iterations to run for this bracket iteration. This is just a util function around `get_n_resources` def get_n_resources_for_iteration(self, iteration, bracket_iteration): """Return the number of iterations to run for this bracket iteration. This is just a util function around `get_n_resources`...
Return a list of suggestions/arms based on hyperband. def get_suggestions(self, iteration_config=None): """Return a list of suggestions/arms based on hyperband.""" if not iteration_config or not isinstance(iteration_config, HyperbandIterationConfig): raise ValueError('Hyperband get suggesti...
Return a boolean to indicate if we need to reschedule another iteration. def should_reschedule(self, iteration, bracket_iteration): """Return a boolean to indicate if we need to reschedule another iteration.""" bracket = self.get_bracket(iteration=iteration) if bracket_iteration < bracket: ...
Return a boolean to indicate if we need to reschedule another bracket iteration. def should_reduce_configs(self, iteration, bracket_iteration): """Return a boolean to indicate if we need to reschedule another bracket iteration.""" n_configs_to_keep = self.get_n_config_to_keep_for_iteration( ...
Validates a resource def validate_resource(resource) -> None: """Validates a resource""" if resource is not None and not isinstance(resource, dict): raise ValidationError('The resource is not valid.') if isinstance(resource, dict) and set(resource.keys()) <= {'requests', 'limits'}: raise V...
Pod init container for setting outputs path. def get_init_container(self, init_command, init_args, env_vars, context_mounts, persistence_outputs, persistence...
We need to nest the git path inside the project path to make it easier to create docker images. def path(self) -> str: """We need to nest the git path inside the project path to make it easier to create docker images.""" return os.path.join(self.project_path, self.project.name)
Returns a tuple (hash, commit object) def last_commit(self) -> Tuple: """Returns a tuple (hash, commit object)""" from libs.repos import git return git.get_last_commit(repo_path=self.path)
Validates that we can only authenticate to one S3 and one GCS. def validate_stores_secrets_keys(stores_secrets): """Validates that we can only authenticate to one S3 and one GCS.""" stores = set([]) for store_secret in stores_secrets: if store_secret['store'] in stores: ...
Handles the case of GCS and S3 and creates a volume with a secret file. def get_stores_secrets_volumes(cls, stores_secrets): """Handles the case of GCS and S3 and creates a volume with a secret file.""" volumes = [] volume_mounts = [] for store_secret in stores_secrets: store = st...
Create an auth command for S3 and GCS. def get_stores_secrets_command_args(cls, stores_secrets): """Create an auth command for S3 and GCS.""" commands = [] for store_secret in stores_secrets: store = store_secret['store'] if store == GCS: commands.append(...
Returns a token to be used x number of times to allow a user account to access a certain resource. def make_token(cls, ephemeral_token: 'RedisEphemeralTokens') -> str: """ Returns a token to be used x number of times to allow a user account to access a certain resource. """ ...
Check that a token is correct for a given scope token. def check_token(self, token: 'RedisEphemeralTokens') -> bool: """ Check that a token is correct for a given scope token. """ if self.get_state() is None: # Token expired return False correct_token = self.make_t...
A function to find the maximum of the acquisition function. It uses a combination of random sampling (cheap) and the 'L-BFGS-B' optimization method: first by sampling `n_warmup` (1e5) points at random, and then running L-BFGS-B from `n_iter` (250) random starting points. Params: ...
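The maximizer body is truncated above; a hedged sketch of the two-phase strategy it describes (cheap random warmup, then L-BFGS-B refinement) could look as follows. Here `ac` is any acquisition callable taking a 2-D array and returning a 1-D array, `bounds` is an `(n_dims, 2)` array, and the function name and signature are assumptions.

```python
import numpy as np
from scipy.optimize import minimize

def max_acquisition(ac, y_max, bounds, n_warmup=100000, n_iter=250, seed=None):
    """Return the point in `bounds` that approximately maximizes `ac`."""
    rng = np.random.RandomState(seed)
    n_dims = bounds.shape[0]

    # Cheap phase: evaluate the acquisition on many uniformly sampled points.
    x_tries = rng.uniform(bounds[:, 0], bounds[:, 1], size=(n_warmup, n_dims))
    ys = ac(x_tries, y_max=y_max)
    x_max, max_acq = x_tries[ys.argmax()], ys.max()

    # Expensive phase: refine with L-BFGS-B from a few random starting points.
    for x_try in rng.uniform(bounds[:, 0], bounds[:, 1], size=(n_iter, n_dims)):
        res = minimize(lambda x: -ac(x.reshape(1, -1), y_max=y_max)[0],
                       x_try, bounds=bounds, method='L-BFGS-B')
        if res.success and -res.fun >= max_acq:
            x_max, max_acq = res.x, -res.fun

    # Clip in case the optimizer stepped marginally outside the bounds.
    return np.clip(x_max, bounds[:, 0], bounds[:, 1])
```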
Ordering is set by a comma delimited ?ordering=... query parameter. The `ordering` query parameter can be overridden by setting the `ordering_param` value on the OrderingFilter or by specifying an `ORDERING_PARAM` value in the API settings. def get_ordering(self, request, queryset, view): ...
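A minimal sketch of parsing that comma-delimited parameter; the real filter reads it from `request.query_params`, while this standalone version takes a plain dict so it can run on its own:

```python
def get_ordering(query_params, ordering_param='ordering'):
    """Parse a comma-delimited ?ordering=... value into a list of field names."""
    params = query_params.get(ordering_param, '')
    fields = [field.strip() for field in params.split(',') if field.strip()]
    return fields or None

print(get_ordering({'ordering': 'created_at,-name'}))  # ['created_at', '-name']
```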
Pod init container for setting outputs path. def get_init_container(self, init_command, init_args, env_vars, context_mounts, persistence_outputs, persistence...
Generate the activation key which will be emailed to the user. def get_activation_key(self, user): """ Generate the activation key which will be emailed to the user. """ return signing.dumps( obj=getattr(user, user.USERNAME_FIELD), salt=self.key_salt )
Send the activation email. The activation key is the username, signed using TimestampSigner. def send_activation_email(self, user): """ Send the activation email. The activation key is the username, signed using TimestampSigner. """ activation_key = self.get_activation_...
Create the inactive user account and wait for validation from a superuser. def create_inactive_user(self, form): """Create the inactive user account and wait for validation from a superuser.""" new_user = form.save(commit=False) new_user.is_active = False new_user.save() return new_us...
Verify that the activation key is valid and within the permitted activation time window, returning the username if valid or ``None`` if not. def validate_key(self, activation_key): """ Verify that the activation key is valid and within the permitted activation time window, retur...
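The registration helpers above sign the username and later validate it within a time window; a self-contained sketch of that round trip with `django.core.signing` (the salt value and the seven-day default below are assumptions) is:

```python
from django.conf import settings

# Standalone demo configuration only; a real project already has settings.
settings.configure(SECRET_KEY='not-a-real-secret')

from django.core import signing

SALT = 'registration'

def make_activation_key(username: str) -> str:
    """Sign the username so it can be emailed as an activation key."""
    return signing.dumps(obj=username, salt=SALT)

def validate_activation_key(activation_key: str, max_age_days: int = 7):
    """Return the username if the key is valid and not expired, else None."""
    try:
        # `max_age` is in seconds; tampered or expired keys raise BadSignature.
        return signing.loads(activation_key, salt=SALT, max_age=max_age_days * 86400)
    except signing.BadSignature:
        return None

key = make_activation_key('ada')
assert validate_activation_key(key) == 'ada'
```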
Given the verified username, look up and return the corresponding user account if it exists, or ``None`` if it doesn't. def get_user(self, username): """ Given the verified username, look up and return the corresponding user account if it exists, or ``None`` if it doesn'...
The base activation logic; subclasses should leave this method alone and implement activate(), which is called from this method. def get(self, request, *args, **kwargs): """The base activation logic; subclasses should leave this method alone and implement activate(), which is called from this m...
Pod job container for task. def get_pod_container(self, volume_mounts, persistence_outputs=None, persistence_data=None, outputs_refs_jobs=None, outputs_refs_experiments=None, ...
Pod sidecar container for task logs. def get_sidecar_container(self, volume_mounts): """Pod sidecar container for task logs.""" return get_sidecar_container( job_container_name=self.job_container_name, sidecar_container_name=self.sidecar_container_name, sidecar_docke...
Pod spec to be used to create pods for tasks: master, worker, ps. def get_task_pod_spec(self, volume_mounts, volumes, resource_name, persistence_outputs=None, persistence_data=None, ...
This validation step is done when we are sure the user does not exist on the system and we need to create a new user. def validate_username(username): """ This validation step is done when we are sure the user does not exist on the system and we need to create a new user. """ ...
This is where you should alter the context to fit the action. Default behaviour will leave the context as it is. def _prepare(cls, context: Dict) -> Dict: """This is where you should alter the context to fit the action. Default behaviour will leave the context as it is. """ if...
If the experiment is a restart, we should resume from the last checkpoint def copy_experiment(experiment): """If the experiment is a restart, we should resume from the last checkpoint""" try: publisher.publish_experiment_job_log( log_lines='Copying outputs from experiment `{}` into experiment `{}`'.forma...
Validate and record an event. >>> record('event.action', object_instance) def record(self, event_type: str, event_data: Mapping = None, instance: Any = None, **kwargs) -> 'Event': """ Validate and record an event. >>> record('event.a...
Build necessary code for a job to run def generate(job, build_path: str, from_image: str, build_steps: Optional[List[str]] = None, env_vars: Optional[List[Tuple[str, str]]] = None, nvidia_bin: str = None, set_lang_env: bool = True, ...
Return a list of suggestions based on random search. Params: matrix: `dict` representing the {hyperparam: hyperparam matrix config}. n_suggestions: number of suggestions to make. def get_suggestions(self, iteration_config=None): """Return a list of suggestions based on random s...
If a list is passed, set the serializer to many. def get_serializer(self, *args, **kwargs): """ If a list is passed, set the serializer to many. """ if isinstance(kwargs.get('data', {}), list): kwargs['many'] = True return super().get_serializer(*args, **kwargs)
Task handling for sidecar logs. def logs_handle_experiment_job(experiment_name: str, experiment_uuid: str, log_lines: Optional[Union[str, Iterable[str]]], temp: bool = True) -> None: """Task handling for sidecar logs."""...
Task handling for sidecar logs. def logs_handle_job(job_uuid: str, job_name: str, log_lines: Optional[Union[str, Iterable[str]]], temp: bool = True) -> None: """Task handling for sidecar logs.""" handle_job_logs(job_uuid=job_uuid, ...
Task handling for sidecar logs. def logs_handle_build_job(job_uuid: str, job_name: str, log_lines: Optional[Union[str, Iterable[str]]], temp: bool = True) -> None: """Task handling for sidecar logs.""" handle_build_job_logs(job_uui...
Create an iteration for the experiment group. def create_iteration(self, num_suggestions=0): """Create an iteration for the experiment group.""" from db.models.experiment_groups import ExperimentGroupIteration search_manager = self.experiment_group.search_manager iteration_config = sel...
Reduce the experiments to restart. def get_reduced_configs(self): """Reduce the experiments to restart.""" iteration_config = self.experiment_group.iteration_config if iteration_config is None: logger.error( 'Experiment group `%s` attempt to update iteration, but has...
Reduce the experiments to restart. def reduce_configs(self): """Reduce the experiments to restart.""" experiment_ids = self.get_reduced_configs() experiments = self.experiment_group.experiments.filter(id__in=experiment_ids) self.create_iteration() iteration_config = self.experim...
Similar to experiment_groups, but uses the default manager to return archived experiment groups as well. def all_experiment_groups(self): """ Similar to experiment_groups, but uses the default manager to return archived experiment groups as well. """ from db.models.experiment_groups...
Similar to jobs, but uses the default manager to return archived jobs as well. def all_jobs(self): """ Similar to jobs, but uses the default manager to return archived jobs as well. """ from db.models.jobs import Job return Job.all.filter(project=s...
Similar to build_jobs, but uses the default manager to return archived build jobs as well. def all_build_jobs(self): """ Similar to build_jobs, but uses the default manager to return archived build jobs as well. """ from db.models.build_jobs import BuildJob re...
Similar to notebook_jobs, but uses the default manager to return archived notebook jobs as well. def all_notebook_jobs(self): """ Similar to notebook_jobs, but uses the default manager to return archived notebook jobs as well. """ from db.models.notebooks import NotebookJob ...
Similar to tensorboard_jobs, but uses the default manager to return archived tensorboard jobs as well. def all_tensorboard_jobs(self): """ Similar to tensorboard_jobs, but uses the default manager to return archived tensorboard jobs as well. """ from db.models.tensorboards import ...
Similar to experiments, but uses the default manager to return archived experiments as well. def all_experiments(self): """ Similar to experiments, but uses the default manager to return archived experiments as well. """ from db.models.experiments import Experiment ...
Similar to group_experiments, but uses the default manager to return archived experiments as well. def all_group_experiments(self): """ Similar to group_experiments, but uses the default manager to return archived experiments as well. """ from db.models.experiments impor...
Return a dag representation of the nodes passed. This is equally used for pipelines and pipeline runs. Params: nodes: an instance of `Operation` | `OperationRun` the nodes to represent in the dag. downstream_fn: a function that returns the downstream nodes of a node. Returns: tup...
Get a list of all nodes in the graph with no dependencies. def get_independent_nodes(dag): """Get a list of all nodes in the graph with no dependencies.""" nodes = set(dag.keys()) dependent_nodes = set([node for downstream_nodes in dag.values() for node in downstream_nodes]) return set(nodes - dependent_...
Get orphan nodes for given dag. def get_orphan_nodes(dag): """Get orphan nodes for given dag.""" independent_nodes = get_independent_nodes(dag) return set([node for node in independent_nodes if not dag[node]])
Checks if the node has dependencies. def has_dependencies(node, dag): """Checks if the node has dependencies.""" for downstream_nodes in dag.values(): if node in downstream_nodes: return True return False
Sort the dag breadth-first topologically. Only the nodes inside the dag are returned, i.e. the nodes that are also keys. Returns: a topological ordering of the DAG. Raises: an error if this is not possible (graph is not valid). def sort_topologically(dag): """Sort the dag breadth-firs...
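A minimal breadth-first (Kahn) topological sort over the same `{node: set of downstream ids}` shape used by these helpers; this is a sketch of the behaviour described, not the project's exact implementation:

```python
from collections import deque

def sort_topologically(dag):
    """Return a breadth-first topological ordering of the keys of `dag`."""
    # Count incoming edges, ignoring downstream nodes that are not keys of the dag.
    in_degree = {node: 0 for node in dag}
    for downstream_nodes in dag.values():
        for node in downstream_nodes:
            if node in in_degree:
                in_degree[node] += 1

    queue = deque(node for node, degree in in_degree.items() if degree == 0)
    ordering = []
    while queue:
        node = queue.popleft()
        ordering.append(node)
        for downstream in dag[node]:
            if downstream in in_degree:
                in_degree[downstream] -= 1
                if in_degree[downstream] == 0:
                    queue.append(downstream)

    if len(ordering) != len(dag):
        raise ValueError('The dag is not valid: it contains a cycle.')
    return ordering

# Example: 1 -> 2 -> 3 and 1 -> 3.
print(sort_topologically({1: {2, 3}, 2: {3}, 3: set()}))  # [1, 2, 3]
```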
Parse the negation modifier in an operation. def parse_negation_operation(operation: str) -> Tuple[bool, str]: """Parse the negation modifier in an operation.""" _operation = operation.strip() if not _operation: raise QueryParserException('Operation is not valid: {}'.format(operation)) negation...
Parse the comparison operator in an operation. def parse_comparison_operation(operation: str) -> Tuple[Optional[str], str]: """Parse the comparison operator in an operation.""" _operation = operation.strip() if not _operation: raise QueryParserException('Operation is not valid: {}'.format(operati...
Parse datetime operations. A datetime operation can be one of the following: * single value: start_date:2014-10-10, start_date:>2014-10-10, start_date:>=2014-10-10 * negation single value: start_date:~2014-10-10 * interval: start_date:2010-10-10 10:10 .. 2012-10-10 * negation interval: start_date...
Parse scalar operations. A scalar operation can be one of the following: * single value: start_date:12, metric1:>0.9, metric1:>=-0.12 * negation single value: metric1:~1112, metric1:~<1112 equivalent to metric1:>=1112 This parser does not allow `|` and `..`. def parse_scalar_operation(operation: str)...
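A hedged sketch of the two parsing steps these operations build on: strip an optional `~` negation marker, then an optional comparison operator. The helper names here are shorthand stand-ins for `parse_negation_operation` and `parse_comparison_operation`, and error handling is omitted:

```python
def parse_negation(operation: str):
    """Return (negated, remaining operation) after stripping a leading '~'."""
    op = operation.strip()
    negated = op.startswith('~')
    return negated, op[1:].strip() if negated else op

def parse_comparison(operation: str):
    """Return (comparator or None, remaining operation)."""
    op = operation.strip()
    for comparator in ('<=', '>=', '<', '>'):
        if op.startswith(comparator):
            return comparator, op[len(comparator):].strip()
    return None, op

# metric1:~<=0.9  ->  negated, '<=', value 0.9
negated, rest = parse_negation('~<=0.9')
comparator, value = parse_comparison(rest)
print(negated, comparator, float(value))  # True <= 0.9
```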
Base parsing for expressions. Every expression must follow a basic format: `name:[modifier|operator]operation[*[operator]operation]` So this parser just splits the expression into: field name, operation. def parse_expression(expression: str) -> Tuple[str, str]: """Base parsing for expressions. ...
Split a query into different expressions. Example: name:bla, foo:<=1 def split_query(query: str) -> List[str]: """Split a query into different expressions. Example: name:bla, foo:<=1 """ try: _query = query.strip() except (ValueError, AttributeError): raise Que...
Tokenizes a standard search query into a name: operations mapping. Example: moo:bla, foo:~<=1, foo:ll..ff { 'moo': ['bla'], 'foo': ['~<=1', 'll..ff'] } def tokenize_query(query: str) -> Dict[str, Iterable]: """Tokenizes a standard search query into a name: operations mappi...
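A self-contained sketch of the tokenizer described above, splitting the query on commas and each expression on its first `:`; error handling is reduced to a plain `ValueError` instead of the project's `QueryParserException`:

```python
from collections import defaultdict
from typing import Dict, List

def tokenize_query(query: str) -> Dict[str, List[str]]:
    """Map each field name to the list of operations requested for it."""
    operations_by_name = defaultdict(list)
    for expression in query.split(','):
        expression = expression.strip()
        if not expression:
            continue
        name, _, operation = expression.partition(':')
        if not name or not operation:
            raise ValueError('Expression is not valid: {}'.format(expression))
        operations_by_name[name.strip()].append(operation.strip())
    return dict(operations_by_name)

print(tokenize_query('moo:bla, foo:~<=1, foo:ll..ff'))
# {'moo': ['bla'], 'foo': ['~<=1', 'll..ff']}
```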
Parses dotted fields, and returns the field and suffix. Example: foo => foo, None metric.foo => metric, foo def parse_field(field: str) -> Tuple[str, Optional[str]]: """Parses dotted fields, and returns the field and suffix. Example: foo => foo, None metric.foo =...
Set the upstream operations for an operation run. def set_op_upstreams(op_run, op): """Set the upstream operations for an operation run.""" # We get a list of all upstream ops of the current op upstream_ops = op.upstream_operations.values_list('id', flat=True) # We get latest op runs for the upstream_ops ...
Set the upstream runs for the operation runs in the dag following the topological sort. def set_topological_dag_upstreams(dag, ops, op_runs, runs_by_ops): """Set the upstream runs for the operation runs in the dag following the topological sort.""" sorted_ops = dags.sort_topologically(dag=dag) for op_id in...
Create a pipeline run/instance. def create_pipeline_run(pipeline, context_by_op): """Create a pipeline run/instance.""" pipeline_run = PipelineRun.objects.create(pipeline=pipeline) dag, ops = pipeline.dag # Go through the operations and create operation runs and the upstreams op_runs = {} runs_by...
Experiment jobs statuses def k8s_events_handle_experiment_job_statuses(self: 'celery_app.task', payload: Dict) -> None: """Experiment jobs statuses""" details = payload['details'] job_uuid = details['labels']['job_uuid'] logger.debug('handling events status for job_uuid: %s, status: %s', ...
Project jobs statuses def k8s_events_handle_job_statuses(self: 'celery_app.task', payload: Dict) -> None: """Project jobs statuses""" details = payload['details'] job_uuid = details['labels']['job_uuid'] job_name = details['labels']['job_name'] project_name = details['labels'].get('project_name') ...
Project Plugin jobs statuses def k8s_events_handle_plugin_job_statuses(self: 'celery_app.task', payload: Dict) -> None: """Project Plugin jobs statuses""" details = payload['details'] app = details['labels']['app'] job_uuid = details['labels']['job_uuid'] job_name = details['labels']['job_name'] ...
Build jobs statuses def k8s_events_handle_build_job_statuses(self: 'celery_app.task', payload: Dict) -> None: """Build jobs statuses""" details = payload['details'] app = details['labels']['app'] job_uuid = details['labels']['job_uuid'] job_name = details['labels']['job_name'] ...
Return a pod sidecar container. def get_sidecar_container(job_container_name, sidecar_container_name, sidecar_docker_image, sidecar_docker_image_pull_policy, namespace, sidecar_config, ...
Use custom exception handler for errors. def handle_exception(self, exc): """Use custom exception handler for errors.""" if isinstance( exc, (rest_exceptions.NotAuthenticated, rest_exceptions.AuthenticationFailed)) and self.HANDLE_UNAUTHENTICATED: return HttpRe...
Create resources requirements. Args: resources: `PodResourcesConfig` Return: `V1ResourceRequirements` def get_resources(resources): # pylint:disable=too-many-branches """Create resources requirements. Args: resources: `PodResourcesConfig` Return: `V1ResourceRequ...
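A reduced sketch of building the `V1ResourceRequirements` object; the real helper receives a `PodResourcesConfig`, so the plain `{name: {'requests': ..., 'limits': ...}}` dict used here is an assumption for illustration:

```python
from kubernetes import client

def get_resources(resources):
    """Build a V1ResourceRequirements from a nested requests/limits mapping."""
    if not resources:
        return None
    limits, requests = {}, {}
    for name, values in resources.items():  # e.g. name is 'cpu', 'memory', ...
        if values.get('limits') is not None:
            limits[name] = values['limits']
        if values.get('requests') is not None:
            requests[name] = values['requests']
    return client.V1ResourceRequirements(limits=limits or None,
                                         requests=requests or None)

print(get_resources({'cpu': {'requests': 1, 'limits': 2},
                     'memory': {'requests': '256Mi', 'limits': '1Gi'}}))
```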
This logic is extracted here so it can also be used with the Sanic API. def has_project_permissions(user: 'User', project: 'Project', request_method: str) -> bool: """This logic is extracted here so it can also be used with the Sanic API.""" # Superusers and the creator are allowed to do everything if user.is_staff or user.is_su...
Delete all group outputs. def experiment_group_pre_delete(sender, **kwargs): """Delete all group outputs.""" instance = kwargs['instance'] if instance.is_selection: return # Delete outputs and logs celery_app.send_task( SchedulerCeleryTasks.STORES_SCHEDULE_OUTPUTS_DELETION, ...
Record the experiment group deletion and remove related bookmarks. def experiment_group_post_delete(sender, **kwargs): """Record the experiment group deletion and remove related bookmarks.""" instance = kwargs['instance'] auditor.record(event_type=EXPERIMENT_GROUP_DELETED, instance=instance) remove_bookmarks(object_id=instance.id, content_type='experimentgroup')
Return a static asset URL (located within Polyaxon's static files). Example: ```python >>> get_asset_url('polyaxon', 'dist/global.css') ... "/_static/74d127b78dc7daf2c51f/polyaxon/dist/global.css" ``` def get_asset_url(module: str, path: str) -> str: """Return a static asset UR...
Pod init container for setting outputs path. def get_init_container(self, init_command, init_args, env_vars, context_mounts, persistence_outputs, persistence...
Pod init container for setting outputs path. def get_init_container(self, init_command, init_args, env_vars, context_mounts, persistence_outputs, persistence...
Create an iteration for the experiment group (works for grid and random). def create_iteration(self, num_suggestions): """Create an iteration for the experiment group (works for grid and random).""" from db.models.experiment_groups import ExperimentGroupIteration iteration_config = BaseIterati...
Update the last experiment group's iteration with experiment performance. def update_iteration(self): """Update the last experiment group's iteration with experiment performance.""" iteration_config = self.get_iteration_config() if not iteration_config: return experiments_me...
Update iteration's num_suggestions. def update_iteration_num_suggestions(self, num_suggestions): """Update iteration's num_suggestions.""" iteration_config = self.experiment_group.iteration_config iteration_config.num_suggestions = num_suggestions self._update_config(iteration_config)
We only validate the config if passed. Also we use the ExperimentSpecification to check if this config was intended as an experiment. def validate_config(self, config): """We only validate the config if passed. Also we use the ExperimentSpecification to check if this config was ...
Return the second part of the event_type e.g. >>> Event.event_type = 'experiment.deleted' >>> Event.get_event_action() == 'deleted' def get_event_action(cls) -> Optional[str]: """Return the second part of the event_type e.g. >>> Event.event_type = 'experiment.deleted...
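A tiny sketch of that split, matching the doctest above; the standalone function form is only for illustration:

```python
def get_event_action(event_type: str):
    """Return the part after the first '.', or None for single-part event types."""
    parts = event_type.split('.')
    return parts[1] if len(parts) > 1 else None

assert get_event_action('experiment.deleted') == 'deleted'
```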