Batch

LocalBatch dataclass

Bases: Serializable

report

report()

Generate an analysis report based on the currently completed tasks in the LocalBatch.

Return

Report

Source code in src/bloqade/task/batch.py
def report(self) -> Report:
    """
    Generate analysis report base on currently
    completed tasks in the LocalBatch.

    Return:
        Report

    """

    ## this could potentially be specialized/dispatched
    ## offline
    index = []
    data = []
    metas = []
    geos = []

    for task_number, task in self.tasks.items():
        geometry = task.geometry
        perfect_sorting = "".join(map(str, geometry.filling))
        parallel_decoder = geometry.parallel_decoder

        if parallel_decoder:
            cluster_indices = parallel_decoder.get_cluster_indices()
        else:
            cluster_indices = {(0, 0): list(range(len(perfect_sorting)))}

        shot_iter = filter(
            lambda shot: shot.shot_status == QuEraShotStatusCode.Completed,
            task.result().shot_outputs,
        )

        for shot, (cluster_coordinate, cluster_index) in product(
            shot_iter, cluster_indices.items()
        ):
            pre_sequence = "".join(
                map(
                    str,
                    (shot.pre_sequence[index] for index in cluster_index),
                )
            )

            post_sequence = np.asarray(
                [shot.post_sequence[index] for index in cluster_index],
                dtype=np.int8,
            )

            pfc_sorting = "".join(
                [perfect_sorting[index] for index in cluster_index]
            )

            key = (
                task_number,
                cluster_coordinate,
                pfc_sorting,
                pre_sequence,
            )

            index.append(key)
            data.append(post_sequence)

        metas.append(task.metadata)
        geos.append(task.geometry)

    index = pd.MultiIndex.from_tuples(
        index, names=["task_number", "cluster", "perfect_sorting", "pre_sequence"]
    )

    df = pd.DataFrame(data, index=index)
    df.sort_index(axis="index")

    rept = None
    if self.name is None:
        rept = Report(df, metas, geos, "Local")
    else:
        rept = Report(df, metas, geos, self.name)

    return rept
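
For orientation, the sketch below reconstructs, with made-up values, the shape of the dataframe that report() assembles: one row of post-sequence bits per completed shot and cluster, keyed by the four-level MultiIndex built in the source above. The bitstrings and numbers are illustrative only.

import numpy as np
import pandas as pd

# Illustrative index entries: (task_number, cluster coordinate, perfect_sorting, pre_sequence)
index = pd.MultiIndex.from_tuples(
    [
        (0, (0, 0), "11", "11"),
        (0, (0, 0), "11", "10"),
    ],
    names=["task_number", "cluster", "perfect_sorting", "pre_sequence"],
)

# One row of post-measurement bits per (shot, cluster), stored as int8 as in report()
post_sequences = np.asarray([[1, 0], [0, 1]], dtype=np.int8)
df = pd.DataFrame(post_sequences, index=index)
print(df)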

rerun

rerun(multiprocessing=False, num_workers=None, **kwargs)

Rerun all the tasks in the LocalBatch.

Return

Report

Source code in src/bloqade/task/batch.py
@beartype
def rerun(
    self, multiprocessing: bool = False, num_workers: Optional[int] = None, **kwargs
):
    """
    Rerun all the tasks in the LocalBatch.

    Return:
        Report

    """

    return self._run(
        multiprocessing=multiprocessing, num_workers=num_workers, **kwargs
    )
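
A minimal usage sketch, assuming batch is an existing LocalBatch from an earlier local run; per the docstring above, rerun returns a Report.

# Rerun every task in the batch, distributing the work across four worker processes.
report = batch.rerun(multiprocessing=True, num_workers=4)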

RemoteBatch dataclass

Bases: Serializable

total_nshots property

total_nshots

Total number of shots of all tasks in the RemoteBatch

Return

number of shots

cancel

cancel()

Cancel all the tasks in the Batch.

Return

self

Source code in src/bloqade/task/batch.py
def cancel(self) -> "RemoteBatch":
    """
    Cancel all the tasks in the Batch.

    Return:
        self

    """
    # cancel all jobs
    for task in self.tasks.values():
        task.cancel()

    return self

fetch

fetch()

Fetch the tasks in the Batch.

Note

Fetching will update the status of tasks, and only pull the results for those tasks that have completed.

Return

self

Source code in src/bloqade/task/batch.py
def fetch(self) -> "RemoteBatch":
    """
    Fetch the tasks in the Batch.

    Note:
        Fetching will update the status of tasks,
        and only pull the results for those tasks
        that have completed.

    Return:
        self

    """
    # online, non-blocking
    # pull the results only when its ready
    for task in self.tasks.values():
        task.fetch()

    return self
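
A usage sketch of the non-blocking workflow, assuming batch is a RemoteBatch returned by a remote submission.

batch.fetch()                            # refresh statuses; pull results only for finished tasks
completed = batch.get_completed_tasks()  # keep tasks with status "Completed" or "Partial"
report = completed.report()              # analyze whatever has completed so far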

get_completed_tasks

get_completed_tasks()

Create a RemoteBatch object that contains the completed tasks from the current Batch.

Tasks are considered completed if they have one of the following status codes:

  1. Completed
  2. Partial
Return

RemoteBatch

Source code in src/bloqade/task/batch.py
def get_completed_tasks(self) -> "RemoteBatch":
    """
    Create a RemoteBatch object that
    contain completed tasks from current Batch.

    Tasks consider completed with following status codes:

    1. Completed
    2. Partial

    Return:
        RemoteBatch

    """
    statuses = [
        "Completed",
        "Partial",
    ]
    return self.get_tasks(*statuses)

get_failed_tasks

get_failed_tasks()

Create a RemoteBatch object that contains the failed tasks from the current Batch.

Failed tasks have one of the following status codes:

  1. Failed
  2. Unaccepted
Return

RemoteBatch

Source code in src/bloqade/task/batch.py
def get_failed_tasks(self) -> "RemoteBatch":
    """
    Create a RemoteBatch object that
    contain failed tasks from current Batch.

    failed tasks with following status codes:

    1. Failed
    2. Unaccepted

    Return:
        RemoteBatch

    """
    # statuses that are in a state that are
    # completed because of an error
    statuses = ["Failed", "Unaccepted"]
    return self.get_tasks(*statuses)

get_finished_tasks

get_finished_tasks()

Create a RemoteBatch object that contains the finished tasks from the current Batch.

Tasks are considered finished if they have one of the following status codes:

  1. Failed
  2. Unaccepted
  3. Completed
  4. Partial
  5. Cancelled
Return

RemoteBatch

Source code in src/bloqade/task/batch.py
def get_finished_tasks(self) -> "RemoteBatch":
    """
    Create a RemoteBatch object that
    contain finished tasks from current Batch.

    Tasks consider finished with following status codes:

    1. Failed
    2. Unaccepted
    3. Completed
    4. Partial
    5. Cancelled

    Return:
        RemoteBatch

    """
    # statuses that are in a state that will
    # not run going forward for any reason
    statuses = ["Completed", "Failed", "Unaccepted", "Partial", "Cancelled"]
    return self.get_tasks(*statuses)

get_tasks

get_tasks(*status_codes)

Get tasks with the specified status codes.

Return

RemoteBatch

Source code in src/bloqade/task/batch.py
@beartype
def get_tasks(self, *status_codes: str) -> "RemoteBatch":
    """
    Get Tasks with specify status_codes.

    Return:
        RemoteBatch

    """
    # offline:
    st_codes = [QuEraTaskStatusCode(x) for x in status_codes]

    new_task_results = OrderedDict()
    for task_number, task in self.tasks.items():
        if task.task_result_ir.task_status in st_codes:
            new_task_results[task_number] = task

    return RemoteBatch(self.source, new_task_results, name=self.name)
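
A small sketch of filtering by explicit status codes; the strings must be valid QuEraTaskStatusCode names such as "Completed", "Partial", or "Failed".

done_or_partial = batch.get_tasks("Completed", "Partial")
failed_only = batch.get_tasks("Failed")
print(done_or_partial.total_nshots)  # total shots across the filtered tasks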

pull

pull()

Pull results of the tasks in the Batch.

Note

Pulling retrieves the results for the tasks. If a task has not completed yet, this call blocks until it finishes.

Return

self

Source code in src/bloqade/task/batch.py
def pull(self) -> "RemoteBatch":
    """
    Pull results of the tasks in the Batch.

    Note:
        Pulling retrieves the results for the tasks.
        If a task has not completed yet, this call
        blocks until it finishes.

    Return:
        self
    """
    # online, blocking
    # pull the results. if its not ready, hanging
    for task in self.tasks.values():
        task.pull()

    return self
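
A sketch of the blocking workflow: pull() waits for every task in the batch, so it fits when results are needed synchronously.

batch.pull()             # blocks until every task has results
report = batch.report()  # completed tasks are now guaranteed to have results available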

remove_failed_tasks

remove_failed_tasks()

Create a RemoteBatch object that contains the tasks from the current Batch, with failed tasks removed.

Failed tasks have one of the following status codes:

  1. Failed
  2. Unaccepted
Return

RemoteBatch

Source code in src/bloqade/task/batch.py
def remove_failed_tasks(self) -> "RemoteBatch":
    """
    Create a RemoteBatch object that
    contains the tasks from the current Batch,
    with failed tasks removed.

    Failed tasks have one of the following status codes:

    1. Failed
    2. Unaccepted

    Return:
        RemoteBatch

    """
    # statuses that are in a state that will
    # not run going forward because of an error
    statuses = ["Failed", "Unaccepted"]
    return self.remove_tasks(*statuses)

remove_invalid_tasks

remove_invalid_tasks()

Create a RemoteBatch object that contains the tasks from the current Batch, with all Unaccepted tasks removed.

Return

RemoteBatch

Source code in src/bloqade/task/batch.py
def remove_invalid_tasks(self) -> "RemoteBatch":
    """
    Create a RemoteBatch object that
    contains the tasks from the current Batch,
    with all Unaccepted tasks removed.

    Return:
        RemoteBatch

    """
    return self.remove_tasks("Unaccepted")

remove_tasks

remove_tasks(*status_codes)

Remove tasks with the specified status codes.

Return

RemoteBatch

Source code in src/bloqade/task/batch.py
@beartype
def remove_tasks(self, *status_codes: str) -> "RemoteBatch":
    """
    Remove Tasks with specify status_codes.

    Return:
        RemoteBatch

    """
    # offline:

    st_codes = [QuEraTaskStatusCode(x) for x in status_codes]

    new_results = OrderedDict()
    for task_number, task in self.tasks.items():
        if task.task_result_ir.task_status in st_codes:
            continue

        new_results[task_number] = task

    return RemoteBatch(self.source, new_results, self.name)
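
A small sketch: drop tasks in terminal error states and keep the rest; the status strings must again match QuEraTaskStatusCode names.

cleaned = batch.remove_tasks("Failed", "Unaccepted")
print(len(cleaned.tasks), "tasks remain")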

report

report()

Generate an analysis report based on the currently completed tasks in the RemoteBatch.

Return

Report

Source code in src/bloqade/task/batch.py
def report(self) -> "Report":
    """
    Generate analysis report base on currently
    completed tasks in the RemoteBatch.

    Return:
        Report

    """
    ## this could potentially be specialized/dispatched
    ## offline
    index = []
    data = []
    metas = []
    geos = []

    for task_number, task in self.tasks.items():
        ## filter out tasks with no existing results:
        if (task.task_id is None) or (not task._result_exists()):
            continue

        ## filter out tasks that have results but did not complete successfully.
        if not task.task_result_ir.task_status == QuEraTaskStatusCode.Completed:
            continue

        geometry = task.geometry
        perfect_sorting = "".join(map(str, geometry.filling))
        parallel_decoder = geometry.parallel_decoder

        if parallel_decoder:
            cluster_indices = parallel_decoder.get_cluster_indices()
        else:
            cluster_indices = {(0, 0): list(range(len(perfect_sorting)))}

        shot_iter = filter(
            lambda shot: shot.shot_status == QuEraShotStatusCode.Completed,
            task.result().shot_outputs,
        )

        for shot, (cluster_coordinate, cluster_index) in product(
            shot_iter, cluster_indices.items()
        ):
            pre_sequence = "".join(
                map(
                    str,
                    (shot.pre_sequence[index] for index in cluster_index),
                )
            )

            post_sequence = np.asarray(
                [shot.post_sequence[index] for index in cluster_index],
                dtype=np.int8,
            )

            pfc_sorting = "".join(
                [perfect_sorting[index] for index in cluster_index]
            )

            key = (
                task_number,
                cluster_coordinate,
                pfc_sorting,
                pre_sequence,
            )

            index.append(key)
            data.append(post_sequence)

        metas.append(task.metadata)
        geos.append(task.geometry)

    index = pd.MultiIndex.from_tuples(
        index, names=["task_number", "cluster", "perfect_sorting", "pre_sequence"]
    )

    df = pd.DataFrame(data, index=index)
    df.sort_index(axis="index")

    rept = None
    if self.name is None:
        rept = Report(df, metas, geos, "Remote")
    else:
        rept = Report(df, metas, geos, self.name)

    return rept
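
A usage sketch: because report() skips tasks without results as well as tasks that did not complete, it is safe to call on a partially finished batch.

batch.fetch()            # non-blocking refresh of statuses and available results
report = batch.report()  # built only from tasks with status "Completed"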

resubmit

resubmit(shuffle_submit_order=True)

Resubmit all the tasks in the RemoteBatch

Return

self

Source code in src/bloqade/task/batch.py
@beartype
def resubmit(self, shuffle_submit_order: bool = True) -> "RemoteBatch":
    """
    Resubmit all the tasks in the RemoteBatch

    Return:
        self

    """
    # online, non-blocking
    self._submit(shuffle_submit_order, force=True)
    return self
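
A sketch of a common retry pattern, combining two methods documented on this page; the combination itself is an illustrative assumption, not a prescribed workflow.

failed = batch.get_failed_tasks()           # tasks with status "Failed" or "Unaccepted"
failed.resubmit(shuffle_submit_order=True)  # non-blocking resubmission of just those tasks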

tasks_metric

tasks_metric()

Get the current task status metrics.

Return

dataframe with columns ["task ID", "status", "shots"]

Source code in src/bloqade/task/batch.py
def tasks_metric(self) -> pd.DataFrame:
    """
    Get the current task status metrics.

    Return:
        dataframe with columns ["task ID", "status", "shots"]

    """
    # [TODO] more info on current status
    # offline, non-blocking
    tid = []
    data = []
    for task_number, task in self.tasks.items():
        tid.append(task_number)

        dat = [None, None, None]
        dat[0] = task.task_id
        if task.task_result_ir is not None:
            dat[1] = task.task_result_ir.task_status.name
            dat[2] = task.task_ir.nshots
        data.append(dat)

    return pd.DataFrame(data, index=tid, columns=["task ID", "status", "shots"])
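
A small usage sketch for inspecting the per-task status table.

metrics = batch.tasks_metric()           # DataFrame with columns ["task ID", "status", "shots"]
print(metrics)
print(metrics["status"].value_counts())  # how many tasks are in each state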

Serializable

json

json(**options)

Serialize the object to a JSON string.

Return

JSON string

Source code in src/bloqade/task/batch.py
def json(self, **options) -> str:
    """
    Serialize the object to a JSON string.

    Return:
        JSON string

    """
    from bloqade import dumps

    return dumps(self, **options)
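
A hedged round-trip sketch. json() delegates to bloqade.dumps and forwards any keyword options to it; indent=2 is assumed to be accepted in the usual json.dumps style, and bloqade.loads is assumed to be the matching deserializer.

serialized = batch.json(indent=2)  # keyword options are forwarded to bloqade.dumps (assumed)
with open("batch.json", "w") as f:
    f.write(serialized)

from bloqade import loads          # assumed counterpart to bloqade.dumps
restored = loads(serialized)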