Source code for labscheduler.sila_server.generated.schedulingservice.schedulingservice_client
# Generated by sila2.code_generator; sila2.__version__: 0.12.2
# -----
# This class does not do anything useful at runtime. Its only purpose is to provide type annotations.
# Since sphinx does not support .pyi files (yet?), this is a .py file.
# -----
from __future__ import annotations
from typing import TYPE_CHECKING
if TYPE_CHECKING:
    from collections.abc import Iterable
    from typing import List, Optional

    from .schedulingservice_types import ComputeSchedule_Responses, SelectAlgorithm_Responses
    from sila2.client import ClientMetadataInstance, ClientObservableCommandInstance, ClientUnobservableProperty

    from .schedulingservice_types import AlgorithmMetaData, WorkflowGraph
class SchedulingServiceClient:
    """
    Provides an interface for the PythonLabOrchestrator or any other lab environment to interact with the
    scheduling module. It provides an observable command to schedule a workflow (the exact syntax is explained in
    the command description). You can choose between different algorithms and query their metadata.
    A commented usage sketch appears at the end of this module.
    """
    AvailableAlgorithms: ClientUnobservableProperty[list[AlgorithmMetaData]]
    """
    List of algorithm info for all available algorithms
    """

    CurrentAlgorithm: ClientUnobservableProperty[AlgorithmMetaData]
    """
    The currently selected algorithm
    """
    def SelectAlgorithm(
        self,
        AlgorithmName: str,
        *,
        metadata: Iterable[ClientMetadataInstance] | None = None,
    ) -> SelectAlgorithm_Responses:
        """
        Selects the algorithm to be used in the ComputeSchedule command
        """
    def ComputeSchedule(
        self,
        WorkflowGraph: WorkflowGraph,
        MaxComputationTime: float,
        *,
        metadata: Iterable[ClientMetadataInstance] | None = None,
    ) -> ClientObservableCommandInstance[ComputeSchedule_Responses]:
        """
        Takes a workflow graph (see the parameter description) and computes a schedule for the currently configured
        lab environment (LabConfigurationController) with the currently selected algorithm.
        The maximum computation time must be provided.
        """