diff --git a/cmd/armadactl/cmd/queue.go b/cmd/armadactl/cmd/queue.go index 102354a7afd..27c9d092449 100644 --- a/cmd/armadactl/cmd/queue.go +++ b/cmd/armadactl/cmd/queue.go @@ -58,13 +58,18 @@ Job priority is evaluated inside queue, queue has its own priority. Any labels return fmt.Errorf("error reading queue labels: %s", err) } + labelsAsMap, err := labelSliceAsMap(labels) + if err != nil { + return fmt.Errorf("error converting queue labels to map: %s", err) + } + newQueue, err := queue.NewQueue(&api.Queue{ Name: name, PriorityFactor: priorityFactor, UserOwners: owners, GroupOwners: groups, Cordoned: cordoned, - Labels: labels, + Labels: labelsAsMap, }) if err != nil { return fmt.Errorf("invalid queue data: %s", err) @@ -220,13 +225,18 @@ func queueUpdateCmdWithApp(a *armadactl.App) *cobra.Command { return fmt.Errorf("error reading queue labels: %s", err) } + labelsAsMap, err := labelSliceAsMap(labels) + if err != nil { + return fmt.Errorf("error converting queue labels to map: %s", err) + } + newQueue, err := queue.NewQueue(&api.Queue{ Name: name, PriorityFactor: priorityFactor, UserOwners: owners, GroupOwners: groups, Cordoned: cordoned, - Labels: labels, + Labels: labelsAsMap, }) if err != nil { return fmt.Errorf("invalid queue data: %s", err) diff --git a/cmd/armadactl/cmd/utils.go b/cmd/armadactl/cmd/utils.go index 5482aa44477..0757994483f 100644 --- a/cmd/armadactl/cmd/utils.go +++ b/cmd/armadactl/cmd/utils.go @@ -1,6 +1,9 @@ package cmd -import "fmt" +import ( + "fmt" + "strings" +) func queueNameValidation(queueName string) error { if queueName == "" { @@ -8,3 +11,15 @@ func queueNameValidation(queueName string) error { } return nil } + +func labelSliceAsMap(labels []string) (map[string]string, error) { + mapToReturn := make(map[string]string) + for _, label := range labels { + splitLabel := strings.Split(label, "=") + if len(splitLabel) != 2 { + return nil, fmt.Errorf("invalid label: %s", label) + } + mapToReturn[splitLabel[0]] = splitLabel[1] + } + return mapToReturn, nil +} diff --git a/docs/python_airflow_operator.md b/docs/python_airflow_operator.md index 29709313ac9..486d47cde1c 100644 --- a/docs/python_airflow_operator.md +++ b/docs/python_airflow_operator.md @@ -12,7 +12,7 @@ This class provides integration with Airflow and Armada ## armada.operators.armada module -### _class_ armada.operators.armada.ArmadaOperator(name, channel_args, armada_queue, job_request, job_set_prefix='', lookout_url_template=None, poll_interval=30, container_logs=None, k8s_token_retriever=None, deferrable=False, job_acknowledgement_timeout=300, \*\*kwargs) +### _class_ armada.operators.armada.ArmadaOperator(name, channel_args, armada_queue, job_request, job_set_prefix='', lookout_url_template=None, poll_interval=30, container_logs=None, k8s_token_retriever=None, deferrable=False, job_acknowledgement_timeout=300, dry_run=False, \*\*kwargs) Bases: `BaseOperator`, `LoggingMixin` An Airflow operator that manages Job submission to Armada. @@ -33,7 +33,7 @@ and handles job cancellation if the Airflow task is killed. * **armada_queue** (*str*) – - * **job_request** (*JobSubmitRequestItem*) – + * **job_request** (*JobSubmitRequestItem** | **Callable**[**[**Context**, **jinja2.Environment**]**, **JobSubmitRequestItem**]*) – * **job_set_prefix** (*Optional**[**str**]*) – @@ -57,8 +57,9 @@ and handles job cancellation if the Airflow task is killed. 
* **job_acknowledgement_timeout** (*int*) – + * **dry_run** (*bool*) – + -#### _property_ client(_: ArmadaClien_ ) #### execute(context) Submits the job to Armada and polls for completion. @@ -76,6 +77,10 @@ Submits the job to Armada and polls for completion. +#### _property_ hook(_: ArmadaHoo_ ) + +#### lookout_url(job_id) + #### on_kill() Override this method to clean up subprocesses when a task instance gets killed. @@ -89,6 +94,8 @@ operator needs to be cleaned up, or it will leave ghost processes behind. +#### operator_extra_links(_: Collection[BaseOperatorLink_ _ = (LookoutLink(),_ ) + #### _property_ pod_manager(_: KubernetesPodLogManage_ ) #### render_template_fields(context, jinja_env=None) @@ -117,6 +124,8 @@ Args: #### template_fields(_: Sequence[str_ _ = ('job_request', 'job_set_prefix'_ ) + +#### template_fields_renderers(_: Dict[str, str_ _ = {'job_request': 'py'_ ) Initializes a new ArmadaOperator. @@ -132,7 +141,7 @@ Initializes a new ArmadaOperator. * **armada_queue** (*str*) – The name of the Armada queue to which the job will be submitted. - * **job_request** (*JobSubmitRequestItem*) – The job to be submitted to Armada. + * **job_request** (*JobSubmitRequestItem** | **Callable**[**[**Context**, **jinja2.Environment**]**, **JobSubmitRequestItem**]*) – The job to be submitted to Armada. * **job_set_prefix** (*Optional**[**str**]*) – A string to prepend to the jobSet name. @@ -156,10 +165,39 @@ for asynchronous execution. :param job_acknowledgement_timeout: The timeout in seconds to wait for a job to be acknowledged by Armada. :type job_acknowledgement_timeout: int +:param dry_run: Run Operator in dry-run mode - render Armada request and terminate. +:type dry_run: bool :param kwargs: Additional keyword arguments to pass to the BaseOperator. -### armada.operators.armada.log_exceptions(method) +### _class_ armada.operators.armada.LookoutLink() +Bases: `BaseOperatorLink` + + +#### get_link(operator, \*, ti_key) +Link to external system. + +Note: The old signature of this function was `(self, operator, dttm: datetime)`. That is still +supported at runtime but is deprecated. + + +* **Parameters** + + + * **operator** (*BaseOperator*) – The Airflow operator object this link is associated to. + + + * **ti_key** (*TaskInstanceKey*) – TaskInstance ID to return link for. 
+ + + +* **Returns** + + link to external system + + + +#### name(_ = 'Lookout_ ) ## armada.triggers.armada module ## armada.auth module @@ -176,18 +214,10 @@ Bases: `Protocol` str - -#### serialize() - -* **Return type** - - *Tuple*[str, *Dict*[str, *Any*]] - - ## armada.model module -### _class_ armada.model.GrpcChannelArgs(target, options=None, compression=None, auth=None, auth_details=None) +### _class_ armada.model.GrpcChannelArgs(target, options=None, compression=None, auth=None) Bases: `object` @@ -197,32 +227,31 @@ Bases: `object` * **target** (*str*) – - * **options** (*Sequence**[**Tuple**[**str**, **Any**]**] **| **None*) – + * **options** (*Optional**[**Sequence**[**Tuple**[**str**, **Any**]**]**]*) – - * **compression** (*Compression** | **None*) – + * **compression** (*Optional**[**grpc.Compression**]*) – - * **auth** (*AuthMetadataPlugin** | **None*) – + * **auth** (*Optional**[**grpc.AuthMetadataPlugin**]*) – - * **auth_details** (*Dict**[**str**, **Any**] **| **None*) – +#### _static_ deserialize(data, version) +* **Parameters** -#### aio_channel() - -* **Return type** + + * **data** (*dict**[**str**, **Any**]*) – - *Channel* + * **version** (*int*) – -#### channel() * **Return type** - *Channel* + *GrpcChannelArgs* @@ -231,3 +260,50 @@ Bases: `object` * **Return type** *Dict*[str, *Any*] + + + +### _class_ armada.model.RunningJobContext(armada_queue: 'str', job_id: 'str', job_set_id: 'str', submit_time: 'DateTime', cluster: 'Optional[str]' = None, last_log_time: 'Optional[DateTime]' = None, job_state: 'str' = 'UNKNOWN') +Bases: `object` + + +* **Parameters** + + + * **armada_queue** (*str*) – + + + * **job_id** (*str*) – + + + * **job_set_id** (*str*) – + + + * **submit_time** (*DateTime*) – + + + * **cluster** (*str** | **None*) – + + + * **last_log_time** (*DateTime** | **None*) – + + + * **job_state** (*str*) – + + + +#### armada_queue(_: st_ ) + +#### cluster(_: str | Non_ _ = Non_ ) + +#### job_id(_: st_ ) + +#### job_set_id(_: st_ ) + +#### job_state(_: st_ _ = 'UNKNOWN_ ) + +#### last_log_time(_: DateTime | Non_ _ = Non_ ) + +#### _property_ state(_: JobStat_ ) + +#### submit_time(_: DateTim_ ) diff --git a/internal/armadactl/queue.go b/internal/armadactl/queue.go index 193087b143c..b9d3a84e3d4 100644 --- a/internal/armadactl/queue.go +++ b/internal/armadactl/queue.go @@ -16,11 +16,16 @@ import ( "github.com/armadaproject/armada/pkg/client/util" ) +// QueueQueryArgs is used for retrieving queues or for cordoning/uncordoning type QueueQueryArgs struct { - InQueueNames []string + // Filter for queues where the InQueueNames slice contains the queue name + InQueueNames []string + // Filter for queues where the queue contains all labels specified in the ContainsAllLabels slice ContainsAllLabels []string - InvertResult bool - OnlyCordoned bool + // Filter for cordoned queues only + OnlyCordoned bool + // Applies the above filters and inverts the result + InvertResult bool } // CreateQueue calls app.QueueAPI.Create with the provided parameters. 
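// Note on the next hunk: getAllQueuesAsAPIQueue now matches each ContainsAllLabels
// entry against the queue's label map instead of a label slice. A bare key such as
// "armadaproject.io/pool" matches on key presence alone, while "key=value" must also
// match the stored value. The function below is only an illustrative sketch of that
// rule, not part of this patch: the name is hypothetical, it assumes the standard
// library "strings" package and map[string]string queue labels, and it simplifies the
// handling of values that themselves contain "=".
func labelSelectorMatches(selector string, queueLabels map[string]string) bool {
	parts := strings.SplitN(selector, "=", 2)
	value, ok := queueLabels[parts[0]]
	if !ok {
		return false
	}
	// A bare key matches on presence; "key=value" additionally requires an exact value match.
	return len(parts) == 1 || value == parts[1]
}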
@@ -83,18 +88,22 @@ func (a *App) GetQueue(name string) error { func (a *App) getAllQueuesAsAPIQueue(args *QueueQueryArgs) ([]*api.Queue, error) { queueFilters := func(q *api.Queue) bool { containsAllLabels := slices.AllFunc(args.ContainsAllLabels, func(label string) bool { - // If the label is a key, map the labels slice to only keys - labelsToCompare := q.Labels - if len(strings.Split(label, "=")) == 1 { - labelsToCompare = slices.Map(q.Labels, func(queueLabel string) string { return strings.Split(queueLabel, "=")[0] }) + splitLabel := strings.Split(label, "=") + if len(splitLabel) >= 2 { + queueLabelValue, ok := q.Labels[splitLabel[0]] + return ok && queueLabelValue == strings.Join(splitLabel[1:], "") + } else if len(splitLabel) == 1 { + // If the label is a key, we compare on keys + _, ok := q.Labels[splitLabel[0]] + return ok } - return goslices.Contains(labelsToCompare, label) + return false }) inQueues := len(args.InQueueNames) == 0 || goslices.Contains(args.InQueueNames, q.Name) - invertedResult := args.InvertResult != (containsAllLabels && inQueues) + matchesLabelsAndQueues := containsAllLabels && inQueues onlyCordonedCheck := (args.OnlyCordoned && q.Cordoned) || !args.OnlyCordoned - return invertedResult && onlyCordonedCheck + return args.InvertResult != (matchesLabelsAndQueues && onlyCordonedCheck) } queuesToReturn, err := a.Params.QueueAPI.GetAll() if err != nil { diff --git a/internal/armadactl/queue_test.go b/internal/armadactl/queue_test.go new file mode 100644 index 00000000000..c21a201319f --- /dev/null +++ b/internal/armadactl/queue_test.go @@ -0,0 +1,508 @@ +package armadactl + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/armadaproject/armada/internal/common/slices" + "github.com/armadaproject/armada/pkg/api" +) + +func TestQueueFiltering(t *testing.T) { + tests := map[string]struct { + queuesToFilter []*api.Queue + args *QueueQueryArgs + expectedQueueNames []string + errorExpected bool + }{ + "empty filter": { + queuesToFilter: []*api.Queue{ + { + Name: "queue-a", + }, + { + Name: "queue-b", + }, + }, + args: &QueueQueryArgs{}, + expectedQueueNames: []string{"queue-a", "queue-b"}, + errorExpected: false, + }, + "query single queue by name": { + queuesToFilter: []*api.Queue{ + { + Name: "queue-a", + }, + { + Name: "queue-b", + }, + }, + args: &QueueQueryArgs{ + InQueueNames: []string{"queue-a"}, + }, + expectedQueueNames: []string{"queue-a"}, + errorExpected: false, + }, + "query non-matching queue by name": { + queuesToFilter: []*api.Queue{ + { + Name: "queue-a", + }, + { + Name: "queue-b", + }, + }, + args: &QueueQueryArgs{ + InQueueNames: []string{"queue-z"}, + }, + expectedQueueNames: []string{}, + errorExpected: false, + }, + "query multiple queues by name": { + queuesToFilter: []*api.Queue{ + { + Name: "queue-a", + }, + { + Name: "queue-b", + }, + }, + args: &QueueQueryArgs{ + InQueueNames: []string{"queue-a", "queue-b"}, + }, + expectedQueueNames: []string{"queue-a", "queue-b"}, + errorExpected: false, + }, + "filter on single label": { + queuesToFilter: []*api.Queue{ + { + Name: "queue-a", + Labels: map[string]string{ + "armadaproject.io/pool": "cpu", + }, + Cordoned: false, + }, + { + Name: "queue-b", + Labels: map[string]string{ + "armadaproject.io/pool": "gpu", + }, + Cordoned: false, + }, + }, + args: &QueueQueryArgs{ + ContainsAllLabels: []string{ + "armadaproject.io/pool=cpu", + }, + }, + expectedQueueNames: []string{"queue-a"}, + errorExpected: false, + }, + "filter on single label key": { + queuesToFilter: []*api.Queue{ + 
{ + Name: "queue-a", + Labels: map[string]string{ + "armadaproject.io/pool": "cpu", + }, + Cordoned: false, + }, + { + Name: "queue-b", + Labels: map[string]string{ + "armadaproject.io/pool": "gpu", + }, + Cordoned: false, + }, + { + Name: "queue-c", + Labels: map[string]string{ + "armadaproject.io/pool": "mixed", + }, + Cordoned: false, + }, + }, + args: &QueueQueryArgs{ + ContainsAllLabels: []string{ + "armadaproject.io/pool", + }, + }, + expectedQueueNames: []string{"queue-a", "queue-b", "queue-c"}, + errorExpected: false, + }, + "filter on multiple labels": { + queuesToFilter: []*api.Queue{ + { + Name: "queue-a", + Labels: map[string]string{ + "armadaproject.io/pool": "cpu", + "armadaproject.io/priority": "high", + }, + Cordoned: false, + }, + { + Name: "queue-b", + Labels: map[string]string{ + "armadaproject.io/pool": "gpu", + "armadaproject.io/priority": "medium", + }, + Cordoned: false, + }, + { + Name: "queue-c", + Labels: map[string]string{ + "armadaproject.io/pool": "mixed", + "armadaproject.io/priority": "low", + }, + Cordoned: false, + }, + }, + args: &QueueQueryArgs{ + ContainsAllLabels: []string{ + "armadaproject.io/pool", "armadaproject.io/priority=high", + }, + }, + expectedQueueNames: []string{"queue-a"}, + errorExpected: false, + }, + "filter on multiple labels and queue name": { + queuesToFilter: []*api.Queue{ + { + Name: "queue-a", + Labels: map[string]string{ + "armadaproject.io/pool": "cpu", + "armadaproject.io/priority": "high", + }, + Cordoned: false, + }, + { + Name: "queue-b", + Labels: map[string]string{ + "armadaproject.io/pool": "gpu", + "armadaproject.io/priority": "medium", + }, + Cordoned: false, + }, + { + Name: "queue-c", + Labels: map[string]string{ + "armadaproject.io/pool": "mixed", + "armadaproject.io/priority": "low", + }, + Cordoned: false, + }, + }, + args: &QueueQueryArgs{ + ContainsAllLabels: []string{ + "armadaproject.io/pool", "armadaproject.io/priority=high", + }, + InQueueNames: []string{ + "queue-a", + }, + }, + expectedQueueNames: []string{"queue-a"}, + errorExpected: false, + }, + "filter on multiple labels and queue name, no matches": { + queuesToFilter: []*api.Queue{ + { + Name: "queue-a", + Labels: map[string]string{ + "armadaproject.io/pool": "cpu", + "armadaproject.io/priority": "high", + }, + Cordoned: false, + }, + { + Name: "queue-b", + Labels: map[string]string{ + "armadaproject.io/pool": "gpu", + "armadaproject.io/priority": "medium", + }, + Cordoned: false, + }, + { + Name: "queue-c", + Labels: map[string]string{ + "armadaproject.io/pool": "mixed", + "armadaproject.io/priority": "low", + }, + Cordoned: false, + }, + }, + args: &QueueQueryArgs{ + ContainsAllLabels: []string{ + "armadaproject.io/pool", "armadaproject.io/priority=high", + }, + InQueueNames: []string{ + "queue-b", + }, + }, + expectedQueueNames: []string{}, + errorExpected: false, + }, + "filter on cordoned status": { + queuesToFilter: []*api.Queue{ + { + Name: "queue-a", + Labels: map[string]string{ + "armadaproject.io/pool": "cpu", + "armadaproject.io/priority": "high", + }, + Cordoned: false, + }, + { + Name: "queue-b", + Labels: map[string]string{ + "armadaproject.io/pool": "gpu", + "armadaproject.io/priority": "medium", + }, + Cordoned: true, + }, + { + Name: "queue-c", + Labels: map[string]string{ + "armadaproject.io/pool": "mixed", + "armadaproject.io/priority": "low", + }, + Cordoned: false, + }, + }, + args: &QueueQueryArgs{ + OnlyCordoned: true, + }, + expectedQueueNames: []string{"queue-b"}, + errorExpected: false, + }, + "filter on label and cordoned 
status": { + queuesToFilter: []*api.Queue{ + { + Name: "queue-a", + Labels: map[string]string{ + "armadaproject.io/pool": "cpu", + "armadaproject.io/priority": "high", + }, + Cordoned: false, + }, + { + Name: "queue-b", + Labels: map[string]string{ + "armadaproject.io/pool": "gpu", + "armadaproject.io/priority": "medium", + }, + Cordoned: false, + }, + { + Name: "queue-c", + Labels: map[string]string{ + "armadaproject.io/pool": "mixed", + "armadaproject.io/priority": "low", + }, + Cordoned: true, + }, + }, + args: &QueueQueryArgs{ + ContainsAllLabels: []string{ + "armadaproject.io/pool=mixed", + }, + OnlyCordoned: true, + }, + expectedQueueNames: []string{"queue-c"}, + errorExpected: false, + }, + "simple query inverted": { + queuesToFilter: []*api.Queue{ + { + Name: "queue-a", + Labels: map[string]string{ + "armadaproject.io/pool": "cpu", + "armadaproject.io/priority": "high", + }, + Cordoned: false, + }, + { + Name: "queue-b", + Labels: map[string]string{ + "armadaproject.io/pool": "gpu", + "armadaproject.io/priority": "medium", + }, + Cordoned: false, + }, + { + Name: "queue-c", + Labels: map[string]string{ + "armadaproject.io/pool": "mixed", + "armadaproject.io/priority": "low", + }, + Cordoned: true, + }, + }, + args: &QueueQueryArgs{ + InQueueNames: []string{"queue-a"}, + InvertResult: true, + }, + expectedQueueNames: []string{"queue-b", "queue-c"}, + errorExpected: false, + }, + "all matching query inverted": { + queuesToFilter: []*api.Queue{ + { + Name: "queue-a", + Labels: map[string]string{ + "armadaproject.io/pool": "cpu", + "armadaproject.io/priority": "high", + }, + Cordoned: false, + }, + { + Name: "queue-b", + Labels: map[string]string{ + "armadaproject.io/pool": "gpu", + "armadaproject.io/priority": "medium", + }, + Cordoned: false, + }, + { + Name: "queue-c", + Labels: map[string]string{ + "armadaproject.io/pool": "mixed", + "armadaproject.io/priority": "low", + }, + Cordoned: true, + }, + }, + args: &QueueQueryArgs{ + ContainsAllLabels: []string{ + "armadaproject.io/pool", + }, + InvertResult: true, + }, + expectedQueueNames: []string{}, + errorExpected: false, + }, + "none-matching query inverted": { + queuesToFilter: []*api.Queue{ + { + Name: "queue-a", + Labels: map[string]string{ + "armadaproject.io/pool": "cpu", + "armadaproject.io/priority": "high", + }, + Cordoned: false, + }, + { + Name: "queue-b", + Labels: map[string]string{ + "armadaproject.io/pool": "gpu", + "armadaproject.io/priority": "medium", + }, + Cordoned: false, + }, + { + Name: "queue-c", + Labels: map[string]string{ + "armadaproject.io/pool": "mixed", + "armadaproject.io/priority": "low", + }, + Cordoned: true, + }, + }, + args: &QueueQueryArgs{ + InQueueNames: []string{"queue-a"}, + OnlyCordoned: true, + InvertResult: true, + }, + expectedQueueNames: []string{"queue-a", "queue-b", "queue-c"}, + errorExpected: false, + }, + "complex query inverted": { + queuesToFilter: []*api.Queue{ + { + Name: "queue-a", + Labels: map[string]string{ + "armadaproject.io/pool": "cpu", + "armadaproject.io/priority": "high", + }, + Cordoned: false, + }, + { + Name: "queue-b", + Labels: map[string]string{ + "armadaproject.io/pool": "gpu", + "armadaproject.io/priority": "medium", + }, + Cordoned: false, + }, + { + Name: "queue-c", + Labels: map[string]string{ + "armadaproject.io/pool": "mixed", + "armadaproject.io/priority": "low", + }, + Cordoned: true, + }, + }, + args: &QueueQueryArgs{ + ContainsAllLabels: []string{"armadaproject.io/pool"}, + InQueueNames: []string{"queue-c"}, + InvertResult: true, + }, + 
expectedQueueNames: []string{"queue-a", "queue-b"}, + errorExpected: false, + }, + "complex query inverted 2": { + queuesToFilter: []*api.Queue{ + { + Name: "queue-a", + Labels: map[string]string{ + "armadaproject.io/pool": "cpu", + "armadaproject.io/priority": "high", + }, + Cordoned: false, + }, + { + Name: "queue-b", + Labels: map[string]string{ + "armadaproject.io/pool": "gpu", + "armadaproject.io/priority": "medium", + }, + Cordoned: true, + }, + { + Name: "queue-c", + Labels: map[string]string{ + "armadaproject.io/pool": "mixed", + "armadaproject.io/priority": "low", + }, + Cordoned: false, + }, + }, + args: &QueueQueryArgs{ + ContainsAllLabels: []string{"armadaproject.io/pool"}, + InQueueNames: []string{"queue-b", "queue-c"}, + OnlyCordoned: true, + InvertResult: true, + }, + expectedQueueNames: []string{"queue-a", "queue-c"}, + errorExpected: false, + }, + } + + for name, tc := range tests { + t.Run(name, func(tt *testing.T) { + app := New() + + // No mocking needed for this test thanks to the functional design of QueueAPI + app.Params.QueueAPI.GetAll = func() ([]*api.Queue, error) { + return tc.queuesToFilter, nil + } + + filteredQueues, err := app.getAllQueuesAsAPIQueue(tc.args) + assert.Equal(tt, tc.errorExpected, err != nil) + assert.Equal(tt, tc.expectedQueueNames, slices.Map(filteredQueues, func(q *api.Queue) string { + return q.Name + })) + }) + } +} diff --git a/internal/common/metrics/domain.go b/internal/common/metrics/domain.go index 599f1fdc8b6..86b7bd80c83 100644 --- a/internal/common/metrics/domain.go +++ b/internal/common/metrics/domain.go @@ -5,12 +5,13 @@ import ( "time" armadaresource "github.com/armadaproject/armada/internal/common/resource" + "github.com/armadaproject/armada/pkg/api" ) type QueueMetricProvider interface { GetQueuedJobMetrics(queueName string) []*QueueMetrics GetRunningJobMetrics(queueName string) []*QueueMetrics - GetQueuePriorites() map[string]float64 + GetAllQueues() []*api.Queue } type QueueMetrics struct { diff --git a/internal/common/metrics/scheduler_metrics.go b/internal/common/metrics/scheduler_metrics.go index a99af7e7509..b6ce130f500 100644 --- a/internal/common/metrics/scheduler_metrics.go +++ b/internal/common/metrics/scheduler_metrics.go @@ -176,6 +176,22 @@ var QueuePriorityDesc = prometheus.NewDesc( nil, ) +var ( + queueLabelMetricName = MetricPrefix + "queue_labels" + queueLabelMetricDescription = "Queue labels" + queueLabelDefaultLabels = []string{"queueName", "queue"} +) + +// QueueLabelDesc so it can be added to AllDescs which makes Describe() work properly +// +// actual describe for this metric is generated dynamically as the labels are dynamic +var QueueLabelDesc = prometheus.NewDesc( + queueLabelMetricName, + queueLabelMetricDescription, + queueLabelDefaultLabels, + nil, +) + var AllDescs = []*prometheus.Desc{ QueueSizeDesc, QueuePriorityDesc, @@ -202,6 +218,7 @@ var AllDescs = []*prometheus.Desc{ ClusterCapacityDesc, ClusterAvailableCapacityDesc, QueuePriorityDesc, + QueueLabelDesc, } func Describe(out chan<- *prometheus.Desc) { @@ -265,8 +282,9 @@ func CollectQueueMetrics(queueCounts map[string]int, queueDistinctSchedulingKeyC } } } - for q, priority := range metricsProvider.GetQueuePriorites() { - metrics = append(metrics, NewQueuePriorityMetric(priority, q)) + for _, queue := range metricsProvider.GetAllQueues() { + metrics = append(metrics, NewQueuePriorityMetric(queue.PriorityFactor, queue.Name)) + metrics = append(metrics, NewQueueLabelsMetric(queue.Name, queue.Labels)) } return metrics } @@ -366,3 +384,26 @@ func 
NewQueueUsed(value float64, queue string, cluster string, pool string, reso func NewQueuePriorityMetric(value float64, queue string) prometheus.Metric { return prometheus.MustNewConstMetric(QueuePriorityDesc, prometheus.GaugeValue, value, queue, queue) } + +func NewQueueLabelsMetric(queue string, labels map[string]string) prometheus.Metric { + metricLabels := make([]string, 0, len(labels)+len(queueLabelDefaultLabels)) + values := make([]string, 0, len(labels)+len(queueLabelDefaultLabels)) + + metricLabels = append(metricLabels, queueLabelDefaultLabels...) + values = append(values, queue) + values = append(values, queue) + + for key, value := range labels { + metricLabels = append(metricLabels, key) + values = append(values, value) + } + + queueLabelsDesc := prometheus.NewDesc( + queueLabelMetricName, + queueLabelMetricDescription, + metricLabels, + nil, + ) + + return prometheus.MustNewConstMetric(queueLabelsDesc, prometheus.GaugeValue, 1, values...) +} diff --git a/internal/eventingester/ingester.go b/internal/eventingester/ingester.go index db5a402a7c6..d2fe2f8d09d 100644 --- a/internal/eventingester/ingester.go +++ b/internal/eventingester/ingester.go @@ -64,7 +64,7 @@ func Run(config *configuration.EventIngesterConfiguration) { config.SubscriptionName, config.BatchSize, config.BatchDuration, - pulsar.KeyShared, + pulsar.Failover, converter, eventDb, config.MetricsPort, diff --git a/internal/executor/service/resource_cleanup.go b/internal/executor/service/resource_cleanup.go index fa432fed2ec..2657d142750 100644 --- a/internal/executor/service/resource_cleanup.go +++ b/internal/executor/service/resource_cleanup.go @@ -175,13 +175,12 @@ func (r *ResourceCleanupService) canPodBeRemoved(pod *v1.Pod) bool { return false } - lastContainerStart := util.FindLastContainerStartTime(pod) - if lastContainerStart.Add(r.kubernetesConfiguration.MinimumPodAge).After(time.Now()) { + lastChange, err := util.LastStatusChange(pod) + if err == nil && lastChange.Add(r.kubernetesConfiguration.MinimumPodAge).After(time.Now()) { return false } if pod.Status.Phase == v1.PodFailed { - lastChange, err := util.LastStatusChange(pod) if err == nil && lastChange.Add(r.kubernetesConfiguration.FailedPodExpiry).After(time.Now()) { return false } diff --git a/internal/lookoutingesterv2/ingester.go b/internal/lookoutingesterv2/ingester.go index 2df02f2301c..3d02c6148d0 100644 --- a/internal/lookoutingesterv2/ingester.go +++ b/internal/lookoutingesterv2/ingester.go @@ -60,7 +60,7 @@ func Run(config *configuration.LookoutIngesterV2Configuration) { config.SubscriptionName, config.BatchSize, config.BatchDuration, - pulsar.KeyShared, + pulsar.Failover, converter, lookoutDb, config.MetricsPort, diff --git a/internal/scheduler/metrics.go b/internal/scheduler/metrics.go index 23e6765abb1..67a8c5cd838 100644 --- a/internal/scheduler/metrics.go +++ b/internal/scheduler/metrics.go @@ -7,6 +7,7 @@ import ( "github.com/google/uuid" "github.com/prometheus/client_golang/prometheus" + "golang.org/x/exp/maps" "k8s.io/utils/clock" "github.com/armadaproject/armada/internal/common/armadacontext" @@ -14,18 +15,20 @@ import ( armadamaps "github.com/armadaproject/armada/internal/common/maps" commonmetrics "github.com/armadaproject/armada/internal/common/metrics" "github.com/armadaproject/armada/internal/common/resource" + "github.com/armadaproject/armada/internal/common/slices" "github.com/armadaproject/armada/internal/scheduler/database" "github.com/armadaproject/armada/internal/scheduler/floatingresources" 
"github.com/armadaproject/armada/internal/scheduler/jobdb" "github.com/armadaproject/armada/internal/scheduler/queue" "github.com/armadaproject/armada/internal/scheduler/schedulerobjects" + "github.com/armadaproject/armada/pkg/api" ) // Metrics Recorders associated with a queue type queueState struct { queuedJobRecorder *commonmetrics.JobMetricsRecorder runningJobRecorder *commonmetrics.JobMetricsRecorder - priority float64 + queue *api.Queue } // metricProvider is a simple implementation of QueueMetricProvider @@ -33,9 +36,9 @@ type metricProvider struct { queueStates map[string]*queueState } -func (m metricProvider) GetQueuePriorites() map[string]float64 { - return armadamaps.MapValues(m.queueStates, func(v *queueState) float64 { - return v.priority +func (m metricProvider) GetAllQueues() []*api.Queue { + return slices.Map(maps.Values(m.queueStates), func(state *queueState) *api.Queue { + return state.queue }) } @@ -154,7 +157,7 @@ func (c *MetricsCollector) updateQueueMetrics(ctx *armadacontext.Context) ([]pro provider.queueStates[queue.Name] = &queueState{ queuedJobRecorder: commonmetrics.NewJobMetricsRecorder(), runningJobRecorder: commonmetrics.NewJobMetricsRecorder(), - priority: queue.PriorityFactor, + queue: queue, } queuedJobsCount[queue.Name] = 0 schedulingKeysByQueue[queue.Name] = map[schedulerobjects.SchedulingKey]bool{} @@ -200,7 +203,14 @@ func (c *MetricsCollector) updateQueueMetrics(ctx *armadacontext.Context) ([]pro continue } recorder = qs.queuedJobRecorder - timeInState = currentTime.Sub(time.Unix(0, job.Created())) + queuedTime := time.Unix(0, job.Created()) + if job.HasRuns() { + terminationTimeOfLatestRun := job.LatestRun().TerminatedTime() + if terminationTimeOfLatestRun != nil && terminationTimeOfLatestRun.After(queuedTime) { + queuedTime = *terminationTimeOfLatestRun + } + } + timeInState = currentTime.Sub(queuedTime) queuedJobsCount[job.Queue()]++ schedulingKeysByQueue[job.Queue()][job.SchedulingKey()] = true } else { diff --git a/internal/scheduler/metrics_test.go b/internal/scheduler/metrics_test.go index 11a8ee455a3..11ae37ea65b 100644 --- a/internal/scheduler/metrics_test.go +++ b/internal/scheduler/metrics_test.go @@ -5,6 +5,7 @@ import ( "time" "github.com/golang/mock/gomock" + "github.com/google/uuid" "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -31,6 +32,20 @@ func TestMetricsCollector_TestCollect_QueueMetrics(t *testing.T) { runningJobs[i] = testfixtures.TestRunningJobDbJob(startTime) } + // Run that has been returned + runStartTime := testfixtures.BaseTime.Add(-time.Duration(400) * time.Second).UnixNano() + runTerminatedTime := testfixtures.BaseTime.Add(-time.Duration(200) * time.Second) + run := jobdb.MinimalRun(uuid.New(), runStartTime) + run = run.WithFailed(true) + run = run.WithReturned(true) + run = run.WithTerminatedTime(&runTerminatedTime) + + jobCreationTime := testfixtures.BaseTime.Add(-time.Duration(500) * time.Second).UnixNano() + jobWithTerminatedRun := testfixtures.TestQueuedJobDbJob().WithCreated(jobCreationTime).WithUpdatedRun(run) + + queue := testfixtures.MakeTestQueue() + queue.Labels = map[string]string{"foo": "bar"} + tests := map[string]struct { initialJobs []*jobdb.Job defaultPool string @@ -40,7 +55,7 @@ func TestMetricsCollector_TestCollect_QueueMetrics(t *testing.T) { }{ "queued metrics": { initialJobs: queuedJobs, - queues: []*api.Queue{testfixtures.MakeTestQueue()}, + queues: []*api.Queue{queue}, defaultPool: testfixtures.TestPool, expected: 
[]prometheus.Metric{ commonmetrics.NewQueueSizeMetric(3.0, testfixtures.TestQueue), @@ -61,11 +76,40 @@ func TestMetricsCollector_TestCollect_QueueMetrics(t *testing.T) { commonmetrics.NewMaxQueueResources(gb, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue, "memory"), commonmetrics.NewMedianQueueResources(gb, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue, "memory"), commonmetrics.NewCountQueueResources(3, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue, "memory"), + commonmetrics.NewQueuePriorityMetric(100, testfixtures.TestQueue), + commonmetrics.NewQueueLabelsMetric(testfixtures.TestQueue, map[string]string{"foo": "bar"}), + }, + }, + "queued metrics for requeued job": { + // This job was been requeued and has a terminated run + // The queue duration stats should count from the time the last run finished instead of job creation time + initialJobs: []*jobdb.Job{jobWithTerminatedRun}, + queues: []*api.Queue{queue}, + defaultPool: testfixtures.TestPool, + expected: []prometheus.Metric{ + commonmetrics.NewQueueSizeMetric(1.0, testfixtures.TestQueue), + commonmetrics.NewQueueDistinctSchedulingKeyMetric(1.0, testfixtures.TestQueue), + commonmetrics.NewQueueDuration(1, 200, + map[float64]uint64{60: 0, 600: 1, 1800: 1, 3600: 1, 10800: 1, 43200: 1, 86400: 1, 172800: 1, 604800: 1}, + testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue), + commonmetrics.NewMinQueueDuration(200, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue), + commonmetrics.NewMaxQueueDuration(200, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue), + commonmetrics.NewMedianQueueDuration(200, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue), + commonmetrics.NewQueueResources(1, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue, "cpu"), + commonmetrics.NewMinQueueResources(1, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue, "cpu"), + commonmetrics.NewMaxQueueResources(1, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue, "cpu"), + commonmetrics.NewMedianQueueResources(1, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue, "cpu"), + commonmetrics.NewCountQueueResources(1, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue, "cpu"), + commonmetrics.NewQueueResources(gb, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue, "memory"), + commonmetrics.NewMinQueueResources(gb, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue, "memory"), + commonmetrics.NewMaxQueueResources(gb, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue, "memory"), + commonmetrics.NewMedianQueueResources(gb, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue, "memory"), + commonmetrics.NewCountQueueResources(1, testfixtures.TestPool, testfixtures.TestDefaultPriorityClass, testfixtures.TestQueue, "memory"), }, }, "running metrics": { initialJobs: runningJobs, - queues: []*api.Queue{testfixtures.MakeTestQueue()}, + queues: []*api.Queue{queue}, defaultPool: testfixtures.TestPool, expected: []prometheus.Metric{ commonmetrics.NewQueueSizeMetric(0.0, testfixtures.TestQueue), diff --git a/pkg/api/api.swagger.go 
b/pkg/api/api.swagger.go index 43f29c0aaf6..89ed2667b1a 100644 --- a/pkg/api/api.swagger.go +++ b/pkg/api/api.swagger.go @@ -1888,6 +1888,12 @@ func SwaggerJsonTemplate() string { " }\n" + " },\n" + " \"labels\": {\n" + + " \"type\": \"object\",\n" + + " \"additionalProperties\": {\n" + + " \"type\": \"string\"\n" + + " }\n" + + " },\n" + + " \"labelsDeprecated\": {\n" + " \"type\": \"array\",\n" + " \"title\": \"A list of Kubernetes-like key-value labels, e.g. armadaproject.io/priority=critical\",\n" + " \"items\": {\n" + diff --git a/pkg/api/api.swagger.json b/pkg/api/api.swagger.json index 6266a58ca09..38db0a3c2fc 100644 --- a/pkg/api/api.swagger.json +++ b/pkg/api/api.swagger.json @@ -1877,6 +1877,12 @@ } }, "labels": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "labelsDeprecated": { "type": "array", "title": "A list of Kubernetes-like key-value labels, e.g. armadaproject.io/priority=critical", "items": { diff --git a/pkg/api/submit.pb.go b/pkg/api/submit.pb.go index b7bb646e4e0..5edf4bd0777 100644 --- a/pkg/api/submit.pb.go +++ b/pkg/api/submit.pb.go @@ -1170,7 +1170,8 @@ type Queue struct { // Determines whether scheduling is enabled for this queue. Cordoned bool `protobuf:"varint,8,opt,name=cordoned,proto3" json:"cordoned,omitempty"` // A list of Kubernetes-like key-value labels, e.g. armadaproject.io/priority=critical - Labels []string `protobuf:"bytes,9,rep,name=labels,proto3" json:"labels,omitempty"` + LabelsDeprecated []string `protobuf:"bytes,9,rep,name=labels_deprecated,json=labelsDeprecated,proto3" json:"labelsDeprecated,omitempty"` // Deprecated: Do not use. + Labels map[string]string `protobuf:"bytes,10,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (m *Queue) Reset() { *m = Queue{} } @@ -1263,7 +1264,15 @@ func (m *Queue) GetCordoned() bool { return false } -func (m *Queue) GetLabels() []string { +// Deprecated: Do not use. 
+func (m *Queue) GetLabelsDeprecated() []string { + if m != nil { + return m.LabelsDeprecated + } + return nil +} + +func (m *Queue) GetLabels() map[string]string { if m != nil { return m.Labels } @@ -2189,6 +2198,7 @@ func init() { proto.RegisterType((*JobSubmitResponseItem)(nil), "api.JobSubmitResponseItem") proto.RegisterType((*JobSubmitResponse)(nil), "api.JobSubmitResponse") proto.RegisterType((*Queue)(nil), "api.Queue") + proto.RegisterMapType((map[string]string)(nil), "api.Queue.LabelsEntry") proto.RegisterMapType((map[string]*PriorityClassResourceLimits)(nil), "api.Queue.ResourceLimitsByPriorityClassNameEntry") proto.RegisterMapType((map[string]float64)(nil), "api.Queue.ResourceLimitsEntry") proto.RegisterType((*Queue_Permissions)(nil), "api.Queue.Permissions") @@ -2217,197 +2227,199 @@ func init() { func init() { proto.RegisterFile("pkg/api/submit.proto", fileDescriptor_e998bacb27df16c1) } var fileDescriptor_e998bacb27df16c1 = []byte{ - // 3025 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x5a, 0x4b, 0x6c, 0x1b, 0xd7, - 0xb9, 0xd6, 0x88, 0x7a, 0xf1, 0xa7, 0x1e, 0xd4, 0x11, 0x25, 0x8f, 0x68, 0x47, 0x94, 0x27, 0x89, - 0x43, 0x2b, 0xbe, 0x64, 0xac, 0xdc, 0xe0, 0xda, 0xbe, 0xb9, 0xd7, 0x15, 0x29, 0xda, 0x96, 0x6c, - 0xd3, 0x0a, 0x65, 0xe5, 0x51, 0x14, 0x65, 0x87, 0x9c, 0x23, 0x6a, 0x24, 0x72, 0x86, 0x99, 0x19, - 0xca, 0x55, 0x8b, 0x6c, 0x8a, 0x02, 0x5d, 0x74, 0x13, 0xb4, 0xcb, 0x02, 0x4d, 0x81, 0x76, 0x95, - 0xae, 0xbb, 0x29, 0x5a, 0xa0, 0xcb, 0x2e, 0x03, 0x74, 0xd3, 0x6e, 0x88, 0x22, 0x69, 0x1b, 0x80, - 0xbb, 0xee, 0xbb, 0x28, 0xce, 0x63, 0x66, 0xce, 0xf0, 0x4d, 0xd9, 0x4a, 0x36, 0xdd, 0x69, 0xbe, - 0xf3, 0x3f, 0xcf, 0xf9, 0xcf, 0xff, 0x38, 0x14, 0xc4, 0xea, 0x27, 0x95, 0xb4, 0x5a, 0xd7, 0xd3, - 0x76, 0xa3, 0x54, 0xd3, 0x9d, 0x54, 0xdd, 0x32, 0x1d, 0x13, 0x85, 0xd4, 0xba, 0x1e, 0xbf, 0x5c, - 0x31, 0xcd, 0x4a, 0x15, 0xa7, 0x29, 0x54, 0x6a, 0x1c, 0xa6, 0x71, 0xad, 0xee, 0x9c, 0x31, 0x8a, - 0x78, 0xa2, 0x7d, 0xd1, 0xd1, 0x6b, 0xd8, 0x76, 0xd4, 0x5a, 0x9d, 0x13, 0x28, 0x27, 0xb7, 0xec, - 0x94, 0x6e, 0x52, 0xd9, 0x65, 0xd3, 0xc2, 0xe9, 0xd3, 0x9b, 0xe9, 0x0a, 0x36, 0xb0, 0xa5, 0x3a, - 0x58, 0xe3, 0x34, 0x49, 0x81, 0xc6, 0xc0, 0xce, 0x33, 0xd3, 0x3a, 0xd1, 0x8d, 0x4a, 0x37, 0xca, - 0x2b, 0x5c, 0x1d, 0xa1, 0x54, 0x0d, 0xc3, 0x74, 0x54, 0x47, 0x37, 0x0d, 0x9b, 0xaf, 0x7a, 0x4e, - 0x1c, 0x61, 0xb5, 0xea, 0x1c, 0x31, 0x54, 0xf9, 0x24, 0x0c, 0xb1, 0x5d, 0xb3, 0xb4, 0x4f, 0x1d, - 0x2b, 0xe0, 0x0f, 0x1b, 0xd8, 0x76, 0x76, 0x1c, 0x5c, 0x43, 0x9b, 0x30, 0x53, 0xb7, 0x74, 0xd3, - 0xd2, 0x9d, 0x33, 0x59, 0x5a, 0x97, 0x92, 0x52, 0x66, 0xa5, 0xd5, 0x4c, 0x20, 0x17, 0xbb, 0x61, - 0xd6, 0x74, 0x87, 0xfa, 0x5a, 0xf0, 0xe8, 0xd0, 0x5b, 0x10, 0x36, 0xd4, 0x1a, 0xb6, 0xeb, 0x6a, - 0x19, 0xcb, 0xa1, 0x75, 0x29, 0x19, 0xce, 0x5c, 0x6a, 0x35, 0x13, 0x4b, 0x1e, 0x28, 0x70, 0xf9, - 0x94, 0xe8, 0x4d, 0x08, 0x97, 0xab, 0x3a, 0x36, 0x9c, 0xa2, 0xae, 0xc9, 0x33, 0x94, 0x8d, 0xea, - 0x62, 0xe0, 0x8e, 0x26, 0xea, 0x72, 0x31, 0xb4, 0x0f, 0x53, 0x55, 0xb5, 0x84, 0xab, 0xb6, 0x3c, - 0xb1, 0x1e, 0x4a, 0x46, 0x36, 0x5f, 0x4d, 0xa9, 0x75, 0x3d, 0xd5, 0xcd, 0x95, 0xd4, 0x23, 0x4a, - 0x97, 0x33, 0x1c, 0xeb, 0x2c, 0x13, 0x6b, 0x35, 0x13, 0x51, 0xc6, 0x28, 0x88, 0xe5, 0xa2, 0x50, - 0x05, 0x22, 0xc2, 0xc6, 0xc9, 0x93, 0x54, 0xf2, 0x46, 0x6f, 0xc9, 0x5b, 0x3e, 0x31, 0x13, 0xbf, - 0xda, 0x6a, 0x26, 0x96, 0x05, 0x11, 0x82, 0x0e, 0x51, 0x32, 0xfa, 0x91, 0x04, 0x31, 0x0b, 0x7f, - 0xd8, 0xd0, 0x2d, 0xac, 0x15, 0x0d, 0x53, 0xc3, 0x45, 0xee, 0xcc, 0x14, 0x55, 0x79, 0xb3, 0xb7, 
- 0xca, 0x02, 0xe7, 0xca, 0x9b, 0x1a, 0x16, 0x1d, 0x53, 0x5a, 0xcd, 0xc4, 0x15, 0xab, 0x63, 0xd1, - 0x37, 0x40, 0x96, 0x0a, 0xa8, 0x73, 0x1d, 0x3d, 0x81, 0x99, 0xba, 0xa9, 0x15, 0xed, 0x3a, 0x2e, - 0xcb, 0xe3, 0xeb, 0x52, 0x32, 0xb2, 0x79, 0x39, 0xc5, 0x22, 0x8e, 0xda, 0x40, 0xa2, 0x32, 0x75, - 0x7a, 0x33, 0xb5, 0x67, 0x6a, 0xfb, 0x75, 0x5c, 0xa6, 0xe7, 0xb9, 0x58, 0x67, 0x1f, 0x01, 0xd9, - 0xd3, 0x1c, 0x44, 0x7b, 0x10, 0x76, 0x05, 0xda, 0xf2, 0x34, 0x75, 0xa7, 0xaf, 0x44, 0x16, 0x56, - 0xec, 0xc3, 0x0e, 0x84, 0x15, 0xc7, 0x50, 0x16, 0xa6, 0x75, 0xa3, 0x62, 0x61, 0xdb, 0x96, 0xc3, - 0x54, 0x1e, 0xa2, 0x82, 0x76, 0x18, 0x96, 0x35, 0x8d, 0x43, 0xbd, 0x92, 0x59, 0x26, 0x86, 0x71, - 0x32, 0x41, 0x8a, 0xcb, 0x89, 0xee, 0xc1, 0x8c, 0x8d, 0xad, 0x53, 0xbd, 0x8c, 0x6d, 0x19, 0x04, - 0x29, 0xfb, 0x0c, 0xe4, 0x52, 0xa8, 0x31, 0x2e, 0x9d, 0x68, 0x8c, 0x8b, 0x91, 0x18, 0xb7, 0xcb, - 0x47, 0x58, 0x6b, 0x54, 0xb1, 0x25, 0x47, 0xfc, 0x18, 0xf7, 0x40, 0x31, 0xc6, 0x3d, 0x30, 0xae, - 0x42, 0x44, 0x38, 0x2d, 0xf4, 0x32, 0x84, 0x4e, 0x30, 0xbb, 0x58, 0xe1, 0xcc, 0x62, 0xab, 0x99, - 0x98, 0x3b, 0xc1, 0xe2, 0x9d, 0x22, 0xab, 0xe8, 0x3a, 0x4c, 0x9e, 0xaa, 0xd5, 0x06, 0xa6, 0xe7, - 0x12, 0xce, 0x2c, 0xb5, 0x9a, 0x89, 0x05, 0x0a, 0x08, 0x84, 0x8c, 0xe2, 0xce, 0xf8, 0x2d, 0x29, - 0x7e, 0x08, 0xd1, 0xf6, 0x78, 0xbc, 0x10, 0x3d, 0x35, 0xb8, 0xd4, 0x23, 0x08, 0x2f, 0x42, 0x9d, - 0xf2, 0xcf, 0x10, 0xcc, 0x05, 0x8e, 0x1a, 0xdd, 0x81, 0x09, 0xe7, 0xac, 0x8e, 0xa9, 0x9a, 0xf9, - 0xcd, 0xa8, 0x18, 0x0c, 0x4f, 0xcf, 0xea, 0x98, 0xde, 0xf1, 0x79, 0x42, 0x11, 0x08, 0x50, 0xca, - 0x43, 0x94, 0xd7, 0x4d, 0xcb, 0xb1, 0xe5, 0xf1, 0xf5, 0x50, 0x72, 0x8e, 0x29, 0xa7, 0x80, 0xa8, - 0x9c, 0x02, 0xe8, 0x3b, 0xc1, 0x64, 0x10, 0xa2, 0x41, 0xf3, 0x72, 0x67, 0xe8, 0x9d, 0x3f, 0x0b, - 0xdc, 0x86, 0x88, 0x53, 0xb5, 0x8b, 0xd8, 0x50, 0x4b, 0x55, 0xac, 0xc9, 0x13, 0xeb, 0x52, 0x72, - 0x26, 0x23, 0xb7, 0x9a, 0x89, 0x98, 0x43, 0x76, 0x94, 0xa2, 0x02, 0x2f, 0xf8, 0x28, 0xcd, 0x99, - 0xd8, 0x72, 0x8a, 0x24, 0x8b, 0xca, 0x93, 0x42, 0xce, 0xc4, 0x96, 0x93, 0x57, 0x6b, 0x38, 0x90, - 0x33, 0x39, 0x86, 0xee, 0xc2, 0x5c, 0xc3, 0xc6, 0xc5, 0x72, 0xb5, 0x61, 0x3b, 0xd8, 0xda, 0xd9, - 0x93, 0xa7, 0xa8, 0xc6, 0x78, 0xab, 0x99, 0x58, 0x69, 0xd8, 0x38, 0xeb, 0xe2, 0x02, 0xf3, 0xac, - 0x88, 0x7f, 0x55, 0x21, 0xa6, 0x38, 0x30, 0x17, 0xb8, 0x97, 0xe8, 0x56, 0x97, 0x23, 0xe7, 0x14, - 0xf4, 0xc8, 0x51, 0xe7, 0x91, 0x8f, 0x7c, 0xe0, 0xca, 0x5f, 0x24, 0x88, 0xb6, 0xe7, 0x5c, 0xc2, - 0xff, 0x61, 0x03, 0x37, 0x30, 0x77, 0x90, 0xf2, 0x53, 0x40, 0xe4, 0xa7, 0x00, 0xfa, 0x6f, 0x80, - 0x63, 0xb3, 0x54, 0xb4, 0x31, 0x2d, 0x64, 0xe3, 0xfe, 0xa1, 0x1c, 0x9b, 0xa5, 0x7d, 0xdc, 0x56, - 0xc8, 0x5c, 0x0c, 0x69, 0xb0, 0x48, 0xb8, 0x2c, 0xa6, 0xaf, 0x48, 0x08, 0xdc, 0x60, 0x5b, 0xed, - 0x59, 0x06, 0x32, 0x2f, 0xb5, 0x9a, 0x89, 0xd5, 0x63, 0xb3, 0x24, 0x60, 0xa2, 0x47, 0x0b, 0x6d, - 0x4b, 0xca, 0x2f, 0x25, 0x58, 0xdc, 0x35, 0x4b, 0x7b, 0x16, 0x26, 0x04, 0x5f, 0x99, 0x73, 0xff, - 0x05, 0xd3, 0x84, 0x4b, 0xd7, 0x98, 0x4b, 0x61, 0x56, 0x7f, 0x8f, 0xcd, 0xd2, 0x8e, 0x16, 0xa8, - 0xbf, 0x0c, 0x51, 0xfe, 0xc5, 0x4e, 0x20, 0xab, 0x1a, 0x65, 0x5c, 0x75, 0x8d, 0xdc, 0x80, 0x29, - 0x26, 0x43, 0xb4, 0x92, 0x32, 0x88, 0x56, 0x52, 0xe0, 0x9c, 0x56, 0x7a, 0xdb, 0x10, 0x1a, 0xb8, - 0x0d, 0x82, 0x43, 0x13, 0x83, 0x1d, 0x42, 0x37, 0x60, 0xca, 0xc2, 0xaa, 0x6d, 0x1a, 0xfc, 0x8e, - 0x52, 0x6a, 0x86, 0x88, 0xd4, 0x0c, 0x51, 0xfe, 0x2e, 0xc1, 0xd2, 0x2e, 0x35, 0x2a, 0xb8, 0x03, - 0x41, 0xaf, 0xa4, 0x51, 0xbd, 0x1a, 0x1f, 0xe8, 0xd5, 0x5d, 0x98, 0x3a, 0xd4, 0xab, 0x0e, 0xb6, - 0xe8, 0x0e, 0x44, 0x36, 
0x17, 0xbd, 0xc0, 0xc3, 0xce, 0x3d, 0xba, 0xc0, 0x2c, 0x67, 0x44, 0xa2, - 0xe5, 0x0c, 0x11, 0xfc, 0x9c, 0x18, 0xc2, 0xcf, 0x87, 0x30, 0x2b, 0xca, 0x46, 0xff, 0x0b, 0x53, - 0xb6, 0xa3, 0x3a, 0xd8, 0x96, 0xa5, 0xf5, 0x50, 0x72, 0x7e, 0x73, 0xce, 0x53, 0x4f, 0x50, 0x26, - 0x8c, 0x11, 0x88, 0xc2, 0x18, 0xa2, 0x7c, 0xb9, 0x00, 0xa1, 0x5d, 0xb3, 0x84, 0xd6, 0x61, 0xdc, - 0xdb, 0x9c, 0x68, 0xab, 0x99, 0x98, 0xd5, 0xc5, 0x6d, 0x19, 0xd7, 0xb5, 0x60, 0x9f, 0x39, 0x37, - 0x64, 0x9f, 0x79, 0xe1, 0x11, 0x15, 0x68, 0x9a, 0xa7, 0x87, 0x6e, 0x9a, 0x33, 0x5e, 0xff, 0xcb, - 0x7a, 0xa2, 0x98, 0xbb, 0x67, 0x23, 0xb4, 0xbb, 0xef, 0x06, 0x2b, 0x1c, 0x04, 0x93, 0xce, 0xf9, - 0xeb, 0xda, 0x69, 0x8f, 0xe6, 0x36, 0x42, 0x15, 0xac, 0x7b, 0x0a, 0x5e, 0x74, 0x2f, 0x7b, 0x1d, - 0x26, 0xcd, 0x67, 0x06, 0xb6, 0xf8, 0x10, 0x41, 0x77, 0x9d, 0x02, 0xe2, 0xae, 0x53, 0x00, 0x61, - 0xb8, 0x4c, 0xb7, 0xbf, 0x48, 0x3f, 0xed, 0x23, 0xbd, 0x5e, 0x6c, 0xd8, 0xd8, 0x2a, 0x56, 0x2c, - 0xb3, 0x51, 0xb7, 0xe5, 0x05, 0x7a, 0xb7, 0xaf, 0xb5, 0x9a, 0x09, 0x85, 0x92, 0x3d, 0x71, 0xa9, - 0x0e, 0x6c, 0x6c, 0xdd, 0xa7, 0x34, 0x82, 0x4c, 0xb9, 0x17, 0x0d, 0xfa, 0xa1, 0x04, 0xd7, 0xca, - 0x66, 0xad, 0x4e, 0xba, 0x05, 0xac, 0x15, 0xfb, 0xa9, 0x5c, 0x5a, 0x97, 0x92, 0xb3, 0x99, 0x37, - 0x5a, 0xcd, 0xc4, 0x0d, 0x9f, 0xe3, 0x9d, 0xc1, 0xca, 0x95, 0xc1, 0xd4, 0x81, 0x61, 0x6e, 0x62, - 0xc8, 0x61, 0x4e, 0x1c, 0x0c, 0x26, 0x5f, 0xf8, 0x60, 0x30, 0xfb, 0x22, 0x06, 0x83, 0x5f, 0x48, - 0xb0, 0xce, 0x5b, 0x6c, 0xdd, 0xa8, 0x14, 0x2d, 0x6c, 0x9b, 0x0d, 0xab, 0x8c, 0x8b, 0x3c, 0x34, - 0x6a, 0xd8, 0x70, 0x6c, 0x79, 0x99, 0xda, 0x9e, 0xec, 0xa6, 0xa9, 0xc0, 0x19, 0x0a, 0x02, 0x7d, - 0xe6, 0x46, 0xab, 0x99, 0x48, 0xfa, 0x52, 0xbb, 0xd1, 0x08, 0xc6, 0xac, 0xf5, 0xa7, 0x44, 0x0f, - 0x61, 0xba, 0x6c, 0x61, 0x32, 0xa4, 0xd3, 0x66, 0x2b, 0xb2, 0x19, 0x4f, 0xb1, 0x29, 0x3d, 0xe5, - 0x3e, 0x0a, 0xa4, 0x9e, 0xba, 0x8f, 0x02, 0x6c, 0x86, 0xe1, 0xe4, 0xe2, 0x0c, 0xc3, 0x21, 0x71, - 0x10, 0x9a, 0x7f, 0x21, 0x83, 0x50, 0xf4, 0x39, 0x06, 0xa1, 0x6f, 0x41, 0xe4, 0xe4, 0x96, 0x5d, - 0x74, 0x0d, 0x5a, 0xa4, 0xa2, 0xae, 0x8a, 0xdb, 0xec, 0xbf, 0x56, 0x90, 0xcd, 0xe6, 0x56, 0xb2, - 0xfe, 0xf6, 0xe4, 0x96, 0xbd, 0xd3, 0x61, 0x22, 0xf8, 0x28, 0x49, 0x4d, 0x44, 0x3a, 0xd7, 0x26, - 0xa3, 0xde, 0xe1, 0xc2, 0xed, 0xf6, 0xe4, 0xf2, 0xef, 0x36, 0xb9, 0x1c, 0x0d, 0x8e, 0x6f, 0xb1, - 0x61, 0xc7, 0x37, 0xb4, 0x03, 0x8b, 0xec, 0xee, 0x3a, 0x4e, 0xb5, 0x68, 0xe3, 0xb2, 0x69, 0x68, - 0xb6, 0xbc, 0xb2, 0x2e, 0x25, 0x43, 0xac, 0x13, 0xa3, 0x8b, 0x4f, 0x9d, 0xea, 0x3e, 0x5b, 0x12, - 0x3b, 0xb1, 0xb6, 0xa5, 0xff, 0x4c, 0x82, 0xe7, 0x9e, 0x0a, 0xfe, 0x21, 0xc1, 0xca, 0x2e, 0xe9, - 0x6b, 0x79, 0x8e, 0xd2, 0xbf, 0x87, 0xdd, 0x0e, 0x49, 0x68, 0xcb, 0xa4, 0x21, 0xda, 0xb2, 0x0b, - 0x2f, 0xea, 0x6f, 0xc3, 0xac, 0x81, 0x9f, 0x15, 0xdb, 0x92, 0x2e, 0xad, 0x9f, 0x06, 0x7e, 0xb6, - 0xd7, 0x99, 0x77, 0x23, 0x02, 0xac, 0xfc, 0x7a, 0x1c, 0x2e, 0x75, 0x38, 0x6a, 0xd7, 0x4d, 0xc3, - 0xc6, 0xe8, 0x67, 0x12, 0xc8, 0x96, 0xbf, 0x40, 0x8f, 0x98, 0x64, 0xbe, 0x46, 0xd5, 0x61, 0xbe, - 0x47, 0x36, 0x6f, 0xbb, 0x05, 0xb6, 0x9b, 0x80, 0x54, 0xa1, 0x8d, 0xb9, 0xc0, 0x78, 0x59, 0xe5, - 0x7d, 0xb5, 0xd5, 0x4c, 0x5c, 0xb5, 0xba, 0x53, 0x08, 0xd6, 0x5e, 0xea, 0x41, 0x12, 0xb7, 0xe0, - 0x4a, 0x3f, 0xf9, 0x17, 0x12, 0x16, 0x06, 0x2c, 0x0b, 0x23, 0x12, 0xf3, 0x92, 0x3e, 0x61, 0x8e, - 0x32, 0x38, 0x5c, 0x87, 0x49, 0x6c, 0x59, 0xa6, 0x25, 0xea, 0xa4, 0x80, 0x48, 0x4a, 0x01, 0xe5, - 0x23, 0x3a, 0x49, 0x05, 0xf5, 0xa1, 0x23, 0x40, 0x6c, 0x8a, 0x63, 0xdf, 0x7c, 0x8c, 0x63, 0xe7, - 0x11, 0x6f, 0x1f, 0xe3, 0x7c, 0x1b, 0x33, 0x6b, 
0xad, 0x66, 0x22, 0x4e, 0x87, 0x35, 0x1f, 0x14, - 0x77, 0x3a, 0xda, 0xbe, 0xa6, 0xfc, 0x3e, 0x0c, 0x93, 0xb4, 0xd0, 0xa3, 0x6b, 0x30, 0x41, 0xc7, - 0x7f, 0xe6, 0x1d, 0x1d, 0x81, 0x8d, 0xe0, 0xe8, 0x4f, 0xd7, 0x51, 0x0e, 0x16, 0xdc, 0x40, 0x2c, - 0x1e, 0xaa, 0x65, 0x87, 0x7b, 0x29, 0x65, 0xae, 0xb4, 0x9a, 0x09, 0xd9, 0x5d, 0xba, 0x47, 0x57, - 0x04, 0xe6, 0xf9, 0xe0, 0x0a, 0xba, 0x0d, 0x11, 0xda, 0xaf, 0xb0, 0xf6, 0x85, 0xcf, 0x73, 0x34, - 0xeb, 0x12, 0x98, 0xb5, 0x1d, 0x62, 0xd6, 0xf5, 0x51, 0x72, 0x1d, 0x68, 0x97, 0xe3, 0xf2, 0xb2, - 0xd1, 0x89, 0x5e, 0x07, 0x8a, 0x77, 0x30, 0x47, 0x04, 0x18, 0x55, 0x60, 0xc1, 0x2b, 0xed, 0x55, - 0xbd, 0xa6, 0x3b, 0xee, 0xcb, 0xec, 0x1a, 0xdd, 0x58, 0xba, 0x19, 0x5e, 0x2d, 0x7f, 0x44, 0x09, - 0x58, 0x34, 0x93, 0xcd, 0x95, 0xad, 0xc0, 0x42, 0xa0, 0x35, 0x99, 0x0f, 0xae, 0xa1, 0xdf, 0x48, - 0x70, 0xad, 0x4d, 0x53, 0xb1, 0x74, 0xe6, 0xdd, 0xe2, 0x62, 0xb9, 0xaa, 0xda, 0x36, 0x7b, 0x72, - 0x99, 0x16, 0xde, 0x69, 0xbb, 0x19, 0x90, 0x39, 0x73, 0x6f, 0x73, 0x96, 0x30, 0xe5, 0xd5, 0x1a, - 0x66, 0x36, 0xa5, 0x5b, 0xcd, 0xc4, 0xeb, 0xd6, 0x20, 0x5a, 0x61, 0x2b, 0xae, 0x0e, 0x24, 0x46, - 0xfb, 0x10, 0xa9, 0x63, 0xab, 0xa6, 0xdb, 0x36, 0xed, 0xe3, 0xd9, 0x1b, 0xf2, 0x8a, 0x60, 0xdb, - 0x9e, 0xbf, 0xca, 0x76, 0x5d, 0x20, 0x17, 0x77, 0x5d, 0x80, 0x49, 0xcf, 0x58, 0x36, 0x2d, 0xcd, - 0x34, 0x30, 0x7b, 0x94, 0x9f, 0xe1, 0xc3, 0x12, 0xc7, 0x02, 0xc3, 0x12, 0xc7, 0xc8, 0x18, 0x28, - 0x0c, 0x25, 0xe1, 0xfe, 0xe3, 0x47, 0xfc, 0x4b, 0x09, 0x22, 0x82, 0x65, 0xa8, 0x00, 0x33, 0x76, - 0xa3, 0x74, 0x8c, 0xcb, 0x5e, 0x26, 0x5b, 0xeb, 0xee, 0x43, 0x6a, 0x9f, 0x91, 0xf1, 0x2e, 0x85, - 0xf3, 0x04, 0xba, 0x14, 0x8e, 0xd1, 0x5c, 0x82, 0xad, 0x12, 0x7b, 0xfe, 0x71, 0x73, 0x09, 0x01, - 0x02, 0xb9, 0x84, 0x00, 0xf1, 0x0f, 0x60, 0x9a, 0xcb, 0x25, 0x37, 0xeb, 0x44, 0x37, 0x34, 0xf1, - 0x66, 0x91, 0x6f, 0xf1, 0x66, 0x91, 0x6f, 0xef, 0x06, 0x8e, 0xf7, 0xbf, 0x81, 0x71, 0x1d, 0x96, - 0xba, 0xc4, 0xe7, 0x39, 0xb2, 0xa1, 0x34, 0xb0, 0x26, 0xff, 0x5c, 0x82, 0x6b, 0xc3, 0x85, 0xe2, - 0x70, 0xea, 0x1f, 0x8a, 0xea, 0xdd, 0xe1, 0x2d, 0x20, 0xb0, 0x4d, 0xdb, 0xa0, 0x74, 0xfd, 0x93, - 0x49, 0xb8, 0xdc, 0x87, 0x9f, 0x34, 0xf5, 0xab, 0x35, 0xf5, 0xbb, 0x7a, 0xad, 0x51, 0xf3, 0x3b, - 0xfa, 0x43, 0x4b, 0x2d, 0x93, 0x62, 0xc2, 0xe3, 0xe2, 0xff, 0x06, 0x59, 0x91, 0x7a, 0xcc, 0x24, - 0xb8, 0xe8, 0x3d, 0xce, 0x2f, 0x54, 0xb9, 0x5a, 0x77, 0x0a, 0xb1, 0xca, 0xf5, 0x20, 0x41, 0xbf, - 0x95, 0xe0, 0x6a, 0x4f, 0x13, 0x69, 0xc6, 0x30, 0xcd, 0x2a, 0x8d, 0xb8, 0xc8, 0x66, 0xf6, 0xbc, - 0xa6, 0x66, 0xce, 0xf6, 0x4c, 0xb3, 0xca, 0x0c, 0x7e, 0xbd, 0xd5, 0x4c, 0xbc, 0x56, 0xeb, 0x47, - 0x27, 0x98, 0xfd, 0x52, 0x5f, 0x42, 0x52, 0xa2, 0xfb, 0x6d, 0xce, 0x45, 0x05, 0xa5, 0x32, 0xd8, - 0xcd, 0xe1, 0x54, 0x3f, 0x09, 0x06, 0xe4, 0x2b, 0x9d, 0xfb, 0x4b, 0x04, 0x8e, 0x18, 0x94, 0xbf, - 0x1b, 0x87, 0xc4, 0x00, 0x19, 0xe8, 0x57, 0x43, 0x04, 0xe6, 0xd6, 0x30, 0xd6, 0x5c, 0x68, 0x70, - 0x7e, 0x1d, 0xe7, 0xab, 0xe4, 0x20, 0x4c, 0x93, 0xf4, 0x23, 0xdd, 0x76, 0xd0, 0x2d, 0x98, 0xa2, - 0x4d, 0xb0, 0x9b, 0xc4, 0xc1, 0x4f, 0xe2, 0xac, 0x20, 0xb0, 0x55, 0xb1, 0x20, 0x30, 0x44, 0x39, - 0x00, 0xc4, 0x1e, 0x3e, 0xab, 0x42, 0xe7, 0x88, 0xee, 0xc2, 0x5c, 0x99, 0xa1, 0x58, 0x13, 0x3a, - 0x7c, 0xfa, 0xab, 0x85, 0xb7, 0x10, 0xec, 0xf3, 0x67, 0x45, 0x5c, 0xb9, 0x0d, 0x0b, 0x54, 0xfb, - 0x7d, 0xec, 0x3d, 0x7c, 0x0f, 0xd9, 0x3a, 0x29, 0x6f, 0x03, 0xa2, 0xac, 0x59, 0x5a, 0xe1, 0x46, - 0xe5, 0xfe, 0x7f, 0x88, 0x51, 0xee, 0x03, 0xa3, 0x7c, 0x2e, 0xfe, 0xbb, 0x20, 0xef, 0x3b, 0x16, - 0x56, 0x6b, 0xba, 0x51, 0x69, 0xf7, 0xe0, 0x65, 0x08, 0x19, 0x8d, 0x1a, 
0x15, 0x31, 0xc7, 0x8e, - 0xd1, 0x68, 0xd4, 0xc4, 0x63, 0x34, 0x1a, 0x35, 0xcf, 0xfc, 0x6d, 0x5c, 0xc5, 0x0e, 0x1e, 0x55, - 0xfd, 0xa7, 0x12, 0x00, 0x7b, 0xa7, 0xdd, 0x31, 0x0e, 0xcd, 0xa1, 0xdb, 0xcd, 0xdb, 0x10, 0xa1, - 0xe7, 0xa9, 0x15, 0x8f, 0x4d, 0x5a, 0x78, 0xa5, 0xe4, 0x24, 0xeb, 0x13, 0x19, 0xbc, 0x6b, 0x06, - 0xaa, 0x2f, 0xf8, 0x28, 0x61, 0xad, 0x62, 0xd5, 0x76, 0x59, 0x43, 0x3e, 0x2b, 0x83, 0xdb, 0x59, - 0x7d, 0x54, 0x79, 0x06, 0x4b, 0x6c, 0xaf, 0xeb, 0x9a, 0xea, 0xf8, 0xe3, 0xd2, 0x5b, 0xe2, 0x2f, - 0x1c, 0xc1, 0x58, 0xec, 0x37, 0xbf, 0x8d, 0x30, 0x0e, 0x34, 0x40, 0xce, 0xa8, 0x4e, 0xf9, 0xa8, - 0x9b, 0xf6, 0x0f, 0x60, 0xee, 0x50, 0xd5, 0xab, 0xee, 0xcb, 0x9f, 0x7b, 0x23, 0x64, 0xdf, 0x8a, - 0x20, 0x03, 0x0b, 0x6a, 0xc6, 0xf2, 0x4e, 0xfb, 0x2d, 0x99, 0x15, 0x71, 0xcf, 0xdf, 0x2c, 0x7d, - 0x1b, 0xfa, 0xba, 0xfc, 0x6d, 0xd3, 0x3e, 0xd8, 0xdf, 0x20, 0xc3, 0x08, 0xfe, 0x46, 0x20, 0x9c, - 0x33, 0xb4, 0xc7, 0xaa, 0x75, 0x82, 0x2d, 0xe5, 0x63, 0x09, 0x96, 0x83, 0x37, 0xe3, 0x31, 0xb6, - 0x6d, 0xb5, 0x82, 0xd1, 0xff, 0x8c, 0xe6, 0xff, 0x83, 0x31, 0xff, 0x19, 0x3e, 0x84, 0x0d, 0x8d, - 0x17, 0x95, 0x79, 0xca, 0xe6, 0xe9, 0x63, 0xf7, 0x0b, 0x8b, 0x0d, 0xe0, 0x83, 0xb1, 0x02, 0xa1, - 0xcf, 0x4c, 0xc3, 0x24, 0x3e, 0xc5, 0x86, 0xb3, 0x11, 0x87, 0x88, 0xf0, 0x1b, 0x34, 0x8a, 0xc0, - 0x34, 0xff, 0x8c, 0x8e, 0x6d, 0x5c, 0x87, 0x88, 0xf0, 0x63, 0x25, 0x9a, 0x85, 0x99, 0xbc, 0xa9, - 0xe1, 0x3d, 0xd3, 0x72, 0xa2, 0x63, 0xe4, 0xeb, 0x01, 0x56, 0xb5, 0x2a, 0x21, 0x95, 0x36, 0x3e, - 0x91, 0x60, 0xc6, 0xfd, 0xe1, 0x03, 0x01, 0x4c, 0xbd, 0x73, 0x90, 0x3b, 0xc8, 0x6d, 0x47, 0xc7, - 0x88, 0xc0, 0xbd, 0x5c, 0x7e, 0x7b, 0x27, 0x7f, 0x3f, 0x2a, 0x91, 0x8f, 0xc2, 0x41, 0x3e, 0x4f, - 0x3e, 0xc6, 0xd1, 0x1c, 0x84, 0xf7, 0x0f, 0xb2, 0xd9, 0x5c, 0x6e, 0x3b, 0xb7, 0x1d, 0x0d, 0x11, - 0xa6, 0x7b, 0x5b, 0x3b, 0x8f, 0x72, 0xdb, 0xd1, 0x09, 0x42, 0x77, 0x90, 0x7f, 0x98, 0x7f, 0xf2, - 0x5e, 0x3e, 0x3a, 0xc9, 0xe8, 0x32, 0x8f, 0x77, 0x9e, 0x3e, 0xcd, 0x6d, 0x47, 0xa7, 0x08, 0xdd, - 0xa3, 0xdc, 0xd6, 0x7e, 0x6e, 0x3b, 0x3a, 0x4d, 0x96, 0xf6, 0x0a, 0xb9, 0xdc, 0xe3, 0x3d, 0xb2, - 0x34, 0x43, 0x3e, 0xb3, 0x5b, 0xf9, 0x6c, 0xee, 0x11, 0x91, 0x12, 0x26, 0x16, 0x16, 0x72, 0xbb, - 0xb9, 0x2c, 0x59, 0x84, 0xcd, 0x3f, 0x4c, 0xc0, 0x2c, 0xdd, 0x50, 0xf7, 0x49, 0xed, 0x4d, 0x88, - 0xb0, 0x53, 0x65, 0x53, 0xa9, 0xb0, 0xe5, 0xf1, 0x95, 0x8e, 0xc7, 0xce, 0x1c, 0xd9, 0x3c, 0x65, - 0x0c, 0xdd, 0x85, 0x59, 0x81, 0xc9, 0x46, 0xf3, 0x3e, 0x17, 0x29, 0x22, 0xf1, 0x97, 0xe8, 0x77, - 0xaf, 0x40, 0x53, 0xc6, 0x88, 0x56, 0x76, 0x77, 0x46, 0xd4, 0x2a, 0x30, 0x0d, 0xd6, 0x1a, 0xbc, - 0x9d, 0xca, 0x18, 0xfa, 0x06, 0x44, 0x58, 0x2e, 0x65, 0x5a, 0x2f, 0xf9, 0xfc, 0x81, 0x14, 0xdb, - 0xc7, 0x84, 0x14, 0xcc, 0xdc, 0xc7, 0x0e, 0x63, 0x8f, 0xf9, 0xec, 0x7e, 0x66, 0x8f, 0x0b, 0xae, - 0x28, 0x63, 0x68, 0x17, 0xc2, 0x2e, 0xbd, 0x8d, 0x98, 0x7d, 0xbd, 0x6a, 0x42, 0x3c, 0xde, 0x65, - 0x99, 0x5f, 0x0c, 0x65, 0xec, 0x0d, 0x89, 0x58, 0xcf, 0x0a, 0x59, 0x87, 0xf5, 0x81, 0xfa, 0xd6, - 0xc7, 0xfa, 0x6d, 0x98, 0x73, 0x8b, 0x19, 0x93, 0xb1, 0x2a, 0xa4, 0xb2, 0x60, 0x95, 0xeb, 0x2d, - 0x65, 0xf3, 0xc7, 0x61, 0x98, 0x62, 0x6f, 0x21, 0xe8, 0x5d, 0x00, 0xf6, 0x17, 0xcd, 0xff, 0xcb, - 0x5d, 0x7f, 0xf0, 0x8e, 0xaf, 0x74, 0x7f, 0x40, 0x51, 0x56, 0x7f, 0xf0, 0xa7, 0xbf, 0xfd, 0x74, - 0x7c, 0x49, 0x99, 0x4f, 0x9f, 0xde, 0x4c, 0x1f, 0x9b, 0x25, 0xfe, 0xff, 0x79, 0x77, 0xa4, 0x0d, - 0xf4, 0x1e, 0x00, 0x6b, 0x25, 0x82, 0x72, 0x03, 0xbf, 0xab, 0xc6, 0xd9, 0x06, 0x74, 0xb6, 0x1c, - 0x9d, 0x82, 0x59, 0x3f, 0x41, 0x04, 0x7f, 0x1b, 0x66, 0x3d, 0xc1, 0xfb, 0xd8, 0x41, 0xb2, 0xf0, - 
0x53, 0x69, 0x50, 0x7a, 0x2f, 0xff, 0xaf, 0x50, 0xe1, 0x2b, 0xca, 0x22, 0x17, 0x6e, 0x63, 0x47, - 0x90, 0x6f, 0x40, 0x54, 0x7c, 0xb6, 0xa3, 0xe6, 0x5f, 0xee, 0xfe, 0xa0, 0xc7, 0xd4, 0x5c, 0xe9, - 0xf7, 0xda, 0xa7, 0x24, 0xa8, 0xb2, 0x55, 0x25, 0xe6, 0x7a, 0x22, 0xbc, 0xdc, 0x61, 0xa2, 0xef, - 0x03, 0x88, 0xf0, 0x7f, 0x0a, 0xa0, 0xaa, 0xbc, 0xad, 0x0e, 0xfe, 0xa7, 0x40, 0x4f, 0x67, 0xe2, - 0x54, 0x7e, 0x4c, 0x59, 0x70, 0xe5, 0xd7, 0x19, 0x1f, 0x11, 0x7d, 0x7f, 0xf4, 0xc4, 0x10, 0xa3, - 0xe2, 0xe6, 0x95, 0x30, 0x11, 0x47, 0x13, 0x33, 0x11, 0x54, 0x7e, 0xbe, 0x64, 0xf1, 0x0a, 0x15, - 0xba, 0xa6, 0xac, 0x12, 0xa1, 0x25, 0x42, 0x85, 0xb5, 0x34, 0xfb, 0x4d, 0x85, 0xd7, 0x29, 0xa2, - 0x24, 0x3f, 0x7a, 0x42, 0xb9, 0x4c, 0x05, 0x2f, 0xc7, 0xa3, 0x9e, 0xb5, 0xe9, 0xef, 0x93, 0x16, - 0xe8, 0x23, 0x6e, 0xf4, 0xf3, 0xe4, 0x1a, 0x6e, 0x74, 0x3c, 0x60, 0x74, 0x83, 0xd2, 0x08, 0x46, - 0xbf, 0xff, 0x9c, 0xf9, 0x48, 0xa6, 0x5a, 0xd0, 0x46, 0x87, 0x07, 0xe8, 0xde, 0x48, 0x79, 0x8a, - 0xcb, 0x41, 0x9d, 0x72, 0xb4, 0x17, 0x94, 0xbf, 0x78, 0xa0, 0x21, 0x24, 0xee, 0x07, 0xdb, 0x88, - 0x37, 0x24, 0x74, 0x07, 0xa6, 0x1e, 0xd0, 0x7f, 0x6b, 0x45, 0x3d, 0x3c, 0x8d, 0xb3, 0x7b, 0xca, - 0x88, 0xb2, 0x47, 0xb8, 0x7c, 0xe2, 0xf5, 0x20, 0xef, 0xff, 0xf1, 0xf3, 0x35, 0xe9, 0xb3, 0xcf, - 0xd7, 0xa4, 0xbf, 0x7e, 0xbe, 0x26, 0x7d, 0xfc, 0xc5, 0xda, 0xd8, 0x67, 0x5f, 0xac, 0x8d, 0xfd, - 0xf9, 0x8b, 0xb5, 0xb1, 0x6f, 0xbe, 0x56, 0xd1, 0x9d, 0xa3, 0x46, 0x29, 0x55, 0x36, 0x6b, 0x69, - 0xd5, 0xaa, 0xa9, 0x9a, 0x5a, 0xb7, 0xcc, 0x63, 0x5c, 0x76, 0xf8, 0x57, 0x9a, 0xff, 0x4b, 0xed, - 0xa7, 0xe3, 0xb1, 0x2d, 0x0a, 0xec, 0xb1, 0xe5, 0xd4, 0x8e, 0x99, 0xda, 0xaa, 0xeb, 0xa5, 0x29, - 0x6a, 0xc3, 0x9b, 0xff, 0x0e, 0x00, 0x00, 0xff, 0xff, 0x8f, 0x40, 0x05, 0xa7, 0x40, 0x2c, 0x00, - 0x00, + // 3060 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x3a, 0x4b, 0x6c, 0x1b, 0xd7, + 0xb5, 0x1a, 0x51, 0x3f, 0x1e, 0xea, 0x43, 0x5d, 0x7d, 0x3c, 0xa2, 0x1d, 0x51, 0x9e, 0x24, 0x8e, + 0xac, 0xf8, 0x51, 0xb1, 0xf2, 0x82, 0x67, 0xfb, 0xe5, 0x3d, 0x57, 0xa4, 0x68, 0x5b, 0xb2, 0x2d, + 0x2b, 0x92, 0x95, 0x4f, 0x51, 0x94, 0x1d, 0x72, 0xae, 0xa8, 0x91, 0xc8, 0x99, 0xc9, 0xcc, 0x50, + 0xae, 0x5a, 0x64, 0x53, 0x14, 0xc8, 0xa2, 0x9b, 0xa0, 0x5d, 0x16, 0x68, 0x0a, 0xb4, 0xab, 0x74, + 0xdd, 0x4d, 0xd1, 0x45, 0x97, 0x5d, 0x06, 0xe8, 0xa6, 0xdd, 0x10, 0x45, 0xd2, 0x36, 0x00, 0x77, + 0xdd, 0x77, 0x51, 0xdc, 0x73, 0xe7, 0x73, 0x87, 0x1f, 0x91, 0x94, 0xad, 0x64, 0xd3, 0x1d, 0xef, + 0xb9, 0xe7, 0x7f, 0xcf, 0x3d, 0x9f, 0x3b, 0x84, 0x59, 0xeb, 0xb8, 0xbc, 0xaa, 0x5a, 0xfa, 0xaa, + 0x53, 0x2b, 0x56, 0x75, 0x37, 0x63, 0xd9, 0xa6, 0x6b, 0x92, 0x98, 0x6a, 0xe9, 0xa9, 0xcb, 0x65, + 0xd3, 0x2c, 0x57, 0xe8, 0x2a, 0x82, 0x8a, 0xb5, 0x83, 0x55, 0x5a, 0xb5, 0xdc, 0x53, 0x8e, 0x91, + 0x4a, 0x37, 0x6f, 0xba, 0x7a, 0x95, 0x3a, 0xae, 0x5a, 0xb5, 0x3c, 0x04, 0xe5, 0xf8, 0x96, 0x93, + 0xd1, 0x4d, 0xe4, 0x5d, 0x32, 0x6d, 0xba, 0x7a, 0x72, 0x73, 0xb5, 0x4c, 0x0d, 0x6a, 0xab, 0x2e, + 0xd5, 0x3c, 0x9c, 0x65, 0x01, 0xc7, 0xa0, 0xee, 0x33, 0xd3, 0x3e, 0xd6, 0x8d, 0x72, 0x3b, 0xcc, + 0x2b, 0x9e, 0x38, 0x86, 0xa9, 0x1a, 0x86, 0xe9, 0xaa, 0xae, 0x6e, 0x1a, 0x8e, 0xb7, 0x1b, 0x18, + 0x71, 0x48, 0xd5, 0x8a, 0x7b, 0xc8, 0xa1, 0xca, 0xa7, 0x71, 0x98, 0xdd, 0x32, 0x8b, 0x7b, 0x68, + 0xd8, 0x2e, 0xfd, 0xb0, 0x46, 0x1d, 0x77, 0xd3, 0xa5, 0x55, 0xb2, 0x06, 0x63, 0x96, 0xad, 0x9b, + 0xb6, 0xee, 0x9e, 0xca, 0xd2, 0x92, 0xb4, 0x2c, 0x65, 0xe7, 0x1b, 0xf5, 0x34, 0xf1, 0x61, 0x37, + 0xcc, 0xaa, 0xee, 0xa2, 0xad, 0xbb, 0x01, 0x1e, 0x79, 0x0b, 0xe2, 
0x86, 0x5a, 0xa5, 0x8e, 0xa5, + 0x96, 0xa8, 0x1c, 0x5b, 0x92, 0x96, 0xe3, 0xd9, 0x4b, 0x8d, 0x7a, 0x7a, 0x26, 0x00, 0x0a, 0x54, + 0x21, 0x26, 0x79, 0x13, 0xe2, 0xa5, 0x8a, 0x4e, 0x0d, 0xb7, 0xa0, 0x6b, 0xf2, 0x18, 0x92, 0xa1, + 0x2c, 0x0e, 0xdc, 0xd4, 0x44, 0x59, 0x3e, 0x8c, 0xec, 0xc1, 0x48, 0x45, 0x2d, 0xd2, 0x8a, 0x23, + 0x0f, 0x2d, 0xc5, 0x96, 0x13, 0x6b, 0xaf, 0x66, 0x54, 0x4b, 0xcf, 0xb4, 0x33, 0x25, 0xf3, 0x08, + 0xf1, 0xf2, 0x86, 0x6b, 0x9f, 0x66, 0x67, 0x1b, 0xf5, 0x74, 0x92, 0x13, 0x0a, 0x6c, 0x3d, 0x56, + 0xa4, 0x0c, 0x09, 0xc1, 0x71, 0xf2, 0x30, 0x72, 0x5e, 0xe9, 0xcc, 0x79, 0x3d, 0x44, 0xe6, 0xec, + 0x17, 0x1a, 0xf5, 0xf4, 0x9c, 0xc0, 0x42, 0x90, 0x21, 0x72, 0x26, 0x1f, 0x4b, 0x30, 0x6b, 0xd3, + 0x0f, 0x6b, 0xba, 0x4d, 0xb5, 0x82, 0x61, 0x6a, 0xb4, 0xe0, 0x19, 0x33, 0x82, 0x22, 0x6f, 0x76, + 0x16, 0xb9, 0xeb, 0x51, 0x6d, 0x9b, 0x1a, 0x15, 0x0d, 0x53, 0x1a, 0xf5, 0xf4, 0x15, 0xbb, 0x65, + 0x33, 0x54, 0x40, 0x96, 0x76, 0x49, 0xeb, 0x3e, 0x79, 0x02, 0x63, 0x96, 0xa9, 0x15, 0x1c, 0x8b, + 0x96, 0xe4, 0xc1, 0x25, 0x69, 0x39, 0xb1, 0x76, 0x39, 0xc3, 0x23, 0x0e, 0x75, 0x60, 0x51, 0x99, + 0x39, 0xb9, 0x99, 0xd9, 0x31, 0xb5, 0x3d, 0x8b, 0x96, 0xf0, 0x3c, 0xa7, 0x2d, 0xbe, 0x88, 0xf0, + 0x1e, 0xf5, 0x80, 0x64, 0x07, 0xe2, 0x3e, 0x43, 0x47, 0x1e, 0x45, 0x73, 0xce, 0xe4, 0xc8, 0xc3, + 0x8a, 0x2f, 0x9c, 0x48, 0x58, 0x79, 0x30, 0x92, 0x83, 0x51, 0xdd, 0x28, 0xdb, 0xd4, 0x71, 0xe4, + 0x38, 0xf2, 0x23, 0xc8, 0x68, 0x93, 0xc3, 0x72, 0xa6, 0x71, 0xa0, 0x97, 0xb3, 0x73, 0x4c, 0x31, + 0x0f, 0x4d, 0xe0, 0xe2, 0x53, 0x92, 0x7b, 0x30, 0xe6, 0x50, 0xfb, 0x44, 0x2f, 0x51, 0x47, 0x06, + 0x81, 0xcb, 0x1e, 0x07, 0x7a, 0x5c, 0x50, 0x19, 0x1f, 0x4f, 0x54, 0xc6, 0x87, 0xb1, 0x18, 0x77, + 0x4a, 0x87, 0x54, 0xab, 0x55, 0xa8, 0x2d, 0x27, 0xc2, 0x18, 0x0f, 0x80, 0x62, 0x8c, 0x07, 0xc0, + 0x94, 0x0a, 0x09, 0xe1, 0xb4, 0xc8, 0xcb, 0x10, 0x3b, 0xa6, 0xfc, 0x62, 0xc5, 0xb3, 0xd3, 0x8d, + 0x7a, 0x7a, 0xe2, 0x98, 0x8a, 0x77, 0x8a, 0xed, 0x92, 0xeb, 0x30, 0x7c, 0xa2, 0x56, 0x6a, 0x14, + 0xcf, 0x25, 0x9e, 0x9d, 0x69, 0xd4, 0xd3, 0x53, 0x08, 0x10, 0x10, 0x39, 0xc6, 0x9d, 0xc1, 0x5b, + 0x52, 0xea, 0x00, 0x92, 0xcd, 0xf1, 0x78, 0x21, 0x72, 0xaa, 0x70, 0xa9, 0x43, 0x10, 0x5e, 0x84, + 0x38, 0xe5, 0x9f, 0x31, 0x98, 0x88, 0x1c, 0x35, 0xb9, 0x03, 0x43, 0xee, 0xa9, 0x45, 0x51, 0xcc, + 0xe4, 0x5a, 0x52, 0x0c, 0x86, 0xa7, 0xa7, 0x16, 0xc5, 0x3b, 0x3e, 0xc9, 0x30, 0x22, 0x01, 0x8a, + 0x34, 0x4c, 0xb8, 0x65, 0xda, 0xae, 0x23, 0x0f, 0x2e, 0xc5, 0x96, 0x27, 0xb8, 0x70, 0x04, 0x88, + 0xc2, 0x11, 0x40, 0xbe, 0x17, 0x4d, 0x06, 0x31, 0x0c, 0x9a, 0x97, 0x5b, 0x43, 0xef, 0xfc, 0x59, + 0xe0, 0x36, 0x24, 0xdc, 0x8a, 0x53, 0xa0, 0x86, 0x5a, 0xac, 0x50, 0x4d, 0x1e, 0x5a, 0x92, 0x96, + 0xc7, 0xb2, 0x72, 0xa3, 0x9e, 0x9e, 0x75, 0x99, 0x47, 0x11, 0x2a, 0xd0, 0x42, 0x08, 0xc5, 0x9c, + 0x49, 0x6d, 0xb7, 0xc0, 0xb2, 0xa8, 0x3c, 0x2c, 0xe4, 0x4c, 0x6a, 0xbb, 0xdb, 0x6a, 0x95, 0x46, + 0x72, 0xa6, 0x07, 0x23, 0x77, 0x61, 0xa2, 0xe6, 0xd0, 0x42, 0xa9, 0x52, 0x73, 0x5c, 0x6a, 0x6f, + 0xee, 0xc8, 0x23, 0x28, 0x31, 0xd5, 0xa8, 0xa7, 0xe7, 0x6b, 0x0e, 0xcd, 0xf9, 0x70, 0x81, 0x78, + 0x5c, 0x84, 0x7f, 0x5d, 0x21, 0xa6, 0xb8, 0x30, 0x11, 0xb9, 0x97, 0xe4, 0x56, 0x9b, 0x23, 0xf7, + 0x30, 0xf0, 0xc8, 0x49, 0xeb, 0x91, 0xf7, 0x7d, 0xe0, 0xca, 0x5f, 0x24, 0x48, 0x36, 0xe7, 0x5c, + 0x46, 0xff, 0x61, 0x8d, 0xd6, 0xa8, 0x67, 0x20, 0xd2, 0x23, 0x40, 0xa4, 0x47, 0x00, 0xf9, 0x6f, + 0x80, 0x23, 0xb3, 0x58, 0x70, 0x28, 0x16, 0xb2, 0xc1, 0xf0, 0x50, 0x8e, 0xcc, 0xe2, 0x1e, 0x6d, + 0x2a, 0x64, 0x3e, 0x8c, 0x68, 0x30, 0xcd, 0xa8, 0x6c, 0x2e, 0xaf, 0xc0, 0x10, 0xfc, 0x60, 
0x5b, + 0xe8, 0x58, 0x06, 0xb2, 0x2f, 0x35, 0xea, 0xe9, 0x85, 0x23, 0xb3, 0x28, 0xc0, 0x44, 0x8b, 0xa6, + 0x9a, 0xb6, 0x94, 0x5f, 0x49, 0x30, 0xbd, 0x65, 0x16, 0x77, 0x6c, 0xca, 0x10, 0xbe, 0x36, 0xe3, + 0xfe, 0x0b, 0x46, 0x19, 0x95, 0xae, 0x71, 0x93, 0xe2, 0xbc, 0xfe, 0x1e, 0x99, 0xc5, 0x4d, 0x2d, + 0x52, 0x7f, 0x39, 0x44, 0xf9, 0x17, 0x3f, 0x81, 0x9c, 0x6a, 0x94, 0x68, 0xc5, 0x57, 0x72, 0x05, + 0x46, 0x38, 0x0f, 0x51, 0x4b, 0x24, 0x10, 0xb5, 0x44, 0xc0, 0x39, 0xb5, 0x0c, 0xdc, 0x10, 0xeb, + 0xea, 0x06, 0xc1, 0xa0, 0xa1, 0xee, 0x06, 0x91, 0x1b, 0x30, 0x62, 0x53, 0xd5, 0x31, 0x0d, 0xef, + 0x8e, 0x22, 0x36, 0x87, 0x88, 0xd8, 0x1c, 0xa2, 0xfc, 0x5d, 0x82, 0x99, 0x2d, 0x54, 0x2a, 0xea, + 0x81, 0xa8, 0x55, 0x52, 0xbf, 0x56, 0x0d, 0x76, 0xb5, 0xea, 0x2e, 0x8c, 0x1c, 0xe8, 0x15, 0x97, + 0xda, 0xe8, 0x81, 0xc4, 0xda, 0x74, 0x10, 0x78, 0xd4, 0xbd, 0x87, 0x1b, 0x5c, 0x73, 0x8e, 0x24, + 0x6a, 0xce, 0x21, 0x82, 0x9d, 0x43, 0x3d, 0xd8, 0xf9, 0x10, 0xc6, 0x45, 0xde, 0xe4, 0x7f, 0x61, + 0xc4, 0x71, 0x55, 0x97, 0x3a, 0xb2, 0xb4, 0x14, 0x5b, 0x9e, 0x5c, 0x9b, 0x08, 0xc4, 0x33, 0x28, + 0x67, 0xc6, 0x11, 0x44, 0x66, 0x1c, 0xa2, 0x7c, 0x35, 0x05, 0xb1, 0x2d, 0xb3, 0x48, 0x96, 0x60, + 0x30, 0x70, 0x4e, 0xb2, 0x51, 0x4f, 0x8f, 0xeb, 0xa2, 0x5b, 0x06, 0x75, 0x2d, 0xda, 0x67, 0x4e, + 0xf4, 0xd8, 0x67, 0x5e, 0x78, 0x44, 0x45, 0x9a, 0xe6, 0xd1, 0x9e, 0x9b, 0xe6, 0x6c, 0xd0, 0xff, + 0xf2, 0x9e, 0x68, 0xd6, 0xf7, 0x59, 0x1f, 0xed, 0xee, 0xbb, 0xd1, 0x0a, 0x07, 0xd1, 0xa4, 0x73, + 0xfe, 0xba, 0x76, 0xd2, 0xa1, 0xb9, 0x4d, 0xa0, 0x80, 0xa5, 0x40, 0xc0, 0x8b, 0xee, 0x65, 0xaf, + 0xc3, 0xb0, 0xf9, 0xcc, 0xa0, 0xb6, 0x37, 0x44, 0xa0, 0xd7, 0x11, 0x20, 0x7a, 0x1d, 0x01, 0x84, + 0xc2, 0x65, 0x74, 0x7f, 0x01, 0x97, 0xce, 0xa1, 0x6e, 0x15, 0x6a, 0x0e, 0xb5, 0x0b, 0x65, 0xdb, + 0xac, 0x59, 0x8e, 0x3c, 0x85, 0x77, 0xfb, 0x5a, 0xa3, 0x9e, 0x56, 0x10, 0xed, 0x89, 0x8f, 0xb5, + 0xef, 0x50, 0xfb, 0x3e, 0xe2, 0x08, 0x3c, 0xe5, 0x4e, 0x38, 0xe4, 0xc7, 0x12, 0x5c, 0x2b, 0x99, + 0x55, 0x8b, 0x75, 0x0b, 0x54, 0x2b, 0x9c, 0x25, 0x72, 0x66, 0x49, 0x5a, 0x1e, 0xcf, 0xbe, 0xd1, + 0xa8, 0xa7, 0x6f, 0x84, 0x14, 0xef, 0x74, 0x17, 0xae, 0x74, 0xc7, 0x8e, 0x0c, 0x73, 0x43, 0x3d, + 0x0e, 0x73, 0xe2, 0x60, 0x30, 0xfc, 0xc2, 0x07, 0x83, 0xf1, 0x17, 0x31, 0x18, 0xfc, 0x52, 0x82, + 0x25, 0xaf, 0xc5, 0xd6, 0x8d, 0x72, 0xc1, 0xa6, 0x8e, 0x59, 0xb3, 0x4b, 0xb4, 0xe0, 0x85, 0x46, + 0x95, 0x1a, 0xae, 0x23, 0xcf, 0xa1, 0xee, 0xcb, 0xed, 0x24, 0xed, 0x7a, 0x04, 0xbb, 0x02, 0x7e, + 0xf6, 0x46, 0xa3, 0x9e, 0x5e, 0x0e, 0xb9, 0xb6, 0xc3, 0x11, 0x94, 0x59, 0x3c, 0x1b, 0x93, 0x3c, + 0x84, 0xd1, 0x92, 0x4d, 0xd9, 0x90, 0x8e, 0xcd, 0x56, 0x62, 0x2d, 0x95, 0xe1, 0x53, 0x7a, 0xc6, + 0x7f, 0x14, 0xc8, 0x3c, 0xf5, 0x1f, 0x05, 0xf8, 0x0c, 0xe3, 0xa1, 0x8b, 0x33, 0x8c, 0x07, 0x12, + 0x07, 0xa1, 0xc9, 0x17, 0x32, 0x08, 0x25, 0x9f, 0x63, 0x10, 0xfa, 0x0e, 0x24, 0x8e, 0x6f, 0x39, + 0x05, 0x5f, 0xa1, 0x69, 0x64, 0x75, 0x55, 0x74, 0x73, 0xf8, 0x5a, 0xc1, 0x9c, 0xed, 0x69, 0xc9, + 0xfb, 0xdb, 0xe3, 0x5b, 0xce, 0x66, 0x8b, 0x8a, 0x10, 0x42, 0x59, 0x6a, 0x62, 0xdc, 0x3d, 0x69, + 0x32, 0xe9, 0x1c, 0x2e, 0x9e, 0xde, 0x01, 0x5f, 0x6f, 0xdd, 0xc4, 0xd7, 0x83, 0x46, 0xc7, 0xb7, + 0xd9, 0x5e, 0xc7, 0x37, 0xb2, 0x09, 0xd3, 0xfc, 0xee, 0xba, 0x6e, 0xa5, 0xe0, 0xd0, 0x92, 0x69, + 0x68, 0x8e, 0x3c, 0xbf, 0x24, 0x2d, 0xc7, 0x78, 0x27, 0x86, 0x9b, 0x4f, 0xdd, 0xca, 0x1e, 0xdf, + 0x12, 0x3b, 0xb1, 0xa6, 0xad, 0xff, 0x4c, 0x82, 0xe7, 0x9e, 0x0a, 0xfe, 0x21, 0xc1, 0xfc, 0x16, + 0xeb, 0x6b, 0xbd, 0x1c, 0xa5, 0xff, 0x80, 0xfa, 0x1d, 0x92, 0xd0, 0x96, 0x49, 0x3d, 0xb4, 0x65, + 0x17, 0x5e, 0xd4, 
0xdf, 0x86, 0x71, 0x83, 0x3e, 0x2b, 0x34, 0x25, 0x5d, 0xac, 0x9f, 0x06, 0x7d, + 0xb6, 0xd3, 0x9a, 0x77, 0x13, 0x02, 0x58, 0xf9, 0xcd, 0x20, 0x5c, 0x6a, 0x31, 0xd4, 0xb1, 0x4c, + 0xc3, 0xa1, 0xe4, 0xe7, 0x12, 0xc8, 0x76, 0xb8, 0x81, 0x47, 0xcc, 0x32, 0x5f, 0xad, 0xe2, 0x72, + 0xdb, 0x13, 0x6b, 0xb7, 0xfd, 0x02, 0xdb, 0x8e, 0x41, 0x66, 0xb7, 0x89, 0x78, 0x97, 0xd3, 0xf2, + 0xca, 0xfb, 0x6a, 0xa3, 0x9e, 0xbe, 0x6a, 0xb7, 0xc7, 0x10, 0xb4, 0xbd, 0xd4, 0x01, 0x25, 0x65, + 0xc3, 0x95, 0xb3, 0xf8, 0x5f, 0x48, 0x58, 0x18, 0x30, 0x27, 0x8c, 0x48, 0xdc, 0x4a, 0x7c, 0xc2, + 0xec, 0x67, 0x70, 0xb8, 0x0e, 0xc3, 0xd4, 0xb6, 0x4d, 0x5b, 0x94, 0x89, 0x00, 0x11, 0x15, 0x01, + 0xca, 0x47, 0x38, 0x49, 0x45, 0xe5, 0x91, 0x43, 0x20, 0x7c, 0x8a, 0xe3, 0x6b, 0x6f, 0x8c, 0xe3, + 0xe7, 0x91, 0x6a, 0x1e, 0xe3, 0x42, 0x1d, 0xb3, 0x8b, 0x8d, 0x7a, 0x3a, 0x85, 0xc3, 0x5a, 0x08, + 0x14, 0x3d, 0x9d, 0x6c, 0xde, 0x53, 0x3e, 0x4e, 0xc0, 0x30, 0x16, 0x7a, 0x72, 0x0d, 0x86, 0x70, + 0xfc, 0xe7, 0xd6, 0xe1, 0x08, 0x6c, 0x44, 0x47, 0x7f, 0xdc, 0x27, 0x79, 0x98, 0xf2, 0x03, 0xb1, + 0x70, 0xa0, 0x96, 0x5c, 0xcf, 0x4a, 0x29, 0x7b, 0xa5, 0x51, 0x4f, 0xcb, 0xfe, 0xd6, 0x3d, 0xdc, + 0x11, 0x88, 0x27, 0xa3, 0x3b, 0xe4, 0x36, 0x24, 0xb0, 0x5f, 0xe1, 0xed, 0x8b, 0x37, 0xcf, 0x61, + 0xd6, 0x65, 0x60, 0xde, 0x76, 0x88, 0x59, 0x37, 0x84, 0xb2, 0xeb, 0x80, 0x5d, 0x8e, 0x4f, 0xcb, + 0x47, 0x27, 0xbc, 0x0e, 0x08, 0x6f, 0x21, 0x4e, 0x08, 0x60, 0x52, 0x86, 0xa9, 0xa0, 0xb4, 0x57, + 0xf4, 0xaa, 0xee, 0xfa, 0x2f, 0xb3, 0x8b, 0xe8, 0x58, 0x74, 0x46, 0x50, 0xcb, 0x1f, 0x21, 0x02, + 0x8f, 0x66, 0xe6, 0x5c, 0xd9, 0x8e, 0x6c, 0x44, 0x5a, 0x93, 0xc9, 0xe8, 0x1e, 0xf9, 0xad, 0x04, + 0xd7, 0x9a, 0x24, 0x15, 0x8a, 0xa7, 0xc1, 0x2d, 0x2e, 0x94, 0x2a, 0xaa, 0xe3, 0xf0, 0x27, 0x97, + 0x51, 0xe1, 0x9d, 0xb6, 0x9d, 0x02, 0xd9, 0x53, 0xff, 0x36, 0xe7, 0x18, 0xd1, 0xb6, 0x5a, 0xa5, + 0x5c, 0xa7, 0xd5, 0x46, 0x3d, 0xfd, 0xba, 0xdd, 0x0d, 0x57, 0x70, 0xc5, 0xd5, 0xae, 0xc8, 0x64, + 0x0f, 0x12, 0x16, 0xb5, 0xab, 0xba, 0xe3, 0x60, 0x1f, 0xcf, 0xdf, 0x90, 0xe7, 0x05, 0xdd, 0x76, + 0xc2, 0x5d, 0xee, 0x75, 0x01, 0x5d, 0xf4, 0xba, 0x00, 0x66, 0x3d, 0x63, 0xc9, 0xb4, 0x35, 0xd3, + 0xa0, 0xfc, 0x51, 0x7e, 0xcc, 0x1b, 0x96, 0x3c, 0x58, 0x64, 0x58, 0xf2, 0x60, 0xe4, 0x31, 0x4c, + 0xf3, 0x56, 0xbf, 0xa0, 0x51, 0xcb, 0xa6, 0x25, 0xec, 0x7b, 0xe2, 0x78, 0xd8, 0x4b, 0x2c, 0xd0, + 0xf9, 0xe6, 0x46, 0xb0, 0x17, 0x39, 0x8d, 0x64, 0xf3, 0x2e, 0xd9, 0x08, 0x66, 0x1c, 0x68, 0x31, + 0xa9, 0xe7, 0x29, 0x27, 0xf5, 0x95, 0x04, 0x09, 0xc1, 0x01, 0x64, 0x17, 0xc6, 0x9c, 0x5a, 0xf1, + 0x88, 0x96, 0x82, 0x84, 0xb9, 0xd8, 0xde, 0x55, 0x99, 0x3d, 0x8e, 0xe6, 0x35, 0x43, 0x1e, 0x4d, + 0xa4, 0x19, 0xf2, 0x60, 0x98, 0xb2, 0xa8, 0x5d, 0xe4, 0xaf, 0x4c, 0x7e, 0xca, 0x62, 0x80, 0x48, + 0xca, 0x62, 0x80, 0xd4, 0x07, 0x30, 0xea, 0xf1, 0x65, 0x17, 0xf8, 0x58, 0x37, 0x34, 0xf1, 0x02, + 0xb3, 0xb5, 0x78, 0x81, 0xd9, 0x3a, 0xb8, 0xe8, 0x83, 0x67, 0x5f, 0xf4, 0x94, 0x0e, 0x33, 0x6d, + 0xae, 0xc1, 0x39, 0x92, 0xae, 0xd4, 0xb5, 0xf4, 0xff, 0x42, 0x82, 0x6b, 0xbd, 0x45, 0x7c, 0x6f, + 0xe2, 0x1f, 0x8a, 0xe2, 0xfd, 0x19, 0x31, 0xc2, 0xb0, 0x49, 0x5a, 0x37, 0x05, 0x2f, 0xbe, 0xcd, + 0x52, 0x7e, 0x3a, 0x0c, 0x97, 0xcf, 0x50, 0x91, 0x8d, 0x27, 0x0b, 0x55, 0xf5, 0xfb, 0x7a, 0xb5, + 0x56, 0x0d, 0x67, 0x93, 0x03, 0x5b, 0x2d, 0xb1, 0xb2, 0xe8, 0x85, 0xde, 0xff, 0x75, 0x33, 0x34, + 0xf3, 0x98, 0x73, 0xf0, 0xa1, 0xf7, 0x3c, 0x7a, 0xa1, 0x5e, 0x57, 0xdb, 0x63, 0x88, 0xf5, 0xba, + 0x03, 0x0a, 0xf9, 0x9d, 0x04, 0x57, 0x3b, 0xaa, 0x88, 0xb9, 0xcf, 0x34, 0x2b, 0x18, 0xd4, 0x89, + 0xb5, 0xdc, 0x79, 0x55, 0xcd, 0x9e, 0xee, 
0x98, 0x66, 0x85, 0x2b, 0xfc, 0x7a, 0xa3, 0x9e, 0x7e, + 0xad, 0x7a, 0x16, 0x9e, 0xa0, 0xf6, 0x4b, 0x67, 0x22, 0xb2, 0x66, 0xe3, 0x2c, 0xe7, 0x5c, 0x54, + 0xdc, 0x2b, 0xdd, 0xcd, 0xec, 0x4d, 0xf4, 0x93, 0x68, 0xcc, 0xbf, 0xd2, 0xea, 0x5f, 0xc6, 0xb0, + 0xbf, 0xb8, 0x57, 0x7e, 0x3f, 0x08, 0xe9, 0x2e, 0x3c, 0xc8, 0xaf, 0x7b, 0x08, 0xcc, 0xf5, 0x5e, + 0xb4, 0xb9, 0xd0, 0xe0, 0xfc, 0x26, 0xce, 0x57, 0xc9, 0x43, 0x1c, 0xeb, 0xc0, 0x23, 0xdd, 0x71, + 0xc9, 0x2d, 0x18, 0xc1, 0x76, 0xde, 0xaf, 0x13, 0x10, 0xd6, 0x09, 0x5e, 0x73, 0xf8, 0xae, 0x58, + 0x73, 0x38, 0x44, 0xd9, 0x07, 0xc2, 0x9f, 0x70, 0x2b, 0x42, 0x0f, 0x4c, 0xee, 0xc2, 0x44, 0x89, + 0x43, 0xa9, 0x26, 0xcc, 0x2a, 0xf8, 0xfd, 0x25, 0xd8, 0x88, 0x4e, 0x2c, 0xe3, 0x22, 0x5c, 0xb9, + 0x0d, 0x53, 0x28, 0xfd, 0x3e, 0x0d, 0x9e, 0xf0, 0x7b, 0x6c, 0x02, 0x95, 0xb7, 0x81, 0x20, 0x69, + 0x0e, 0x6b, 0x75, 0xbf, 0xd4, 0xff, 0x0f, 0xb3, 0x48, 0xbd, 0x6f, 0x94, 0xce, 0x45, 0x7f, 0x17, + 0xe4, 0x3d, 0xd7, 0xa6, 0x6a, 0x55, 0x37, 0xca, 0xcd, 0x16, 0xbc, 0x0c, 0x31, 0xa3, 0x56, 0x45, + 0x16, 0x13, 0xfc, 0x18, 0x8d, 0x5a, 0x55, 0x3c, 0x46, 0xa3, 0x56, 0x0d, 0xd4, 0xdf, 0xa0, 0x15, + 0xea, 0xd2, 0x7e, 0xc5, 0x7f, 0x26, 0x01, 0xf0, 0x17, 0xe7, 0x4d, 0xe3, 0xc0, 0xec, 0xb9, 0x71, + 0xbe, 0x0d, 0x09, 0x3c, 0x4f, 0xad, 0x70, 0x64, 0x62, 0x6d, 0x97, 0x96, 0x87, 0x79, 0xc7, 0xcb, + 0xc1, 0x5b, 0x66, 0xa4, 0xc0, 0x43, 0x08, 0x65, 0xa4, 0x15, 0xaa, 0x3a, 0x3e, 0x69, 0x2c, 0x24, + 0xe5, 0xe0, 0x66, 0xd2, 0x10, 0xaa, 0x3c, 0x83, 0x19, 0xee, 0x6b, 0x4b, 0x53, 0xdd, 0x70, 0xf0, + 0x7b, 0x4b, 0xfc, 0x56, 0x13, 0x8d, 0xc5, 0xb3, 0x26, 0xd1, 0x3e, 0x06, 0x9b, 0x1a, 0xc8, 0x59, + 0xd5, 0x2d, 0x1d, 0xb6, 0x93, 0xfe, 0x01, 0x4c, 0x1c, 0xa8, 0x7a, 0xc5, 0x7f, 0xc3, 0xf4, 0x6f, + 0x84, 0x1c, 0x6a, 0x11, 0x25, 0xe0, 0x41, 0xcd, 0x49, 0xde, 0x69, 0xbe, 0x25, 0xe3, 0x22, 0x3c, + 0xb0, 0x37, 0x87, 0xaf, 0x5c, 0xdf, 0x94, 0xbd, 0x4d, 0xd2, 0xbb, 0xdb, 0x1b, 0x25, 0xe8, 0xc3, + 0xde, 0x04, 0xc4, 0xf3, 0x86, 0xf6, 0x58, 0xb5, 0x8f, 0xa9, 0xad, 0x7c, 0x22, 0xc1, 0x5c, 0xf4, + 0x66, 0x3c, 0xa6, 0x8e, 0xa3, 0x96, 0x29, 0xf9, 0x9f, 0xfe, 0xec, 0x7f, 0x30, 0x10, 0x7e, 0x50, + 0x88, 0x51, 0x43, 0xf3, 0x8a, 0xca, 0x24, 0x92, 0x05, 0xf2, 0xf8, 0xfd, 0xa2, 0x62, 0x8f, 0xf9, + 0x60, 0x60, 0x97, 0xe1, 0x67, 0x47, 0x61, 0x98, 0x9e, 0x50, 0xc3, 0x5d, 0x49, 0x41, 0x42, 0xf8, + 0x9a, 0x4e, 0x12, 0x30, 0xea, 0x2d, 0x93, 0x03, 0x2b, 0xd7, 0x21, 0x21, 0x7c, 0x76, 0x25, 0xe3, + 0x30, 0xb6, 0x6d, 0x6a, 0x74, 0xc7, 0xb4, 0xdd, 0xe4, 0x00, 0x5b, 0x3d, 0xa0, 0xaa, 0x56, 0x61, + 0xa8, 0xd2, 0xca, 0xa7, 0x12, 0x8c, 0xf9, 0x9f, 0x70, 0x08, 0xc0, 0xc8, 0x3b, 0xfb, 0xf9, 0xfd, + 0xfc, 0x46, 0x72, 0x80, 0x31, 0xdc, 0xc9, 0x6f, 0x6f, 0x6c, 0x6e, 0xdf, 0x4f, 0x4a, 0x6c, 0xb1, + 0xbb, 0xbf, 0xbd, 0xcd, 0x16, 0x83, 0x64, 0x02, 0xe2, 0x7b, 0xfb, 0xb9, 0x5c, 0x3e, 0xbf, 0x91, + 0xdf, 0x48, 0xc6, 0x18, 0xd1, 0xbd, 0xf5, 0xcd, 0x47, 0xf9, 0x8d, 0xe4, 0x10, 0xc3, 0xdb, 0xdf, + 0x7e, 0xb8, 0xfd, 0xe4, 0xbd, 0xed, 0xe4, 0x30, 0xc7, 0xcb, 0x3e, 0xde, 0x7c, 0xfa, 0x34, 0xbf, + 0x91, 0x1c, 0x61, 0x78, 0x8f, 0xf2, 0xeb, 0x7b, 0xf9, 0x8d, 0xe4, 0x28, 0xdb, 0xda, 0xd9, 0xcd, + 0xe7, 0x1f, 0xef, 0xb0, 0xad, 0x31, 0xb6, 0xcc, 0xad, 0x6f, 0xe7, 0xf2, 0x8f, 0x18, 0x97, 0x38, + 0xd3, 0x70, 0x37, 0xbf, 0x95, 0xcf, 0xb1, 0x4d, 0x58, 0xfb, 0xc3, 0x10, 0x8c, 0xa3, 0x43, 0xfd, + 0xc7, 0xc1, 0x37, 0x21, 0xc1, 0x4f, 0x95, 0xcf, 0xd7, 0x82, 0xcb, 0x53, 0xf3, 0x2d, 0xcf, 0xb6, + 0x79, 0xe6, 0x3c, 0x65, 0x80, 0xdc, 0x85, 0x71, 0x81, 0xc8, 0x21, 0x93, 0x21, 0x15, 0x2b, 0x22, + 0xa9, 0x97, 0x70, 0xdd, 0x29, 0xd0, 0x94, 0x01, 0x26, 0x95, 0xdf, 
0x9d, 0x3e, 0xa5, 0x0a, 0x44, + 0xdd, 0xa5, 0x46, 0x6f, 0xa7, 0x32, 0x40, 0xbe, 0x05, 0x09, 0x9e, 0x4b, 0xb9, 0xd4, 0x4b, 0x21, + 0x7d, 0x24, 0xc5, 0x9e, 0xa1, 0x42, 0x06, 0xc6, 0xee, 0x53, 0x97, 0x93, 0xcf, 0x86, 0xe4, 0x61, + 0x66, 0x4f, 0x09, 0xa6, 0x28, 0x03, 0x64, 0x0b, 0xe2, 0x3e, 0xbe, 0x43, 0xb8, 0x7e, 0x9d, 0x6a, + 0x42, 0x2a, 0xd5, 0x66, 0xdb, 0xbb, 0x18, 0xca, 0xc0, 0x1b, 0x12, 0xd3, 0x9e, 0x17, 0xb2, 0x16, + 0xed, 0x23, 0xf5, 0xed, 0x0c, 0xed, 0x37, 0x60, 0xc2, 0x2f, 0x66, 0x9c, 0xc7, 0x82, 0x90, 0xca, + 0xa2, 0x55, 0xae, 0x33, 0x97, 0xb5, 0x9f, 0xc4, 0x61, 0x84, 0xbf, 0xea, 0x90, 0x77, 0x01, 0xf8, + 0x2f, 0xcc, 0xff, 0x73, 0x6d, 0x3f, 0xdd, 0xa7, 0xe6, 0xdb, 0x3f, 0x05, 0x29, 0x0b, 0x3f, 0xfa, + 0xd3, 0xdf, 0x7e, 0x36, 0x38, 0xa3, 0x4c, 0xae, 0x9e, 0xdc, 0x5c, 0x3d, 0x32, 0x8b, 0xde, 0x3f, + 0x0d, 0xef, 0x48, 0x2b, 0xe4, 0x3d, 0x00, 0xde, 0x4a, 0x44, 0xf9, 0x46, 0xbe, 0x10, 0xa7, 0xb8, + 0x03, 0x5a, 0x5b, 0x8e, 0x56, 0xc6, 0xbc, 0x9f, 0x60, 0x8c, 0xbf, 0x0b, 0xe3, 0x01, 0xe3, 0x3d, + 0xea, 0x12, 0x59, 0xf8, 0xe8, 0x1b, 0xe5, 0xde, 0xc9, 0xfe, 0x2b, 0xc8, 0x7c, 0x5e, 0x99, 0xf6, + 0x98, 0x3b, 0xd4, 0x15, 0xf8, 0x1b, 0x90, 0x14, 0x1f, 0x20, 0x51, 0xfd, 0xcb, 0xed, 0x9f, 0x26, + 0xb9, 0x98, 0x2b, 0x67, 0xbd, 0x5b, 0x2a, 0x69, 0x14, 0xb6, 0xa0, 0xcc, 0xfa, 0x96, 0x08, 0x6f, + 0x90, 0x94, 0xc9, 0xfb, 0x00, 0x12, 0xde, 0xdf, 0x1b, 0x50, 0x54, 0xe0, 0xea, 0xe8, 0x7f, 0x1e, + 0x3a, 0x1a, 0x93, 0x42, 0xfe, 0xb3, 0xca, 0x94, 0xcf, 0xdf, 0xe2, 0x74, 0x8c, 0xf5, 0xfd, 0xfe, + 0x13, 0xc3, 0x2c, 0xb2, 0x9b, 0x54, 0xe2, 0x8c, 0x1d, 0x26, 0x66, 0xc6, 0xa8, 0xf4, 0x7c, 0xc9, + 0xe2, 0x15, 0x64, 0xba, 0xa8, 0x2c, 0x30, 0xa6, 0x45, 0x86, 0x45, 0xb5, 0x55, 0xfe, 0x75, 0xc8, + 0xab, 0x53, 0x4c, 0xc8, 0x76, 0xff, 0x09, 0xe5, 0x32, 0x32, 0x9e, 0x4b, 0x25, 0x03, 0x6d, 0x57, + 0x7f, 0xc8, 0x5a, 0xa0, 0x8f, 0x3c, 0xa5, 0x9f, 0x27, 0xd7, 0x78, 0x4a, 0xa7, 0x22, 0x4a, 0xd7, + 0x10, 0x47, 0x50, 0xfa, 0xfd, 0xe7, 0xcc, 0x47, 0x32, 0x4a, 0x21, 0x2b, 0x2d, 0x16, 0x90, 0x7b, + 0x7d, 0xe5, 0x29, 0x8f, 0x0f, 0x69, 0xe5, 0xa3, 0xbd, 0xa0, 0xfc, 0xe5, 0x05, 0x1a, 0x21, 0xa2, + 0x3f, 0xb8, 0x23, 0xde, 0x90, 0xc8, 0x1d, 0x18, 0x79, 0x80, 0x7f, 0xd0, 0x25, 0x1d, 0x2c, 0x4d, + 0xf1, 0x7b, 0xca, 0x91, 0x72, 0x87, 0xb4, 0x74, 0x1c, 0xf4, 0x20, 0xef, 0xff, 0xf1, 0x8b, 0x45, + 0xe9, 0xf3, 0x2f, 0x16, 0xa5, 0xbf, 0x7e, 0xb1, 0x28, 0x7d, 0xf2, 0xe5, 0xe2, 0xc0, 0xe7, 0x5f, + 0x2e, 0x0e, 0xfc, 0xf9, 0xcb, 0xc5, 0x81, 0x6f, 0xbf, 0x56, 0xd6, 0xdd, 0xc3, 0x5a, 0x31, 0x53, + 0x32, 0xab, 0xab, 0xaa, 0x5d, 0x55, 0x35, 0xd5, 0xb2, 0xcd, 0x23, 0x5a, 0x72, 0xbd, 0xd5, 0xaa, + 0xf7, 0xe7, 0xe0, 0xcf, 0x06, 0x67, 0xd7, 0x11, 0xb0, 0xc3, 0xb7, 0x33, 0x9b, 0x66, 0x66, 0xdd, + 0xd2, 0x8b, 0x23, 0xa8, 0xc3, 0x9b, 0xff, 0x0e, 0x00, 0x00, 0xff, 0xff, 0x0b, 0x6e, 0x2e, 0x1c, + 0x0a, 0x2d, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
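The regenerated descriptor above and the marshalling hunks below follow the standard proto3 map encoding for the new labels field. Assuming the field is declared as map<string, string> labels = 10 (see the submit.proto hunk further down), each entry is written as a small nested message: the key under field 1 (tag 0x0a) and the value under field 2 (tag 0x12), with the whole entry length-delimited under the Queue tag 0x52, i.e. (10 << 3) | 2. A minimal illustrative sketch of that byte layout in Python (written forwards, whereas the generated Go fills a pre-sized buffer back-to-front):

def _varint(value: int) -> bytes:
    # Protobuf base-128 varint: 7 bits per byte, MSB set on all but the last byte.
    out = bytearray()
    while value >= 0x80:
        out.append((value & 0x7F) | 0x80)
        value >>= 7
    out.append(value)
    return bytes(out)


def encode_label_entry(key: str, value: str) -> bytes:
    # One map entry is a nested message {1: key, 2: value}, wrapped in Queue field 10.
    k, v = key.encode(), value.encode()
    entry = b"\x0a" + _varint(len(k)) + k + b"\x12" + _varint(len(v)) + v
    return b"\x52" + _varint(len(entry)) + entry


print(encode_label_entry("armadaproject.io/priority", "critical").hex())

The Size() and Unmarshal() changes in the same hunks account for exactly this layout: one tag byte plus a length varint per entry, with the key and value sub-fields dispatched by field number.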
@@ -4356,10 +4368,29 @@ func (m *Queue) MarshalToSizedBuffer(dAtA []byte) (int, error) { var l int _ = l if len(m.Labels) > 0 { - for iNdEx := len(m.Labels) - 1; iNdEx >= 0; iNdEx-- { - i -= len(m.Labels[iNdEx]) - copy(dAtA[i:], m.Labels[iNdEx]) - i = encodeVarintSubmit(dAtA, i, uint64(len(m.Labels[iNdEx]))) + for k := range m.Labels { + v := m.Labels[k] + baseI := i + i -= len(v) + copy(dAtA[i:], v) + i = encodeVarintSubmit(dAtA, i, uint64(len(v))) + i-- + dAtA[i] = 0x12 + i -= len(k) + copy(dAtA[i:], k) + i = encodeVarintSubmit(dAtA, i, uint64(len(k))) + i-- + dAtA[i] = 0xa + i = encodeVarintSubmit(dAtA, i, uint64(baseI-i)) + i-- + dAtA[i] = 0x52 + } + } + if len(m.LabelsDeprecated) > 0 { + for iNdEx := len(m.LabelsDeprecated) - 1; iNdEx >= 0; iNdEx-- { + i -= len(m.LabelsDeprecated[iNdEx]) + copy(dAtA[i:], m.LabelsDeprecated[iNdEx]) + i = encodeVarintSubmit(dAtA, i, uint64(len(m.LabelsDeprecated[iNdEx]))) i-- dAtA[i] = 0x4a } @@ -5673,12 +5704,20 @@ func (m *Queue) Size() (n int) { if m.Cordoned { n += 2 } - if len(m.Labels) > 0 { - for _, s := range m.Labels { + if len(m.LabelsDeprecated) > 0 { + for _, s := range m.LabelsDeprecated { l = len(s) n += 1 + l + sovSubmit(uint64(l)) } } + if len(m.Labels) > 0 { + for k, v := range m.Labels { + _ = k + _ = v + mapEntrySize := 1 + len(k) + sovSubmit(uint64(len(k))) + 1 + len(v) + sovSubmit(uint64(len(v))) + n += mapEntrySize + 1 + sovSubmit(uint64(mapEntrySize)) + } + } return n } @@ -9954,7 +9993,7 @@ func (m *Queue) Unmarshal(dAtA []byte) error { m.Cordoned = bool(v != 0) case 9: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Labels", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field LabelsDeprecated", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { @@ -9982,7 +10021,134 @@ func (m *Queue) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.Labels = append(m.Labels, string(dAtA[iNdEx:postIndex])) + m.LabelsDeprecated = append(m.LabelsDeprecated, string(dAtA[iNdEx:postIndex])) + iNdEx = postIndex + case 10: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Labels", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowSubmit + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthSubmit + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthSubmit + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Labels == nil { + m.Labels = make(map[string]string) + } + var mapkey string + var mapvalue string + for iNdEx < postIndex { + entryPreIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowSubmit + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + if fieldNum == 1 { + var stringLenmapkey uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowSubmit + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLenmapkey |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLenmapkey := int(stringLenmapkey) + if intStringLenmapkey < 0 { + return ErrInvalidLengthSubmit + } + postStringIndexmapkey := iNdEx + intStringLenmapkey + 
if postStringIndexmapkey < 0 { + return ErrInvalidLengthSubmit + } + if postStringIndexmapkey > l { + return io.ErrUnexpectedEOF + } + mapkey = string(dAtA[iNdEx:postStringIndexmapkey]) + iNdEx = postStringIndexmapkey + } else if fieldNum == 2 { + var stringLenmapvalue uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowSubmit + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLenmapvalue |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLenmapvalue := int(stringLenmapvalue) + if intStringLenmapvalue < 0 { + return ErrInvalidLengthSubmit + } + postStringIndexmapvalue := iNdEx + intStringLenmapvalue + if postStringIndexmapvalue < 0 { + return ErrInvalidLengthSubmit + } + if postStringIndexmapvalue > l { + return io.ErrUnexpectedEOF + } + mapvalue = string(dAtA[iNdEx:postStringIndexmapvalue]) + iNdEx = postStringIndexmapvalue + } else { + iNdEx = entryPreIndex + skippy, err := skipSubmit(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthSubmit + } + if (iNdEx + skippy) > postIndex { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + m.Labels[mapkey] = mapvalue iNdEx = postIndex default: iNdEx = preIndex diff --git a/pkg/api/submit.proto b/pkg/api/submit.proto index 493d25e39bb..03372a719d6 100644 --- a/pkg/api/submit.proto +++ b/pkg/api/submit.proto @@ -195,7 +195,8 @@ message Queue { // Determines whether scheduling is enabled for this queue. bool cordoned = 8; // A list of Kubernetes-like key-value labels, e.g. armadaproject.io/priority=critical - repeated string labels = 9; + repeated string labels_deprecated = 9 [deprecated=true]; + map labels = 10; } message PriorityClassResourceLimits { diff --git a/pkg/client/queue/queue.go b/pkg/client/queue/queue.go index 385864df256..effb16b6422 100644 --- a/pkg/client/queue/queue.go +++ b/pkg/client/queue/queue.go @@ -2,7 +2,6 @@ package queue import ( "fmt" - "strings" armadamaps "github.com/armadaproject/armada/internal/common/maps" "github.com/armadaproject/armada/pkg/api" @@ -13,8 +12,8 @@ type Queue struct { Permissions []Permissions `json:"permissions"` PriorityFactor PriorityFactor `json:"priorityFactor"` ResourceLimitsByPriorityClassName map[string]api.PriorityClassResourceLimits - Cordoned bool `json:"cordoned"` - Labels []string `json:"labels"` + Cordoned bool `json:"cordoned"` + Labels map[string]string `json:"labels"` } // NewQueue returns new Queue using the in parameter. 
Error is returned if @@ -50,9 +49,9 @@ func NewQueue(in *api.Queue) (Queue, error) { } // Queue labels must be Kubernetes-like key-value labels - for _, label := range in.Labels { - if len(strings.Split(label, "=")) != 2 { - return Queue{}, fmt.Errorf("queue label must be key-value, not %s", label) + for k, v := range in.Labels { + if k == "" || v == "" { + return Queue{}, fmt.Errorf("queue labels must not have an empty key or value, key: %s, value: %s", k, v) } } diff --git a/pkg/client/queue/queue_test.go b/pkg/client/queue/queue_test.go index 85cdaa00376..be6fcd79ac1 100644 --- a/pkg/client/queue/queue_test.go +++ b/pkg/client/queue/queue_test.go @@ -33,7 +33,7 @@ func TestQueueWithLabels(t *testing.T) { Name: "queue-a", PriorityFactor: 100, Permissions: []Permissions{}, - Labels: []string{"armadaproject.io/gpu-category=gang-user", "armadaproject.io/priority=critical"}, + Labels: map[string]string{"armadaproject.io/gpu-category": "gang-user", "armadaproject.io/priority": "critical"}, ResourceLimitsByPriorityClassName: make(map[string]api.PriorityClassResourceLimits), } queue2, err := NewQueue(queue1.ToAPI()) @@ -48,7 +48,7 @@ func TestQueueWithIncorrectLabels(t *testing.T) { queue1 := Queue{ Name: "queue-a", PriorityFactor: 100, - Labels: []string{"armadaproject.io/not-key-value"}, + Labels: map[string]string{"armadaproject.io/not-key-value": ""}, } _, err := NewQueue(queue1.ToAPI()) require.Error(t, err) diff --git a/third_party/airflow/armada/__init__.py b/third_party/airflow/armada/__init__.py new file mode 100644 index 00000000000..a0f32fe1618 --- /dev/null +++ b/third_party/airflow/armada/__init__.py @@ -0,0 +1,14 @@ +from airflow.serialization.serde import _extra_allowed + +_extra_allowed.add("armada.model.RunningJobContext") +_extra_allowed.add("armada.model.GrpcChannelArgs") + + +def get_provider_info(): + return { + "package-name": "armada-airflow", + "name": "Armada Airflow Operator", + "description": "Armada Airflow Operator.", + "extra-links": ["armada.operators.armada.LookoutLink"], + "versions": ["1.0.0"], + } diff --git a/third_party/airflow/armada/auth.py b/third_party/airflow/armada/auth.py index 16275dbc343..6bf45df780f 100644 --- a/third_party/airflow/armada/auth.py +++ b/third_party/airflow/armada/auth.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Protocol, Tuple +from typing import Protocol """ We use this interface for objects fetching Kubernetes auth tokens. Since it's used within the Trigger, it must be serialisable.""" @@ -6,5 +6,3 @@ class TokenRetriever(Protocol): def get_token(self) -> str: ... - - def serialize(self) -> Tuple[str, Dict[str, Any]]: ... 
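With serialize() removed from the Protocol above, any object exposing get_token() -> str now satisfies TokenRetriever structurally and can be passed to the operator as k8s_token_retriever. A hypothetical minimal implementation (the class name and token path are illustrative only, not part of this change):

from pathlib import Path


class StaticFileTokenRetriever:
    """Hypothetical TokenRetriever: reads a Kubernetes auth token from a file."""

    def __init__(self, token_path: str) -> None:
        self._token_path = Path(token_path)

    def get_token(self) -> str:
        # Re-read on every call so rotated service-account tokens are picked up.
        return self._token_path.read_text().strip()

Because TokenRetriever is a typing.Protocol, no inheritance or registration is required; the structural match on get_token() is sufficient.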
diff --git a/third_party/airflow/armada/hooks.py b/third_party/airflow/armada/hooks.py new file mode 100644 index 00000000000..a894d09249e --- /dev/null +++ b/third_party/airflow/armada/hooks.py @@ -0,0 +1,129 @@ +import dataclasses +import json +import threading +from functools import cached_property +from typing import Dict, Optional + +import grpc +from airflow.exceptions import AirflowException +from airflow.serialization.serde import serialize +from airflow.utils.log.logging_mixin import LoggingMixin +from armada.model import GrpcChannelArgs +from armada_client.armada.job_pb2 import JobRunDetails +from armada_client.armada.submit_pb2 import JobSubmitRequestItem +from armada_client.client import ArmadaClient +from armada_client.typings import JobState +from pendulum import DateTime + +from .model import RunningJobContext + + +class ArmadaClientFactory: + CLIENTS_LOCK = threading.Lock() + CLIENTS: Dict[str, ArmadaClient] = {} + + @staticmethod + def client_for(args: GrpcChannelArgs) -> ArmadaClient: + """ + Armada clients, maintain GRPC connection to Armada API. + We cache them per channel args config in class level cache. + + Access to this method can be from multiple-threads. + """ + channel_args_key = json.dumps(serialize(args)) + with ArmadaClientFactory.CLIENTS_LOCK: + if channel_args_key not in ArmadaClientFactory.CLIENTS: + ArmadaClientFactory.CLIENTS[channel_args_key] = ArmadaClient( + channel=ArmadaClientFactory._create_channel(args) + ) + return ArmadaClientFactory.CLIENTS[channel_args_key] + + @staticmethod + def _create_channel(args: GrpcChannelArgs) -> grpc.Channel: + if args.auth is None: + return grpc.insecure_channel( + target=args.target, options=args.options, compression=args.compression + ) + + return grpc.secure_channel( + target=args.target, + options=args.options, + compression=args.compression, + credentials=grpc.composite_channel_credentials( + grpc.ssl_channel_credentials(), + grpc.metadata_call_credentials(args.auth), + ), + ) + + +class ArmadaHook(LoggingMixin): + def __init__(self, args: GrpcChannelArgs): + self.args = args + + @cached_property + def client(self): + return ArmadaClientFactory.client_for(self.args) + + def cancel_job(self, job_context: RunningJobContext) -> RunningJobContext: + try: + result = self.client.cancel_jobs( + queue=job_context.armada_queue, + job_set_id=job_context.job_set_id, + job_id=job_context.job_id, + ) + if len(list(result.cancelled_ids)) > 0: + self.log.info(f"Cancelled job with id {result.cancelled_ids}") + else: + self.log.warning(f"Failed to cancel job with id {job_context.job_id}") + except Exception as e: + self.log.warning(f"Failed to cancel job with id {job_context.job_id}: {e}") + finally: + return dataclasses.replace(job_context, job_state=JobState.CANCELLED.name) + + def submit_job( + self, queue: str, job_set_id: str, job_request: JobSubmitRequestItem + ) -> RunningJobContext: + resp = self.client.submit_jobs(queue, job_set_id, [job_request]) + num_responses = len(resp.job_response_items) + + # We submitted exactly one job to armada, so we expect a single response + if num_responses != 1: + raise AirflowException( + f"No valid received from Armada (expected 1 job to be created " + f"but got {num_responses})" + ) + job = resp.job_response_items[0] + + # Throw if armada told us we had submitted something bad + if job.error: + raise AirflowException(f"Error submitting job to Armada: {job.error}") + + return RunningJobContext(queue, job.job_id, job_set_id, DateTime.utcnow()) + + def refresh_context( + self, 
job_context: RunningJobContext, tracking_url: str + ) -> RunningJobContext: + response = self.client.get_job_status([job_context.job_id]) + state = JobState(response.job_states[job_context.job_id]) + if state != job_context.state: + self.log.info( + f"job {job_context.job_id} is in state: {state.name}. " + f"{tracking_url}" + ) + + cluster = job_context.cluster + if not cluster: + # Job is running / or completed already + if state == JobState.RUNNING or state.is_terminal(): + run_details = self._get_latest_job_run_details(job_context.job_id) + if run_details: + cluster = run_details.cluster + return dataclasses.replace(job_context, job_state=state.name, cluster=cluster) + + def _get_latest_job_run_details(self, job_id) -> Optional[JobRunDetails]: + job_details = self.client.get_job_details([job_id]).job_details[job_id] + if job_details and job_details.latest_run_id: + for run in job_details.job_runs: + if run.run_id == job_details.latest_run_id: + return run + return None diff --git a/third_party/airflow/armada/model.py b/third_party/airflow/armada/model.py index 00b9ab59800..91db62420e0 100644 --- a/third_party/airflow/armada/model.py +++ b/third_party/airflow/armada/model.py @@ -1,7 +1,11 @@ -import importlib -from typing import Any, Dict, Optional, Sequence, Tuple +from __future__ import annotations + +from dataclasses import dataclass +from typing import Any, ClassVar, Dict, Optional, Sequence, Tuple import grpc +from armada_client.typings import JobState +from pendulum import DateTime """ This class exists so that we can retain our connection to the Armada Query API when using the deferrable Armada Airflow Operator. Airflow requires any state @@ -10,73 +14,55 @@ class GrpcChannelArgs: + __version__: ClassVar[int] = 1 + def __init__( self, target: str, options: Optional[Sequence[Tuple[str, Any]]] = None, compression: Optional[grpc.Compression] = None, auth: Optional[grpc.AuthMetadataPlugin] = None, - auth_details: Optional[Dict[str, Any]] = None, ): self.target = target self.options = options self.compression = compression - if auth: - self.auth = auth - elif auth_details: - classpath, kwargs = auth_details - module_path, class_name = classpath.rsplit( - ".", 1 - ) # Split the classpath to module and class name - module = importlib.import_module( - module_path - ) # Dynamically import the module - cls = getattr(module, class_name) # Get the class from the module - self.auth = cls( - **kwargs - ) # Instantiate the class with the deserialized kwargs - else: - self.auth = None + self.auth = auth def serialize(self) -> Dict[str, Any]: - auth_details = self.auth.serialize() if self.auth else None return { "target": self.target, "options": self.options, "compression": self.compression, - "auth_details": auth_details, + "auth": self.auth, } - def channel(self) -> grpc.Channel: - if self.auth is None: - return grpc.insecure_channel( - target=self.target, options=self.options, compression=self.compression - ) + @staticmethod + def deserialize(data: dict[str, Any], version: int) -> GrpcChannelArgs: + if version > GrpcChannelArgs.__version__: + raise TypeError("serialized version > class version") + return GrpcChannelArgs(**data) - return grpc.secure_channel( - target=self.target, - options=self.options, - compression=self.compression, - credentials=grpc.composite_channel_credentials( - grpc.ssl_channel_credentials(), - grpc.metadata_call_credentials(self.auth), - ), + def __eq__(self, value: object) -> bool: + if type(value) is not GrpcChannelArgs: + return False + return ( + self.target == 
value.target + and self.options == value.options + and self.compression == value.compression + and self.auth == value.auth ) - def aio_channel(self) -> grpc.aio.Channel: - if self.auth is None: - return grpc.aio.insecure_channel( - target=self.target, - options=self.options, - compression=self.compression, - ) - return grpc.aio.secure_channel( - target=self.target, - options=self.options, - compression=self.compression, - credentials=grpc.composite_channel_credentials( - grpc.ssl_channel_credentials(), - grpc.metadata_call_credentials(self.auth), - ), - ) +@dataclass(frozen=True) +class RunningJobContext: + armada_queue: str + job_id: str + job_set_id: str + submit_time: DateTime + cluster: Optional[str] = None + last_log_time: Optional[DateTime] = None + job_state: str = JobState.UNKNOWN.name + + @property + def state(self) -> JobState: + return JobState[self.job_state] diff --git a/third_party/airflow/armada/operators/armada.py b/third_party/airflow/armada/operators/armada.py index 7e365417ed3..3f06b99252b 100644 --- a/third_party/airflow/armada/operators/armada.py +++ b/third_party/airflow/armada/operators/armada.py @@ -17,125 +17,43 @@ # under the License. from __future__ import annotations -import asyncio +import dataclasses import datetime -import functools import os -import threading import time -from dataclasses import dataclass -from functools import cached_property -from typing import Any, AsyncIterator, Dict, Optional, Sequence, Tuple +from typing import Any, Callable, Dict, Optional, Sequence, Tuple import jinja2 from airflow.configuration import conf from airflow.exceptions import AirflowException -from airflow.models import BaseOperator -from airflow.triggers.base import BaseTrigger, TriggerEvent +from airflow.models import BaseOperator, BaseOperatorLink, XCom +from airflow.models.taskinstancekey import TaskInstanceKey +from airflow.serialization.serde import deserialize from airflow.utils.context import Context from airflow.utils.log.logging_mixin import LoggingMixin from armada.auth import TokenRetriever from armada.log_manager import KubernetesPodLogManager from armada.model import GrpcChannelArgs -from armada_client.armada.job_pb2 import JobRunDetails from armada_client.armada.submit_pb2 import JobSubmitRequestItem -from armada_client.client import ArmadaClient from armada_client.typings import JobState from google.protobuf.json_format import MessageToDict, ParseDict from pendulum import DateTime +from ..hooks import ArmadaHook +from ..model import RunningJobContext +from ..triggers import ArmadaPollJobTrigger +from ..utils import log_exceptions -def log_exceptions(method): - @functools.wraps(method) - def wrapper(self, *args, **kwargs): - try: - return method(self, *args, **kwargs) - except Exception as e: - if hasattr(self, "log") and hasattr(self.log, "error"): - self.log.error(f"Exception in {method.__name__}: {e}") - raise - - return wrapper - - -@dataclass(frozen=False) -class _RunningJobContext: - armada_queue: str - job_set_id: str - job_id: str - state: JobState = JobState.UNKNOWN - start_time: DateTime = DateTime.utcnow() - cluster: Optional[str] = None - last_log_time: Optional[DateTime] = None - - def serialize(self) -> tuple[str, Dict[str, Any]]: - return ( - "armada.operators.armada._RunningJobContext", - { - "armada_queue": self.armada_queue, - "job_set_id": self.job_set_id, - "job_id": self.job_id, - "state": self.state.value, - "start_time": self.start_time, - "cluster": self.cluster, - "last_log_time": self.last_log_time, - }, - ) - - def 
from_payload(payload: Dict[str, Any]) -> _RunningJobContext: - return _RunningJobContext( - armada_queue=payload["armada_queue"], - job_set_id=payload["job_set_id"], - job_id=payload["job_id"], - state=JobState(payload["state"]), - start_time=payload["start_time"], - cluster=payload["cluster"], - last_log_time=payload["last_log_time"], - ) - - -class _ArmadaPollJobTrigger(BaseTrigger): - def __init__(self, moment: datetime.timedelta, context: _RunningJobContext) -> None: - super().__init__() - self.moment = moment - self.context = context - - def serialize(self) -> tuple[str, dict[str, Any]]: - return ( - "armada.operators.armada._ArmadaPollJobTrigger", - {"moment": self.moment, "context": self.context.serialize()}, - ) - - def __eq__(self, value: object) -> bool: - if not isinstance(value, _ArmadaPollJobTrigger): - return False - return self.moment == value.moment and self.context == value.context - async def run(self) -> AsyncIterator[TriggerEvent]: - while self.moment > DateTime.utcnow(): - await asyncio.sleep(1) - yield TriggerEvent(self.context) +class LookoutLink(BaseOperatorLink): + name = "Lookout" + def get_link(self, operator: BaseOperator, *, ti_key: TaskInstanceKey): + task_state = XCom.get_value(ti_key=ti_key) + if not task_state: + return "" -class _ArmadaClientFactory: - CLIENTS_LOCK = threading.Lock() - CLIENTS: Dict[str, ArmadaClient] = {} - - @staticmethod - def client_for(args: GrpcChannelArgs) -> ArmadaClient: - """ - Armada clients, maintain GRPC connection to Armada API. - We cache them per channel args config in class level cache. - - Access to this method can be from multiple-threads. - """ - channel_args_key = str(args.serialize()) - with _ArmadaClientFactory.CLIENTS_LOCK: - if channel_args_key not in _ArmadaClientFactory.CLIENTS: - _ArmadaClientFactory.CLIENTS[channel_args_key] = ArmadaClient( - channel=args.channel() - ) - return _ArmadaClientFactory.CLIENTS[channel_args_key] + return task_state.get("armada_lookout_url", "") class ArmadaOperator(BaseOperator, LoggingMixin): @@ -146,7 +64,10 @@ class ArmadaOperator(BaseOperator, LoggingMixin): and handles job cancellation if the Airflow task is killed. """ + operator_extra_links = (LookoutLink(),) + template_fields: Sequence[str] = ("job_request", "job_set_prefix") + template_fields_renderers: Dict[str, str] = {"job_request": "py"} """ Initializes a new ArmadaOperator. @@ -158,7 +79,8 @@ class ArmadaOperator(BaseOperator, LoggingMixin): :param armada_queue: The name of the Armada queue to which the job will be submitted. :type armada_queue: str :param job_request: The job to be submitted to Armada. -:type job_request: JobSubmitRequestItem +:type job_request: JobSubmitRequestItem | \ +Callable[[Context, jinja2.Environment], JobSubmitRequestItem] :param job_set_prefix: A string to prepend to the jobSet name. :type job_set_prefix: Optional[str] :param lookout_url_template: Template for creating lookout links. If not specified @@ -177,6 +99,8 @@ class ArmadaOperator(BaseOperator, LoggingMixin): :param job_acknowledgement_timeout: The timeout in seconds to wait for a job to be acknowledged by Armada. :type job_acknowledgement_timeout: int +:param dry_run: Run Operator in dry-run mode - render Armada request and terminate. +:type dry_run: bool :param kwargs: Additional keyword arguments to pass to the BaseOperator. 
""" @@ -185,7 +109,10 @@ def __init__( name: str, channel_args: GrpcChannelArgs, armada_queue: str, - job_request: JobSubmitRequestItem, + job_request: ( + JobSubmitRequestItem + | Callable[[Context, jinja2.Environment], JobSubmitRequestItem] + ), job_set_prefix: Optional[str] = "", lookout_url_template: Optional[str] = None, poll_interval: int = 30, @@ -195,6 +122,7 @@ def __init__( "operators", "default_deferrable", fallback=True ), job_acknowledgement_timeout: int = 5 * 60, + dry_run: bool = False, **kwargs, ) -> None: super().__init__(**kwargs) @@ -210,6 +138,7 @@ def __init__( self.k8s_token_retriever = k8s_token_retriever self.deferrable = deferrable self.job_acknowledgement_timeout = job_acknowledgement_timeout + self.dry_run = dry_run self.job_context = None if self.container_logs and self.k8s_token_retriever is None: @@ -226,32 +155,31 @@ def execute(self, context) -> None: :param context: The execution context provided by Airflow. :type context: Context """ - # We take the job_set_id from Airflow's run_id. This means that all jobs in the - # dag will be in the same jobset. + # We take the job_set_id from Airflow's run_id. + # So all jobs in the dag will be in the same jobset. self.job_set_id = f"{self.job_set_prefix}{context['run_id']}" self._annotate_job_request(context, self.job_request) - # Submit job or reattach to previously submitted job. We always do this - # synchronously. - job_id = self._reattach_or_submit_job( - context, self.armada_queue, self.job_set_id, self.job_request - ) + if self.dry_run: + self.log.info( + f"Running in dry_run mode. job_set_id: {self.job_set_id} \n" + f"{self.job_request}" + ) + return - # Wait until finished - self.job_context = _RunningJobContext( - self.armada_queue, self.job_set_id, job_id, start_time=DateTime.utcnow() + # Submit job or reattach to previously submitted job. + # Always do this synchronously. + self.job_context = self._reattach_or_submit_job( + context, self.job_set_id, self.job_request ) - if self.deferrable: - self._deffered_yield(self.job_context) - else: - self._poll_for_termination(self.job_context) + self._poll_for_termination() - @cached_property - def client(self) -> ArmadaClient: - return _ArmadaClientFactory.client_for(self.channel_args) + @property + def hook(self) -> ArmadaHook: + return ArmadaHook(self.channel_args) - @cached_property + @property def pod_manager(self) -> KubernetesPodLogManager: return KubernetesPodLogManager(token_retriever=self.k8s_token_retriever) @@ -270,120 +198,107 @@ def render_template_fields( :param context: Airflow Context dict wi1th values to apply on content :param jinja_env: jinja’s environment to use for rendering. 
""" + if callable(self.job_request): + if not jinja_env: + jinja_env = self.get_template_env() + self.job_request = self.job_request(context, jinja_env) + self.job_request = MessageToDict( self.job_request, preserving_proto_field_name=True ) super().render_template_fields(context, jinja_env) self.job_request = ParseDict(self.job_request, JobSubmitRequestItem()) - def _cancel_job(self, job_context) -> None: - try: - result = self.client.cancel_jobs( - queue=job_context.armada_queue, - job_set_id=job_context.job_set_id, - job_id=job_context.job_id, - ) - if len(list(result.cancelled_ids)) > 0: - self.log.info(f"Cancelled job with id {result.cancelled_ids}") - else: - self.log.warning(f"Failed to cancel job with id {job_context.job_id}") - except Exception as e: - self.log.warning(f"Failed to cancel job with id {job_context.job_id}: {e}") - def on_kill(self) -> None: if self.job_context is not None: self.log.info( f"on_kill called, " - "cancelling job with id {self.job_context.job_id} in queue " + f"cancelling job with id {self.job_context.job_id} in queue " f"{self.job_context.armada_queue}" ) - self._cancel_job(self.job_context) + self.hook.cancel_job(self.job_context) + self.job_context = None - def _trigger_tracking_message(self, job_id: str): + def lookout_url(self, job_id): if self.lookout_url_template: - return ( - f"Job details available at " - f'{self.lookout_url_template.replace("", job_id)}' - ) + return self.lookout_url_template.replace("", job_id) + return None + + def _trigger_tracking_message(self, job_id): + url = self.lookout_url(job_id) + if url: + return f"Job details available at {url}" return "" - def _deffered_yield(self, context: _RunningJobContext): - self.defer( - timeout=self.execution_timeout, - trigger=_ArmadaPollJobTrigger( - DateTime.utcnow() + datetime.timedelta(seconds=self.poll_interval), - context, - ), - method_name="_deffered_poll_for_termination", - ) + def _yield(self): + if self.deferrable: + self.defer( + timeout=self.execution_timeout, + trigger=ArmadaPollJobTrigger( + DateTime.utcnow() + datetime.timedelta(seconds=self.poll_interval), + self.job_context, + self.channel_args, + ), + method_name="_trigger_reentry", + ) + else: + time.sleep(self.poll_interval) - @log_exceptions - def _deffered_poll_for_termination( + def _trigger_reentry( self, context: Context, event: Tuple[str, Dict[str, Any]] ) -> None: - job_run_context = _RunningJobContext.from_payload(event[1]) - while job_run_context.state.is_active(): - job_run_context = self._check_job_status_and_fetch_logs(job_run_context) - if job_run_context.state.is_active(): - self._deffered_yield(job_run_context) - - self._running_job_terminated(job_run_context) + self.job_context = deserialize(event) + self._poll_for_termination() def _reattach_or_submit_job( self, context: Context, - queue: str, job_set_id: str, job_request: JobSubmitRequestItem, - ) -> str: + ) -> RunningJobContext: + # Try to re-initialize job_context from xcom if it exist. ti = context["ti"] - existing_id = ti.xcom_pull( + existing_run = ti.xcom_pull( dag_id=ti.dag_id, task_ids=ti.task_id, key=f"{ti.try_number}" ) - if existing_id is not None: + if existing_run is not None: self.log.info( - f"Attached to existing job with id {existing_id['armada_job_id']}." - f" {self._trigger_tracking_message(existing_id['armada_job_id'])}" + f"Attached to existing job with id {existing_run['armada_job_id']}." 
+ f" {self._trigger_tracking_message(existing_run['armada_job_id'])}" ) - return existing_id["armada_job_id"] - - job_id = self._submit_job(queue, job_set_id, job_request) - self.log.info( - f"Submitted job with id {job_id}. {self._trigger_tracking_message(job_id)}" - ) - ti.xcom_push(key=f"{ti.try_number}", value={"armada_job_id": job_id}) - return job_id - - def _submit_job( - self, queue: str, job_set_id: str, job_request: JobSubmitRequestItem - ) -> str: - resp = self.client.submit_jobs(queue, job_set_id, [job_request]) - num_responses = len(resp.job_response_items) - - # We submitted exactly one job to armada, so we expect a single response - if num_responses != 1: - raise AirflowException( - f"No valid received from Armada (expected 1 job to be created " - f"but got {num_responses}" + return RunningJobContext( + armada_queue=existing_run["armada_queue"], + job_id=existing_run["armada_job_id"], + job_set_id=existing_run["armada_job_set_id"], + submit_time=DateTime.utcnow(), ) - job = resp.job_response_items[0] - - # Throw if armada told us we had submitted something bad - if job.error: - raise AirflowException(f"Error submitting job to Armada: {job.error}") - return job.job_id + # We haven't got a running job, submit a new one and persist state to xcom. + ctx = self.hook.submit_job(self.armada_queue, job_set_id, job_request) + tracking_msg = self._trigger_tracking_message(ctx.job_id) + self.log.info(f"Submitted job with id {ctx.job_id}. {tracking_msg}") + + ti.xcom_push( + key=f"{ti.try_number}", + value={ + "armada_queue": ctx.armada_queue, + "armada_job_id": ctx.job_id, + "armada_job_set_id": ctx.job_set_id, + "armada_lookout_url": self.lookout_url(ctx.job_id), + }, + ) + return ctx - def _poll_for_termination(self, context: _RunningJobContext) -> None: - while context.state.is_active(): - context = self._check_job_status_and_fetch_logs(context) - if context.state.is_active(): - time.sleep(self.poll_interval) + def _poll_for_termination(self) -> None: + while self.job_context.state.is_active(): + self._check_job_status_and_fetch_logs() + if self.job_context.state.is_active(): + self._yield() - self._running_job_terminated(context) + self._running_job_terminated(self.job_context) - def _running_job_terminated(self, context: _RunningJobContext): + def _running_job_terminated(self, context: RunningJobContext): self.log.info( f"job {context.job_id} terminated with state: {context.state.name}" ) @@ -393,57 +308,43 @@ def _running_job_terminated(self, context: _RunningJobContext): f"Final status was {context.state.name}" ) - @log_exceptions - def _check_job_status_and_fetch_logs( - self, context: _RunningJobContext - ) -> _RunningJobContext: - response = self.client.get_job_status([context.job_id]) - state = JobState(response.job_states[context.job_id]) - if state != context.state: - self.log.info( - f"job {context.job_id} is in state: {state.name}. 
" - f"{self._trigger_tracking_message(context.job_id)}" - ) - context.state = state - - if context.state == JobState.UNKNOWN: + def _not_acknowledged_within_timeout(self) -> bool: + if self.job_context.state == JobState.UNKNOWN: if ( - DateTime.utcnow().diff(context.start_time).in_seconds() + DateTime.utcnow().diff(self.job_context.submit_time).in_seconds() > self.job_acknowledgement_timeout ): - self.log.info( - f"Job {context.job_id} not acknowledged by the Armada within " - f"timeout ({self.job_acknowledgement_timeout}), terminating" - ) - self._cancel_job(context) - context.state = JobState.CANCELLED - return context + return True + return False - if self.container_logs and not context.cluster: - if context.state == JobState.RUNNING or context.state.is_terminal(): - run_details = self._get_latest_job_run_details(context.job_id) - context.cluster = run_details.cluster + @log_exceptions + def _check_job_status_and_fetch_logs(self) -> None: + self.job_context = self.hook.refresh_context( + self.job_context, self._trigger_tracking_message(self.job_context.job_id) + ) - if context.cluster: + if self._not_acknowledged_within_timeout(): + self.log.info( + f"Job {self.job_context.job_id} not acknowledged by the Armada within " + f"timeout ({self.job_acknowledgement_timeout}), terminating" + ) + self.job_context = self.hook.cancel_job(self.job_context) + return + + if self.job_context.cluster and self.container_logs: try: - context.last_log_time = self.pod_manager.fetch_container_logs( - k8s_context=context.cluster, + last_log_time = self.pod_manager.fetch_container_logs( + k8s_context=self.job_context.cluster, namespace=self.job_request.namespace, - pod=f"armada-{context.job_id}-0", + pod=f"armada-{self.job_context.job_id}-0", container=self.container_logs, - since_time=context.last_log_time, + since_time=self.job_context.last_log_time, + ) + self.job_context = dataclasses.replace( + self.job_context, last_log_time=last_log_time ) except Exception as e: self.log.warning(f"Error fetching logs {e}") - return context - - def _get_latest_job_run_details(self, job_id) -> Optional[JobRunDetails]: - job_details = self.client.get_job_details([job_id]).job_details[job_id] - if job_details and job_details.latest_run_id: - for run in job_details.job_runs: - if run.run_id == job_details.latest_run_id: - return run - return None @staticmethod def _annotate_job_request(context, request: JobSubmitRequestItem): diff --git a/third_party/airflow/armada/plugin.py b/third_party/airflow/armada/plugin.py new file mode 100644 index 00000000000..c7694566914 --- /dev/null +++ b/third_party/airflow/armada/plugin.py @@ -0,0 +1,10 @@ +from airflow.plugins_manager import AirflowPlugin + +from .armada.operators.armada import LookoutLink + + +class AirflowExtraLinkPlugin(AirflowPlugin): + name = "extra_link_plugin" + operator_extra_links = [ + LookoutLink(), + ] diff --git a/third_party/airflow/armada/triggers.py b/third_party/airflow/armada/triggers.py new file mode 100644 index 00000000000..2ea44e16c0c --- /dev/null +++ b/third_party/airflow/armada/triggers.py @@ -0,0 +1,110 @@ +from __future__ import annotations + +import asyncio +from datetime import timedelta +from typing import Any, AsyncIterator, ClassVar, Dict + +from airflow.exceptions import AirflowException +from airflow.models.taskinstance import TaskInstance +from airflow.serialization.serde import deserialize, serialize +from airflow.triggers.base import BaseTrigger, TriggerEvent +from airflow.utils.session import provide_session +from airflow.utils.state 
import TaskInstanceState +from pendulum import DateTime +from sqlalchemy.orm.session import Session + +from .hooks import ArmadaHook +from .model import GrpcChannelArgs, RunningJobContext +from .utils import log_exceptions + + +class ArmadaPollJobTrigger(BaseTrigger): + __version__: ClassVar[int] = 1 + + @log_exceptions + def __init__( + self, + moment: timedelta, + context: RunningJobContext | tuple[str, Dict[str, Any]], + channel_args: GrpcChannelArgs | tuple[str, Dict[str, Any]], + ) -> None: + super().__init__() + + self.moment = moment + if type(context) is RunningJobContext: + self.context = context + else: + self.context = deserialize(context) + + if type(channel_args) is GrpcChannelArgs: + self.channel_args = channel_args + else: + self.channel_args = deserialize(channel_args) + + @log_exceptions + def serialize(self) -> tuple[str, dict[str, Any]]: + return ( + "armada.triggers.ArmadaPollJobTrigger", + { + "moment": self.moment, + "context": serialize(self.context), + "channel_args": serialize(self.channel_args), + }, + ) + + @log_exceptions + @provide_session + def get_task_instance(self, session: Session) -> TaskInstance: + """ + Get the task instance for the current task. + :param session: Sqlalchemy session + """ + query = session.query(TaskInstance).filter( + TaskInstance.dag_id == self.task_instance.dag_id, + TaskInstance.task_id == self.task_instance.task_id, + TaskInstance.run_id == self.task_instance.run_id, + TaskInstance.map_index == self.task_instance.map_index, + ) + task_instance = query.one_or_none() + if task_instance is None: + raise AirflowException( + "TaskInstance with dag_id: %s,task_id: %s, " + "run_id: %s and map_index: %s is not found", + self.task_instance.dag_id, + self.task_instance.task_id, + self.task_instance.run_id, + self.task_instance.map_index, + ) + return task_instance + + def should_cancel_job(self) -> bool: + """ + We only want to cancel jobs when task is being marked Failed/Succeeded. + """ + # Database query is needed to get the latest state of the task instance. 
+ task_instance = self.get_task_instance() # type: ignore[call-arg] + return task_instance.state != TaskInstanceState.DEFERRED + + def __eq__(self, value: object) -> bool: + if not isinstance(value, ArmadaPollJobTrigger): + return False + return ( + self.moment == value.moment + and self.context == value.context + and self.channel_args == value.channel_args + ) + + @property + def hook(self) -> ArmadaHook: + return ArmadaHook(self.channel_args) + + @log_exceptions + async def run(self) -> AsyncIterator[TriggerEvent]: + try: + while self.moment > DateTime.utcnow(): + await asyncio.sleep(1) + yield TriggerEvent(serialize(self.context)) + except asyncio.CancelledError: + if self.should_cancel_job(): + self.hook.cancel_job(self.context) + raise diff --git a/third_party/airflow/armada/utils.py b/third_party/airflow/armada/utils.py new file mode 100644 index 00000000000..e700a1bbc5e --- /dev/null +++ b/third_party/airflow/armada/utils.py @@ -0,0 +1,14 @@ +import functools + + +def log_exceptions(method): + @functools.wraps(method) + def wrapper(self, *args, **kwargs): + try: + return method(self, *args, **kwargs) + except Exception as e: + if hasattr(self, "log") and hasattr(self.log, "error"): + self.log.error(f"Exception in {method.__name__}: {e}") + raise + + return wrapper diff --git a/third_party/airflow/docs/source/conf.py b/third_party/airflow/docs/source/conf.py index 10d3949aee8..a7e2f5a75bb 100644 --- a/third_party/airflow/docs/source/conf.py +++ b/third_party/airflow/docs/source/conf.py @@ -13,14 +13,14 @@ import os import sys -sys.path.insert(0, os.path.abspath('../..')) +sys.path.insert(0, os.path.abspath("../..")) # -- Project information ----------------------------------------------------- -project = 'python-armadaairflowoperator' -copyright = '2022 Armada Project' -author = 'armada@armadaproject.io' +project = "python-armadaairflowoperator" +copyright = "2022 Armada Project" +author = "armada@armadaproject.io" # -- General configuration --------------------------------------------------- @@ -28,12 +28,12 @@ # Jekyll is the style of markdown used by github pages; using # sphinx_jekyll_builder here allows us to generate docs as # markdown files. -extensions = ['sphinx.ext.autodoc', 'sphinx_jekyll_builder'] +extensions = ["sphinx.ext.autodoc", "sphinx_jekyll_builder"] # This setting puts information about typing in the description section instead # of in the function signature directly. This makes rendered content look much # better in our gh-pages template that renders the generated markdown. -autodoc_typehints = 'description' +autodoc_typehints = "description" # Add any paths that contain templates here, relative to this directory. templates_path = [] @@ -49,7 +49,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'alabaster' +html_theme = "alabaster" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. 
They are copied after the builtin static files, diff --git a/third_party/airflow/pyproject.toml b/third_party/airflow/pyproject.toml index 8f8fb538a57..3c8471bde44 100644 --- a/third_party/airflow/pyproject.toml +++ b/third_party/airflow/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "armada_airflow" -version = "1.0.1" +version = "1.0.2" description = "Armada Airflow Operator" readme='README.md' authors = [{name = "Armada-GROSS", email = "armada@armadaproject.io"}] @@ -31,6 +31,9 @@ test = ["pytest==7.3.1", "coverage==7.3.2", "pytest-asyncio==0.21.1", # note(JayF): sphinx-jekyll-builder was broken by sphinx-markdown-builder 0.6 -- so pin to 0.5.5 docs = ["sphinx==7.1.2", "sphinx-jekyll-builder==0.3.0", "sphinx-toolbox==3.2.0b1", "sphinx-markdown-builder==0.5.5"] +[project.entry-points.apache_airflow_provider] +provider_info = "armada.__init__:get_provider_info" + [project.urls] repository='https://github.com/armadaproject/armada' @@ -39,7 +42,7 @@ include = ["armada_airflow*"] [tool.black] line-length = 88 -target-version = ['py310'] +target-version = ['py38', 'py39', 'py310'] include = ''' /( armada diff --git a/third_party/airflow/test/integration/test_airflow_operator_logic.py b/third_party/airflow/test/integration/test_airflow_operator_logic.py index 4bc3c43418e..594d3d5eaec 100644 --- a/third_party/airflow/test/integration/test_airflow_operator_logic.py +++ b/third_party/airflow/test/integration/test_airflow_operator_logic.py @@ -85,9 +85,7 @@ def sleep_pod(image: str): ] -def test_success_job( - client: ArmadaClient, context: Any, channel_args: GrpcChannelArgs, mocker -): +def test_success_job(client: ArmadaClient, context: Any, channel_args: GrpcChannelArgs): job_set_name = f"test-{uuid.uuid1()}" job = client.submit_jobs( queue=DEFAULT_QUEUE, @@ -96,10 +94,11 @@ def test_success_job( ) job_id = job.job_response_items[0].job_id - mocker.patch( - "armada.operators.armada.ArmadaOperator._reattach_or_submit_job", - return_value=job_id, - ) + context["ti"].xcom_pull.return_value = { + "armada_queue": DEFAULT_QUEUE, + "armada_job_id": job_id, + "armada_job_set_id": job_set_name, + } operator = ArmadaOperator( task_id=DEFAULT_TASK_ID, @@ -113,13 +112,11 @@ def test_success_job( operator.execute(context) - response = operator.client.get_job_status([job_id]) + response = client.get_job_status([job_id]) assert JobState(response.job_states[job_id]) == JobState.SUCCEEDED -def test_bad_job( - client: ArmadaClient, context: Any, channel_args: GrpcChannelArgs, mocker -): +def test_bad_job(client: ArmadaClient, context: Any, channel_args: GrpcChannelArgs): job_set_name = f"test-{uuid.uuid1()}" job = client.submit_jobs( queue=DEFAULT_QUEUE, @@ -128,10 +125,11 @@ def test_bad_job( ) job_id = job.job_response_items[0].job_id - mocker.patch( - "armada.operators.armada.ArmadaOperator._reattach_or_submit_job", - return_value=job_id, - ) + context["ti"].xcom_pull.return_value = { + "armada_queue": DEFAULT_QUEUE, + "armada_job_id": job_id, + "armada_job_set_id": job_set_name, + } operator = ArmadaOperator( task_id=DEFAULT_TASK_ID, @@ -149,7 +147,7 @@ def test_bad_job( "Operator did not raise AirflowException on job failure as expected" ) except AirflowException: # Expected - response = operator.client.get_job_status([job_id]) + response = client.get_job_status([job_id]) assert JobState(response.job_states[job_id]) == JobState.FAILED except Exception as e: pytest.fail( @@ -159,7 +157,7 @@ def test_bad_job( def success_job( - task_number: int, context: Any, 
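# Editorial aside - not part of the patch. The [project.entry-points.apache_airflow_provider]
# section added to pyproject.toml above makes the package discoverable by Airflow's
# provider manager, which calls the registered armada.__init__:get_provider_info.
# The actual body of that function is not shown in this diff; a typical
# provider-info callable returns metadata of roughly this shape (assumed values):
def get_provider_info():
    return {
        "package-name": "armada_airflow",  # assumed to match the pyproject name
        "name": "Armada Airflow Operator",
        "description": "Operator for submitting jobs to Armada.",
        "versions": ["1.0.2"],
    }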
channel_args: GrpcChannelArgs + task_number: int, context: Any, channel_args: GrpcChannelArgs, client: ArmadaClient ) -> JobState: operator = ArmadaOperator( task_id=f"{DEFAULT_TASK_ID}_{task_number}", @@ -173,7 +171,7 @@ def success_job( operator.execute(context) - response = operator.client.get_job_status([operator.job_id]) + response = client.get_job_status([operator.job_id]) return JobState(response.job_states[operator.job_id]) @@ -182,7 +180,9 @@ def test_parallel_execution( client: ArmadaClient, context: Any, channel_args: GrpcChannelArgs, mocker ): threads = [] - success_job(task_number=0, context=context, channel_args=channel_args) + success_job( + task_number=0, context=context, channel_args=channel_args, client=client + ) for task_number in range(5): t = threading.Thread( target=success_job, args=[task_number, context, channel_args] @@ -199,7 +199,9 @@ def test_parallel_execution_large( client: ArmadaClient, context: Any, channel_args: GrpcChannelArgs, mocker ): threads = [] - success_job(task_number=0, context=context, channel_args=channel_args) + success_job( + task_number=0, context=context, channel_args=channel_args, client=client + ) for task_number in range(80): t = threading.Thread( target=success_job, args=[task_number, context, channel_args] @@ -216,7 +218,9 @@ def test_parallel_execution_huge( client: ArmadaClient, context: Any, channel_args: GrpcChannelArgs, mocker ): threads = [] - success_job(task_number=0, context=context, channel_args=channel_args) + success_job( + task_number=0, context=context, channel_args=channel_args, client=client + ) for task_number in range(500): t = threading.Thread( target=success_job, args=[task_number, context, channel_args] diff --git a/third_party/airflow/test/operators/test_armada.py b/third_party/airflow/test/operators/test_armada.py deleted file mode 100644 index 85129000ad1..00000000000 --- a/third_party/airflow/test/operators/test_armada.py +++ /dev/null @@ -1,324 +0,0 @@ -import unittest -from datetime import timedelta -from math import ceil -from unittest.mock import MagicMock, PropertyMock, patch - -from airflow.exceptions import AirflowException -from armada.model import GrpcChannelArgs -from armada.operators.armada import ( - ArmadaOperator, - _ArmadaPollJobTrigger, - _RunningJobContext, -) -from armada_client.armada import job_pb2, submit_pb2 -from armada_client.armada.submit_pb2 import JobSubmitRequestItem -from armada_client.k8s.io.api.core.v1 import generated_pb2 as core_v1 -from armada_client.k8s.io.apimachinery.pkg.api.resource import ( - generated_pb2 as api_resource, -) -from armada_client.typings import JobState -from pendulum import UTC, DateTime - -DEFAULT_CURRENT_TIME = DateTime(2024, 8, 7, tzinfo=UTC) -DEFAULT_JOB_ID = "test_job" -DEFAULT_TASK_ID = "test_task_1" -DEFAULT_DAG_ID = "test_dag_1" -DEFAULT_RUN_ID = "test_run_1" -DEFAULT_QUEUE = "test_queue_1" -DEFAULT_POLLING_INTERVAL = 30 -DEFAULT_JOB_ACKNOWLEDGEMENT_TIMEOUT = 5 * 60 - - -class TestArmadaOperator(unittest.TestCase): - def setUp(self): - # Set up a mock context - mock_ti = MagicMock() - mock_ti.task_id = DEFAULT_TASK_ID - mock_dag = MagicMock() - mock_dag.dag_id = DEFAULT_DAG_ID - self.context = { - "ti": mock_ti, - "run_id": DEFAULT_RUN_ID, - "dag": mock_dag, - } - - @patch("time.sleep", return_value=None) - @patch("armada.operators.armada.ArmadaOperator.client", new_callable=PropertyMock) - def test_execute(self, mock_client_fn, _): - test_cases = [ - { - "name": "Job Succeeds", - "statuses": [submit_pb2.RUNNING, submit_pb2.SUCCEEDED], - "success": 
True, - }, - { - "name": "Job Failed", - "statuses": [submit_pb2.RUNNING, submit_pb2.FAILED], - "success": False, - }, - { - "name": "Job cancelled", - "statuses": [submit_pb2.RUNNING, submit_pb2.CANCELLED], - "success": False, - }, - { - "name": "Job preempted", - "statuses": [submit_pb2.RUNNING, submit_pb2.PREEMPTED], - "success": False, - }, - { - "name": "Job Succeeds but takes a lot of transitions", - "statuses": [ - submit_pb2.SUBMITTED, - submit_pb2.RUNNING, - submit_pb2.RUNNING, - submit_pb2.RUNNING, - submit_pb2.RUNNING, - submit_pb2.RUNNING, - submit_pb2.SUCCEEDED, - ], - "success": True, - }, - ] - - for test_case in test_cases: - with self.subTest(test_case=test_case["name"]): - operator = ArmadaOperator( - name="test", - channel_args=GrpcChannelArgs(target="api.armadaproject.io:443"), - armada_queue=DEFAULT_QUEUE, - job_request=JobSubmitRequestItem(), - task_id=DEFAULT_TASK_ID, - ) - - # Set up Mock Armada - mock_client = MagicMock() - mock_client.submit_jobs.return_value = submit_pb2.JobSubmitResponse( - job_response_items=[ - submit_pb2.JobSubmitResponseItem(job_id=DEFAULT_JOB_ID) - ] - ) - - mock_client.get_job_status.side_effect = [ - job_pb2.JobStatusResponse(job_states={DEFAULT_JOB_ID: x}) - for x in test_case["statuses"] - ] - - mock_client_fn.return_value = mock_client - self.context["ti"].xcom_pull.return_value = None - - try: - operator.execute(self.context) - self.assertTrue(test_case["success"]) - except AirflowException: - self.assertFalse(test_case["success"]) - return - - self.assertEqual(mock_client.submit_jobs.call_count, 1) - self.assertEqual( - mock_client.get_job_status.call_count, len(test_case["statuses"]) - ) - - @patch("time.sleep", return_value=None) - @patch( - "armada.operators.armada.ArmadaOperator._cancel_job", new_callable=PropertyMock - ) - @patch("armada.operators.armada.ArmadaOperator.client", new_callable=PropertyMock) - def test_unacknowledged_results_in_on_kill(self, mock_client_fn, mock_on_kill, _): - operator = ArmadaOperator( - name="test", - channel_args=GrpcChannelArgs(target="api.armadaproject.io:443"), - armada_queue=DEFAULT_QUEUE, - job_request=JobSubmitRequestItem(), - task_id=DEFAULT_TASK_ID, - deferrable=False, - job_acknowledgement_timeout=-1, - ) - - # Set up Mock Armada - mock_client = MagicMock() - mock_client.submit_jobs.return_value = submit_pb2.JobSubmitResponse( - job_response_items=[submit_pb2.JobSubmitResponseItem(job_id=DEFAULT_JOB_ID)] - ) - mock_client_fn.return_value = mock_client - mock_client.get_job_status.side_effect = [ - job_pb2.JobStatusResponse(job_states={DEFAULT_JOB_ID: x}) - for x in [submit_pb2.UNKNOWN, submit_pb2.UNKNOWN] - ] - - self.context["ti"].xcom_pull.return_value = None - with self.assertRaises(AirflowException): - operator.execute(self.context) - self.assertEqual(mock_on_kill.call_count, 1) - - """We call on_kill by triggering the job unacknowledged timeout""" - - @patch("time.sleep", return_value=None) - @patch("armada.operators.armada.ArmadaOperator.client", new_callable=PropertyMock) - def test_on_kill_cancels_job(self, mock_client_fn, _): - operator = ArmadaOperator( - name="test", - channel_args=GrpcChannelArgs(target="api.armadaproject.io:443"), - armada_queue=DEFAULT_QUEUE, - job_request=JobSubmitRequestItem(), - task_id=DEFAULT_TASK_ID, - deferrable=False, - job_acknowledgement_timeout=-1, - ) - - # Set up Mock Armada - mock_client = MagicMock() - mock_client.submit_jobs.return_value = submit_pb2.JobSubmitResponse( - 
job_response_items=[submit_pb2.JobSubmitResponseItem(job_id=DEFAULT_JOB_ID)] - ) - mock_client_fn.return_value = mock_client - mock_client.get_job_status.side_effect = [ - job_pb2.JobStatusResponse(job_states={DEFAULT_JOB_ID: x}) - for x in [ - submit_pb2.UNKNOWN - for _ in range( - 1 - + ceil( - DEFAULT_JOB_ACKNOWLEDGEMENT_TIMEOUT / DEFAULT_POLLING_INTERVAL - ) - ) - ] - ] - - self.context["ti"].xcom_pull.return_value = None - with self.assertRaises(AirflowException): - operator.execute(self.context) - self.assertEqual(mock_client.cancel_jobs.call_count, 1) - - @patch("time.sleep", return_value=None) - @patch("armada.operators.armada.ArmadaOperator.client", new_callable=PropertyMock) - def test_job_reattaches(self, mock_client_fn, _): - operator = ArmadaOperator( - name="test", - channel_args=GrpcChannelArgs(target="api.armadaproject.io:443"), - armada_queue=DEFAULT_QUEUE, - job_request=JobSubmitRequestItem(), - task_id=DEFAULT_TASK_ID, - deferrable=False, - job_acknowledgement_timeout=10, - ) - - # Set up Mock Armada - mock_client = MagicMock() - mock_client.get_job_status.side_effect = [ - job_pb2.JobStatusResponse(job_states={DEFAULT_JOB_ID: x}) - for x in [ - submit_pb2.SUCCEEDED - for _ in range( - 1 - + ceil( - DEFAULT_JOB_ACKNOWLEDGEMENT_TIMEOUT / DEFAULT_POLLING_INTERVAL - ) - ) - ] - ] - mock_client_fn.return_value = mock_client - self.context["ti"].xcom_pull.return_value = {"armada_job_id": DEFAULT_JOB_ID} - - operator.execute(self.context) - self.assertEqual(mock_client.submit_jobs.call_count, 0) - - -class TestArmadaOperatorDeferrable(unittest.IsolatedAsyncioTestCase): - def setUp(self): - # Set up a mock context - mock_ti = MagicMock() - mock_ti.task_id = DEFAULT_TASK_ID - mock_dag = MagicMock() - mock_dag.dag_id = DEFAULT_DAG_ID - self.context = { - "ti": mock_ti, - "run_id": DEFAULT_RUN_ID, - "dag": mock_dag, - } - - @patch("pendulum.DateTime.utcnow") - @patch("armada.operators.armada.ArmadaOperator.defer") - @patch("armada.operators.armada.ArmadaOperator.client", new_callable=PropertyMock) - def test_execute_deferred(self, mock_client_fn, mock_defer_fn, mock_datetime_now): - operator = ArmadaOperator( - name="test", - channel_args=GrpcChannelArgs(target="api.armadaproject.io:443"), - armada_queue=DEFAULT_QUEUE, - job_request=JobSubmitRequestItem(), - task_id=DEFAULT_TASK_ID, - deferrable=True, - ) - - mock_datetime_now.return_value = DEFAULT_CURRENT_TIME - - # Set up Mock Armada - mock_client = MagicMock() - mock_client.submit_jobs.return_value = submit_pb2.JobSubmitResponse( - job_response_items=[submit_pb2.JobSubmitResponseItem(job_id=DEFAULT_JOB_ID)] - ) - mock_client_fn.return_value = mock_client - self.context["ti"].xcom_pull.return_value = None - - operator.execute(self.context) - self.assertEqual(mock_client.submit_jobs.call_count, 1) - mock_defer_fn.assert_called_with( - timeout=operator.execution_timeout, - trigger=_ArmadaPollJobTrigger( - moment=DEFAULT_CURRENT_TIME + timedelta(seconds=operator.poll_interval), - context=_RunningJobContext( - armada_queue=DEFAULT_QUEUE, - job_set_id=operator.job_set_id, - job_id=DEFAULT_JOB_ID, - state=JobState.UNKNOWN, - start_time=DEFAULT_CURRENT_TIME, - cluster=None, - last_log_time=None, - ), - ), - method_name="_deffered_poll_for_termination", - ) - - def test_templating(self): - """Tests templating for both the job_prefix and the pod spec""" - prefix = "{{ run_id }}" - pod_arg = "{{ run_id }}" - - pod = core_v1.PodSpec( - containers=[ - core_v1.Container( - name="sleep", - image="alpine:3.20.2", - args=[pod_arg], - 
securityContext=core_v1.SecurityContext(runAsUser=1000), - resources=core_v1.ResourceRequirements( - requests={ - "cpu": api_resource.Quantity(string="120m"), - "memory": api_resource.Quantity(string="510Mi"), - }, - limits={ - "cpu": api_resource.Quantity(string="120m"), - "memory": api_resource.Quantity(string="510Mi"), - }, - ), - ) - ], - ) - job = JobSubmitRequestItem(priority=1, pod_spec=pod, namespace="armada") - - operator = ArmadaOperator( - name="test", - channel_args=GrpcChannelArgs(target="api.armadaproject.io:443"), - armada_queue=DEFAULT_QUEUE, - job_request=job, - job_set_prefix=prefix, - task_id=DEFAULT_TASK_ID, - deferrable=True, - ) - - operator.render_template_fields(self.context) - - self.assertEqual(operator.job_set_prefix, "test_run_1") - self.assertEqual( - operator.job_request.pod_spec.containers[0].args[0], "test_run_1" - ) diff --git a/third_party/airflow/test/__init__.py b/third_party/airflow/test/unit/__init__.py similarity index 100% rename from third_party/airflow/test/__init__.py rename to third_party/airflow/test/unit/__init__.py diff --git a/third_party/airflow/test/operators/__init__.py b/third_party/airflow/test/unit/operators/__init__.py similarity index 100% rename from third_party/airflow/test/operators/__init__.py rename to third_party/airflow/test/unit/operators/__init__.py diff --git a/third_party/airflow/test/unit/operators/test_armada.py b/third_party/airflow/test/unit/operators/test_armada.py new file mode 100644 index 00000000000..d2aab33cce4 --- /dev/null +++ b/third_party/airflow/test/unit/operators/test_armada.py @@ -0,0 +1,262 @@ +import dataclasses +from datetime import timedelta +from typing import Optional +from unittest.mock import MagicMock, patch + +import pytest +from airflow.exceptions import AirflowException, TaskDeferred +from armada.model import GrpcChannelArgs, RunningJobContext +from armada.operators.armada import ArmadaOperator +from armada.triggers import ArmadaPollJobTrigger +from armada_client.armada.submit_pb2 import JobSubmitRequestItem +from armada_client.typings import JobState +from pendulum import UTC, DateTime + +DEFAULT_CURRENT_TIME = DateTime(2024, 8, 7, tzinfo=UTC) +DEFAULT_JOB_ID = "test_job" +DEFAULT_TASK_ID = "test_task_1" +DEFAULT_JOB_SET = "prefix-test_run_1" +DEFAULT_QUEUE = "test_queue_1" +DEFAULT_CLUSTER = "cluster-1" + + +def default_hook() -> MagicMock: + mock = MagicMock() + job_context = running_job_context() + mock.submit_job.return_value = job_context + mock.refresh_context.return_value = dataclasses.replace( + job_context, job_state=JobState.SUCCEEDED.name, cluster=DEFAULT_CLUSTER + ) + mock.cancel_job.return_value = dataclasses.replace( + job_context, job_state=JobState.CANCELLED.name + ) + + return mock + + +@pytest.fixture(scope="function", autouse=True) +def mock_operator_dependencies(): + # We no-op time.sleep in tests. 
+ with patch("time.sleep", return_value=None) as sleep, patch( + "armada.log_manager.KubernetesPodLogManager.fetch_container_logs" + ) as logs, patch( + "armada.operators.armada.ArmadaOperator.hook", new_callable=default_hook + ) as hook: + yield sleep, logs, hook + + +@pytest.fixture +def context(): + mock_ti = MagicMock() + mock_ti.task_id = DEFAULT_TASK_ID + mock_ti.try_number = 0 + mock_ti.xcom_pull.return_value = None + + mock_dag = MagicMock() + mock_dag.dag_id = "test_dag_1" + + context = {"ti": mock_ti, "run_id": "test_run_1", "dag": mock_dag} + + return context + + +def operator( + job_request: JobSubmitRequestItem, + deferrable: bool = False, + job_acknowledgement_timeout_s: int = 30, + container_logs: Optional[str] = None, +) -> ArmadaOperator: + operator = ArmadaOperator( + armada_queue=DEFAULT_QUEUE, + channel_args=GrpcChannelArgs(target="api.armadaproject.io:443"), + container_logs=container_logs, + deferrable=deferrable, + job_acknowledgement_timeout=job_acknowledgement_timeout_s, + job_request=job_request, + job_set_prefix="prefix-", + lookout_url_template="http://lookout.armadaproject.io/jobs?job_id=", + name="test", + task_id=DEFAULT_TASK_ID, + ) + + return operator + + +def running_job_context( + cluster: str = None, + submit_time: DateTime = DateTime.now(), + job_state: str = JobState.UNKNOWN.name, +) -> RunningJobContext: + return RunningJobContext( + DEFAULT_QUEUE, + DEFAULT_JOB_ID, + DEFAULT_JOB_SET, + submit_time, + cluster, + job_state=job_state, + ) + + +@pytest.mark.parametrize( + "job_states", + [ + [JobState.RUNNING, JobState.SUCCEEDED], + [ + JobState.QUEUED, + JobState.LEASED, + JobState.QUEUED, + JobState.RUNNING, + JobState.SUCCEEDED, + ], + ], + ids=["success", "success - multiple events"], +) +def test_execute(job_states, context): + op = operator(JobSubmitRequestItem()) + + op.hook.refresh_context.side_effect = [ + running_job_context(cluster="cluster-1", job_state=s.name) for s in job_states + ] + + op.execute(context) + + op.hook.submit_job.assert_called_once_with( + DEFAULT_QUEUE, DEFAULT_JOB_SET, op.job_request + ) + assert op.hook.refresh_context.call_count == len(job_states) + + # We're not polling for logs + op.pod_manager.fetch_container_logs.assert_not_called() + + +@patch("pendulum.DateTime.utcnow", return_value=DEFAULT_CURRENT_TIME) +def test_execute_in_deferrable(_, context): + op = operator(JobSubmitRequestItem(), deferrable=True) + op.hook.refresh_context.side_effect = [ + running_job_context(cluster="cluster-1", job_state=s.name) + for s in [JobState.QUEUED, JobState.QUEUED] + ] + + with pytest.raises(TaskDeferred) as deferred: + op.execute(context) + + op.hook.submit_job.assert_called_once_with( + DEFAULT_QUEUE, DEFAULT_JOB_SET, op.job_request + ) + assert deferred.value.timeout == op.execution_timeout + assert deferred.value.trigger == ArmadaPollJobTrigger( + moment=DEFAULT_CURRENT_TIME + timedelta(seconds=op.poll_interval), + context=op.job_context, + channel_args=op.channel_args, + ) + assert deferred.value.method_name == "_trigger_reentry" + + +@pytest.mark.parametrize( + "terminal_state", + [JobState.FAILED, JobState.PREEMPTED, JobState.CANCELLED], + ids=["failed", "preempted", "cancelled"], +) +def test_execute_fail(terminal_state, context): + op = operator(JobSubmitRequestItem()) + + op.hook.refresh_context.side_effect = [ + running_job_context(cluster="cluster-1", job_state=s.name) + for s in [JobState.RUNNING, terminal_state] + ] + + with pytest.raises(AirflowException) as exec_info: + op.execute(context) + + # Error message 
contain terminal state and job id + assert DEFAULT_JOB_ID in str(exec_info) + assert terminal_state.name in str(exec_info) + + op.hook.submit_job.assert_called_once_with( + DEFAULT_QUEUE, DEFAULT_JOB_SET, op.job_request + ) + assert op.hook.refresh_context.call_count == 2 + + # We're not polling for logs + op.pod_manager.fetch_container_logs.assert_not_called() + + +def test_on_kill_terminates_running_job(): + op = operator(JobSubmitRequestItem()) + job_context = running_job_context() + op.job_context = job_context + + op.on_kill() + op.on_kill() + + # We ensure we only try to cancel job once. + op.hook.cancel_job.assert_called_once_with(job_context) + + +def test_not_acknowledged_within_timeout_terminates_running_job(context): + job_context = running_job_context() + op = operator(JobSubmitRequestItem(), job_acknowledgement_timeout_s=-1) + op.hook.refresh_context.return_value = job_context + + with pytest.raises(AirflowException) as exec_info: + op.execute(context) + + # Error message contain terminal state and job id + assert DEFAULT_JOB_ID in str(exec_info) + assert JobState.CANCELLED.name in str(exec_info) + + # We also cancel already submitted job + op.hook.cancel_job.assert_called_once_with(job_context) + + +def test_polls_for_logs(context): + op = operator( + JobSubmitRequestItem(namespace="namespace-1"), container_logs="alpine" + ) + op.execute(context) + + # We polled logs as expected. + op.pod_manager.fetch_container_logs.assert_called_once_with( + k8s_context="cluster-1", + namespace="namespace-1", + pod="armada-test_job-0", + container="alpine", + since_time=None, + ) + + +def test_publishes_xcom_state(context): + op = operator(JobSubmitRequestItem()) + op.execute(context) + + lookout_url = f"http://lookout.armadaproject.io/jobs?job_id={DEFAULT_JOB_ID}" + context["ti"].xcom_push.assert_called_once_with( + key="0", + value={ + "armada_job_id": DEFAULT_JOB_ID, + "armada_job_set_id": DEFAULT_JOB_SET, + "armada_lookout_url": lookout_url, + "armada_queue": DEFAULT_QUEUE, + }, + ) + + +def test_reattaches_to_running_job(context): + op = operator(JobSubmitRequestItem()) + context["ti"].xcom_pull.return_value = { + "armada_job_id": DEFAULT_JOB_ID, + "armada_job_set_id": DEFAULT_JOB_SET, + "armada_queue": DEFAULT_QUEUE, + } + + op.execute(context) + + assert op.job_context == running_job_context( + job_state=JobState.SUCCEEDED.name, cluster=DEFAULT_CLUSTER + ) + op.hook.submit_job.assert_not_called() + + +@pytest.mark.skip("TODO") +def test_templates_job_request_item(): + pass diff --git a/third_party/airflow/test/unit/test_hooks.py b/third_party/airflow/test/unit/test_hooks.py new file mode 100644 index 00000000000..0a2e1ba2e11 --- /dev/null +++ b/third_party/airflow/test/unit/test_hooks.py @@ -0,0 +1,16 @@ +import pytest + + +@pytest.mark.skip("TODO") +def test_submits_job_using_armada_client(): + pass + + +@pytest.mark.skip("TODO") +def test_cancels_job_using_armada_client(): + pass + + +@pytest.mark.skip("TODO") +def test_updates_job_context(): + pass diff --git a/third_party/airflow/test/unit/test_model.py b/third_party/airflow/test/unit/test_model.py new file mode 100644 index 00000000000..906b7315ad9 --- /dev/null +++ b/third_party/airflow/test/unit/test_model.py @@ -0,0 +1,33 @@ +import grpc +from airflow.serialization.serde import deserialize, serialize +from armada.model import GrpcChannelArgs, RunningJobContext +from armada_client.typings import JobState +from pendulum import DateTime + + +def test_roundtrip_running_job_context(): + context = RunningJobContext( + "queue_123", 
+ "job_id_123", + "job_set_id_123", + DateTime.utcnow(), + "cluster-1.armada.localhost", + DateTime.utcnow().add(minutes=-2), + JobState.RUNNING.name, + ) + + result = deserialize(serialize(context)) + assert context == result + assert JobState.RUNNING == result.state + + +def test_roundtrip_grpc_channel_args(): + channel_args = GrpcChannelArgs( + "armada-api.localhost", + [("key-1", 10), ("key-2", "value-2")], + grpc.Compression.NoCompression, + None, + ) + + result = deserialize(serialize(channel_args)) + assert channel_args == result diff --git a/third_party/airflow/test/unit/test_triggers.py b/third_party/airflow/test/unit/test_triggers.py new file mode 100644 index 00000000000..bdd15333caa --- /dev/null +++ b/third_party/airflow/test/unit/test_triggers.py @@ -0,0 +1,16 @@ +import pytest + + +@pytest.mark.skip("TODO") +def test_yields_with_context(): + pass + + +@pytest.mark.skip("TODO") +def test_cancels_running_job_when_task_is_cancelled(): + pass + + +@pytest.mark.skip("TODO") +def test_do_not_cancels_running_job_when_trigger_is_suspended(): + pass diff --git a/third_party/airflow/tox.ini b/third_party/airflow/tox.ini index 09dd8ce15ea..ed457e94d70 100644 --- a/third_party/airflow/tox.ini +++ b/third_party/airflow/tox.ini @@ -13,7 +13,7 @@ allowlist_externals = find xargs commands = - coverage run -m unittest discover + coverage run -m pytest test/unit/ coverage xml # This executes the dag files in examples but really only checks for imports and python errors bash -c "find examples/ -maxdepth 1 -type f -name *.py | xargs python3"
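# Editorial aside - illustrative only, not part of the patch. The new unit tests
# above pin down the operator's XCom contract: on submission it pushes the job's
# identifiers and Lookout URL under the task's try number, and a later attempt
# that pulls such a payload reattaches to the running job instead of resubmitting
# (hook.submit_job is not called again). Values are the test constants used above;
# should_reattach() is a hypothetical helper that just restates that contract.
submitted_state = {
    "armada_queue": "test_queue_1",
    "armada_job_id": "test_job",
    "armada_job_set_id": "prefix-test_run_1",
    "armada_lookout_url": "http://lookout.armadaproject.io/jobs?job_id=test_job",
}


def should_reattach(xcom_payload) -> bool:
    # Mirrors test_reattaches_to_running_job: queue, job id and job set id are
    # enough for the operator to resume polling an existing job.
    required = {"armada_queue", "armada_job_id", "armada_job_set_id"}
    return xcom_payload is not None and required.issubset(xcom_payload)


assert should_reattach(submitted_state)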