Skip to content

Commit

Permalink
[Fix] Save Pipeline resource to state in addition to spec (#3869)
Browse files Browse the repository at this point in the history
## Changes
After #3839, the provider's handling of DLT pipelines regressed. In
particular, the `Read` method stopped populating certain top-level fields
from the `GetPipelineResponse` into Terraform state. This PR fixes the
regression by additionally writing all top-level fields of the response
into the state as part of the read operation.

Resolves #3855.

## Tests
Unit tests cover the case specified in the issue.

- [x] `make test` run locally
- [ ] relevant change in `docs/` folder
- [ ] covered with integration tests in `internal/acceptance`
- [ ] relevant acceptance tests are passing
- [ ] using Go SDK
  • Loading branch information
mgyucht authored Aug 14, 2024
1 parent 81be591 commit 42e1a45
Show file tree
Hide file tree
Showing 2 changed files with 79 additions and 15 deletions.
26 changes: 18 additions & 8 deletions pipelines/resource_pipeline.go
Original file line number Diff line number Diff line change
Expand Up @@ -176,7 +176,8 @@ type Pipeline struct {

func (Pipeline) Aliases() map[string]map[string]string {
return map[string]map[string]string{
"pipelines.Pipeline": aliasMap,
"pipelines.Pipeline": aliasMap,
"pipelines.PipelineSpec": aliasMap,
}

}
Expand Down Expand Up @@ -217,6 +218,7 @@ func (Pipeline) CustomizeSchema(s *common.CustomizableSchema) *common.Customizab
s.SchemaPath("cause").SetComputed()
s.SchemaPath("cluster_id").SetComputed()
s.SchemaPath("creator_user_name").SetComputed()
s.SchemaPath("run_as_user_name").SetComputed()

// SuppressDiff fields
s.SchemaPath("edition").SetSuppressDiff()
Expand Down Expand Up @@ -274,12 +276,7 @@ func ResourcePipeline() common.Resource {
if err != nil {
return err
}
err = Create(w, ctx, d, d.Timeout(schema.TimeoutCreate))
if err != nil {
return err
}
d.Set("url", c.FormatURL("#joblist/pipelines/", d.Id()))
return nil
return Create(w, ctx, d, d.Timeout(schema.TimeoutCreate))
},
Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
w, err := c.WorkspaceClient()
Expand All @@ -294,7 +291,20 @@ func ResourcePipeline() common.Resource {
if readPipeline.Spec == nil {
return fmt.Errorf("pipeline spec is nil for '%v'", readPipeline.PipelineId)
}
return common.StructToData(readPipeline.Spec, pipelineSchema, d)
p := Pipeline{
PipelineSpec: *readPipeline.Spec,
Cause: readPipeline.Cause,
ClusterId: readPipeline.ClusterId,
CreatorUserName: readPipeline.CreatorUserName,
Health: readPipeline.Health,
LastModified: readPipeline.LastModified,
LatestUpdates: readPipeline.LatestUpdates,
RunAsUserName: readPipeline.RunAsUserName,
State: readPipeline.State,
// Provides the URL to the pipeline in the Databricks UI.
URL: c.FormatURL("#joblist/pipelines/", d.Id()),
}
return common.StructToData(p, pipelineSchema, d)
},
Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
w, err := c.WorkspaceClient()
Expand Down
68 changes: 61 additions & 7 deletions pipelines/resource_pipeline_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -120,10 +120,11 @@ func TestResourcePipelineCreate(t *testing.T) {
e.Get(mock.Anything, pipelines.GetPipelineRequest{
PipelineId: "abcd",
}).Return(&pipelines.GetPipelineResponse{
PipelineId: "abcd",
Name: "test-pipeline",
State: pipelines.PipelineStateRunning,
Spec: &basicPipelineSpec,
PipelineId: "abcd",
Name: "test-pipeline",
State: pipelines.PipelineStateRunning,
LastModified: 123456,
Spec: &basicPipelineSpec,
}, nil).Once()

},
Expand Down Expand Up @@ -158,7 +159,9 @@ func TestResourcePipelineCreate(t *testing.T) {
}
`,
}.ApplyAndExpectData(t, map[string]any{
"id": "abcd",
"id": "abcd",
"last_modified": 123456,
"state": "RUNNING",
})
}

Expand Down Expand Up @@ -285,8 +288,59 @@ func TestResourcePipelineRead(t *testing.T) {
"key1": "value1",
"key2": "value2",
},
"filters.0.include.0": "com.databricks.include",
"continuous": false,
"cluster": []any{
map[string]any{
"apply_policy_default_values": false,
"autoscale": []any{},
"aws_attributes": []any{},
"azure_attributes": []any{},
"cluster_log_conf": []any{},
"driver_instance_pool_id": "",
"driver_node_type_id": "",
"enable_local_disk_encryption": false,
"gcp_attributes": []any{},
"init_scripts": []any{},
"instance_pool_id": "",
"node_type_id": "",
"num_workers": 0,
"policy_id": "",
"spark_conf": map[string]any{},
"spark_env_vars": map[string]any{},
"ssh_public_keys": []any{},
"label": "default",
"custom_tags": map[string]any{
"cluster_tag1": "cluster_value1",
},
},
},
"library": []any{
map[string]any{
"file": []any{},
"maven": []any{},
"jar": "",
"whl": "",
"notebook": []any{
map[string]any{
"path": "/Test",
},
},
},
},
"filters": []any{
map[string]any{
"include": []any{"com.databricks.include"},
"exclude": []any{"com.databricks.exclude"},
},
},
"deployment": []any{
map[string]any{
"kind": "BUNDLE",
"metadata_file_path": "/foo/bar",
},
},
"edition": "ADVANCED",
"channel": "CURRENT",
"continuous": false,
})
}

Expand Down

0 comments on commit 42e1a45

Please sign in to comment.