Skip to content

Commit d18fd18

Browse files
Mahsa Hanifihelayoty
Mahsa Hanifi
authored and committed
added the first draft of the az-svc-data-integration-mlw
Ran terraform recursively and fixed it; changed the cares name into sample; added ADF and fixed AF to use id
1 parent 5ba8c23 commit d18fd18

39 files changed

+1855
-116
lines changed

infra/modules/providers/azure/data-factory/README.md

+1-10
Original file line numberDiff line numberDiff line change
@@ -17,15 +17,13 @@ An instance of the `data-factory` module deploys the _**Data Factory**_ in order
1717
- Ability to provision a single Data Factory instance
1818
- Ability to provision a configurable Pipeline
1919
- Ability to configure Trigger
20-
- Ability to configure SQL server Dataset
21-
- Ability to configure SQL server Linked Service
20+
2221

2322
## Out Of Scope
2423

2524
The following are not supported at this time
2625

2726
- Creating Multiple pipelines
28-
- Only SQL server Dataset/Linked Service are implemented.
2927

3028
## Definition
3129

@@ -35,8 +33,6 @@ Terraform resources used to define the `data-factory` module include the followi
3533
- [azurerm_data_factory_integration_runtime_managed](https://www.terraform.io/docs/providers/azurerm/r/data_factory_integration_runtime_managed.html)
3634
- [azurerm_data_factory_pipeline](https://www.terraform.io/docs/providers/azurerm/r/data_factory_pipeline.html)
3735
- [azurerm_data_factory_trigger_schedule](https://www.terraform.io/docs/providers/azurerm/r/data_factory_trigger_schedule.html)
38-
- [azurerm_data_factory_dataset_sql_server](https://www.terraform.io/docs/providers/azurerm/r/data_factory_dataset_sql_server_table.html)
39-
- [azurerm_data_factory_linked_service_sql_server](https://www.terraform.io/docs/providers/azurerm/r/data_factory_linked_service_sql_server.html)
4036

4137
## Usage
4238

@@ -60,11 +56,6 @@ module "data_factory" {
6056
data_factory_trigger_name = "adftrigger"
6157
data_factory_trigger_interval = 1
6258
data_factory_trigger_frequency = "Minute"
63-
data_factory_dataset_sql_name = "adfsqldataset"
64-
data_factory_dataset_sql_table_name = "adfsqldatasettable"
65-
data_factory_dataset_sql_folder = ""
66-
data_factory_linked_sql_name = "adfsqllinked"
67-
data_factory_linked_sql_connection_string = "Server=tcp:adfsql..."
6859
}
6960
```
7061

infra/modules/providers/azure/data-factory/datasets.tf

-8
This file was deleted.

infra/modules/providers/azure/data-factory/linkedservices.tf

-8
This file was deleted.

infra/modules/providers/azure/data-factory/output.tf

-10
Original file line numberDiff line numberDiff line change
@@ -28,16 +28,6 @@ output "trigger_interval" {
2828
value = azurerm_data_factory_trigger_schedule.main.interval
2929
}
3030

31-
output "sql_dataset_id" {
32-
description = "The ID of the SQL server dataset created"
33-
value = azurerm_data_factory_dataset_sql_server_table.main.id
34-
}
35-
36-
output "sql_linked_service_id" {
37-
description = "The ID of the SQL server Linked service created"
38-
value = azurerm_data_factory_linked_service_sql_server.main.id
39-
}
40-
4131
output "adf_identity_principal_id" {
4232
description = "The ID of the principal(client) in Azure active directory"
4333
value = azurerm_data_factory.main.identity[0].principal_id

infra/modules/providers/azure/data-factory/terraform.tfvars.template

+1-4
Original file line numberDiff line numberDiff line change
@@ -2,10 +2,7 @@ resource_group_name = ""
22
data_factory_name = ""
33
data_factory_runtime_name = ""
44
data_factory_pipeline_name = ""
5-
data_factory_dataset_sql_name = ""
6-
data_factory_dataset_sql_table_name = ""
7-
data_factory_linked_sql_name = ""
8-
data_factory_linked_sql_connection_string = ""
5+
data_factory_trigger_name = ""
96
vnet_integration = {
107
vnet_id = ""
118
subnet_name = ""

infra/modules/providers/azure/data-factory/tests/integration/data_factory_integration_test.go

-10
Original file line numberDiff line numberDiff line change
@@ -25,16 +25,6 @@ func TestDataFactory(t *testing.T) {
2525
"data_factory_name",
2626
"pipeline_name",
2727
),
28-
VerifyCreatedDataset(subscription,
29-
"resource_group_name",
30-
"data_factory_name",
31-
"sql_dataset_id",
32-
),
33-
VerifyCreatedLinkedService(subscription,
34-
"resource_group_name",
35-
"data_factory_name",
36-
"sql_linked_service_id",
37-
),
3828
},
3929
}
4030
integration.RunIntegrationTests(&testFixture)
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,9 @@
1-
resource_group_name = "adftest"
2-
data_factory_name = "adftest"
3-
data_factory_runtime_name = "adfrttest"
4-
data_factory_pipeline_name = "testpipeline"
5-
data_factory_trigger_name = "testtrigger"
6-
data_factory_dataset_sql_name = "testsql"
7-
data_factory_dataset_sql_table_name = "adfsqltableheba"
8-
data_factory_linked_sql_name = "testlinkedsql"
9-
data_factory_linked_sql_connection_string = "connectionstring"
1+
resource_group_name = ""
2+
data_factory_name = ""
3+
data_factory_runtime_name = ""
4+
data_factory_pipeline_name = ""
5+
data_factory_trigger_name = ""
106
vnet_integration = {
11-
vnet_id = "/subscriptions/resourceGroups/providers/Microsoft.Network/virtualNetworks/testvnet"
12-
subnet_name = "default"
7+
vnet_id = ""
8+
subnet_name = ""
139
}

infra/modules/providers/azure/data-factory/tests/unit/data_factory_unit_test.go

+1-25
Original file line numberDiff line numberDiff line change
@@ -1,24 +1,11 @@
11
package unit
22

33
import (
4-
"encoding/json"
5-
"strings"
64
"testing"
7-
8-
"github.com/gruntwork-io/terratest/modules/random"
95
tests "github.com/microsoft/cobalt/infra/modules/providers/azure/data-factory/tests"
106
"github.com/microsoft/terratest-abstraction/unit"
117
)
128

13-
// helper function to parse blocks of JSON into a generic Go map
14-
func asMap(t *testing.T, jsonString string) map[string]interface{} {
15-
var theMap map[string]interface{}
16-
if err := json.Unmarshal([]byte(jsonString), &theMap); err != nil {
17-
t.Fatal(err)
18-
}
19-
return theMap
20-
}
21-
229
func TestTemplate(t *testing.T) {
2310

2411
expectedDataFactory := map[string]interface{}{
@@ -53,27 +40,16 @@ func TestTemplate(t *testing.T) {
5340
"frequency": "Minute",
5441
}
5542

56-
expectedDatasetSQL := map[string]interface{}{
57-
"name": "testsql",
58-
}
59-
60-
expectedLinkedSQL := map[string]interface{}{
61-
"name": "testlinkedsql",
62-
"connection_string": "connectionstring",
63-
}
64-
6543
testFixture := unit.UnitTestFixture{
6644
GoTest: t,
6745
TfOptions: tests.DataFactoryTFOptions,
6846
PlanAssertions: nil,
69-
ExpectedResourceCount: 6,
47+
ExpectedResourceCount: 4,
7048
ExpectedResourceAttributeValues: unit.ResourceDescription{
7149
"azurerm_data_factory.main": expectedDataFactory,
7250
"azurerm_data_factory_integration_runtime_managed.main": expectedDFIntRunTime,
7351
"azurerm_data_factory_pipeline.main": expectedPipeline,
7452
"azurerm_data_factory_trigger_schedule.main": expectedTrigger,
75-
"azurerm_data_factory_dataset_sql_server_table.main": expectedDatasetSQL,
76-
"azurerm_data_factory_linked_service_sql_server.main": expectedLinkedSQL,
7753
},
7854
}
7955

infra/modules/providers/azure/data-factory/variables.tf

-30
Original file line numberDiff line numberDiff line change
@@ -68,34 +68,4 @@ variable "data_factory_trigger_frequency" {
6868
description = "The trigger frequency. Valid values include Minute, Hour, Day, Week, Month. Defaults to Minute."
6969
type = string
7070
default = "Minute"
71-
}
72-
73-
variable "data_factory_dataset_sql_name" {
74-
description = "Specifies the name of the Data Factory Dataset SQL Server Table. Only letters, numbers and '_' are allowed."
75-
type = string
76-
default = ""
77-
}
78-
79-
variable "data_factory_dataset_sql_table_name" {
80-
description = "The table name of the Data Factory Dataset SQL Server Table."
81-
type = string
82-
default = ""
83-
}
84-
85-
variable "data_factory_dataset_sql_folder" {
86-
description = "The folder that this Dataset is in. If not specified, the Dataset will appear at the root level."
87-
type = string
88-
default = ""
89-
}
90-
91-
variable "data_factory_linked_sql_name" {
92-
description = "Specifies the name of the Data Factory Linked Service SQL Server. Changing this forces a new resource to be created."
93-
type = string
94-
default = ""
95-
}
96-
97-
variable "data_factory_linked_sql_connection_string" {
98-
description = "The connection string in which to authenticate with the SQL Server."
99-
type = string
100-
default = ""
10171
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
export ARM_ACCESS_KEY=
2+
export ARM_CLIENT_ID=
3+
export ARM_CLIENT_SECRET=
4+
export ARM_SUBSCRIPTION_ID=
5+
export ARM_TENANT_ID=
6+
export BUILD_BUILDID=1
7+
export GO_VERSION=1.12.5
8+
export TF_VAR_remote_state_account=
9+
export TF_VAR_remote_state_container=
10+
export TF_VERSION=0.12.4
11+
export TF_WARN_OUTPUT_ERRORS=1
12+
export TF_VAR_resource_group_location=eastus
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,139 @@
1+
# Azure Application Services
2+
3+
The `az-svc-data-integration-mlw` template is intended to be a reference for running a set of app services.
4+
5+
6+
## Use-Case
7+
8+
This particular template creates an Azure environment with a small set of fully managed microservices.
9+
10+
11+
## Scenarios this template should avoid
12+
13+
This template is an adequate solution where the service count is less than 10. For Azure customers interested with provisioning more than 10 services, we recommend using AKS. Reason being that with Kubernetes you can maximize cluster node CPU cores which helps minimize cloud resourcing costs.
14+
15+
## Technical Design
16+
Template design [specifications](docs/design/README.md).
17+
18+
## Architecture
19+
![Template Topology](docs/design/images/deployment-topology.jpg "Template Topology")
20+
21+
22+
## Prerequisites
23+
24+
1. Azure Subscription
25+
2. An available Service Principal with API Permissions granted with Admin Consent within Azure app registration. The required Azure Active Directory Graph app role is `Application.ReadWrite.OwnedBy`
26+
27+
![image](https://user-images.githubusercontent.com/7635865/71312782-d9b91800-23f4-11ea-80ee-cc646f1c74be.png)
28+
29+
3. Terraform and Go are locally installed
30+
4. Azure Storage Account is [setup](https://docs.microsoft.com/en-us/azure/terraform/terraform-backend) to store Terraform state
31+
5. Set up your Local environment variables by creating a `.env` file that contains the following information:
32+
33+
```
34+
ARM_SUBSCRIPTION_ID="<az-service-principal-subscription-id>"
35+
ARM_CLIENT_ID="<az-service-principal-client-id>"
36+
ARM_CLIENT_SECRET="<az-service-principal-auth-secret>"
37+
ARM_TENANT_ID="<az-service-principal-tenant>"
38+
ARM_ACCESS_KEY="<remote-state-storage-account-primary-key>"
39+
TF_VAR_remote_state_account="<tf-remote-state-storage-account-name>"
40+
TF_VAR_remote_state_container="<tf-remote-state-storage-container-name>"
41+
```
42+
43+
## Cost
44+
45+
Azure environment cost ballpark [estimate](https://azure.com/e/92b05a7cd1e646368ab74772e3122500). This is subject to change and is driven from the resource pricing tiers configured when the template is deployed.
46+
47+
## Deployment Steps
48+
49+
1. Execute the following commands to set up your local environment variables:
50+
51+
*Note for Windows Users using WSL*: We recommend running dos2unix utility on the environment file via `dos2unix .env` prior to sourcing your environment variables to chop trailing newline and carriage return characters.
52+
53+
```bash
54+
# these commands setup all the environment variables needed to run this template
55+
DOT_ENV=<path to your .env file>
56+
export $(cat $DOT_ENV | xargs)
57+
```
58+
59+
2. Execute the following command to configure your local Azure CLI.
60+
61+
```bash
62+
# This logs your local Azure CLI in using the configured service principal.
63+
az login --service-principal -u $ARM_CLIENT_ID -p $ARM_CLIENT_SECRET --tenant $ARM_TENANT_ID
64+
```
65+
66+
3. Navigate to the `terraform.tfvars` terraform file. Here's a sample of the terraform.tfvars file for this template.
67+
68+
```HCL
69+
resource_group_location = "centralus"
70+
prefix = "test-services"
71+
72+
# Targets that will be configured to also setup AuthN with Easy Auth
73+
app_services = [
74+
{
75+
app_name = "tf-test-svc-1"
76+
image = null
77+
app_settings = {
78+
"one_sweet_app_setting" = "brilliant"
79+
}
80+
},
81+
{
82+
app_name = "tf-test-svc-2"
83+
image = null
84+
app_settings = {
85+
"another_sweet_svc_app_setting" = "ok"
86+
}
87+
}
88+
]
89+
```
90+
91+
4. Execute the following commands to set up your terraform workspace.
92+
93+
```bash
94+
# This configures terraform to leverage a remote backend that will help you and your
95+
# team keep consistent state
96+
terraform init -backend-config "storage_account_name=${TF_VAR_remote_state_account}" -backend-config "container_name=${TF_VAR_remote_state_container}"
97+
98+
# This command configures terraform to use a workspace unique to you. This allows you to work
99+
# without stepping over your teammate's deployments
100+
TF_WORKSPACE="az-micro-svc-$USER"
101+
terraform workspace new $TF_WORKSPACE || terraform workspace select $TF_WORKSPACE
102+
```
103+
104+
5. Execute the following commands to orchestrate a deployment.
105+
106+
```bash
107+
# See what terraform will try to deploy without actually deploying
108+
terraform plan
109+
110+
# Execute a deployment
111+
terraform apply
112+
```
113+
114+
6. Optionally execute the following command to teardown your deployment and delete your resources.
115+
116+
```bash
117+
# Destroy resources and tear down deployment. Only do this if you want to destroy your deployment.
118+
terraform destroy
119+
```
120+
121+
## Automated Testing
122+
123+
### Unit Testing
124+
125+
Navigate to the template folder `infra/templates/az-svc-data-integration-mlw`. Unit tests can be run using the following command:
126+
127+
```
128+
go test -v $(go list ./... | grep "unit")
129+
```
130+
131+
### Integration Testing
132+
133+
Please confirm that you've completed the `terraform apply` step before running the integration tests as we're validating the active terraform workspace.
134+
135+
Integration tests can be run using the following command:
136+
137+
```
138+
go test -v $(go list ./... | grep "integration")
139+
```

0 commit comments

Comments
 (0)