-
Notifications
You must be signed in to change notification settings - Fork 34
/
Copy pathSqlServerSource.java
224 lines (190 loc) · 8.14 KB
/
SqlServerSource.java
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
/*
* Copyright © 2019 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.cdap.plugin.mssql;
import com.google.common.base.Strings;
import io.cdap.cdap.api.annotation.Description;
import io.cdap.cdap.api.annotation.Macro;
import io.cdap.cdap.api.annotation.Metadata;
import io.cdap.cdap.api.annotation.MetadataProperty;
import io.cdap.cdap.api.annotation.Name;
import io.cdap.cdap.api.annotation.Plugin;
import io.cdap.cdap.api.data.schema.Schema;
import io.cdap.cdap.etl.api.FailureCollector;
import io.cdap.cdap.etl.api.batch.BatchSource;
import io.cdap.cdap.etl.api.batch.BatchSourceContext;
import io.cdap.cdap.etl.api.connector.Connector;
import io.cdap.plugin.common.Asset;
import io.cdap.plugin.common.ConfigUtil;
import io.cdap.plugin.common.LineageRecorder;
import io.cdap.plugin.db.SchemaReader;
import io.cdap.plugin.db.config.AbstractDBSpecificSourceConfig;
import io.cdap.plugin.db.connector.AbstractDBSpecificConnectorConfig;
import io.cdap.plugin.db.source.AbstractDBSource;
import io.cdap.plugin.util.DBUtils;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
/**
 * Batch source to read from MSSQL.
 */
@Plugin(type = BatchSource.PLUGIN_TYPE)
@Name(SqlServerConstants.PLUGIN_NAME)
@Description("Reads from a database table(s) using a configurable SQL query." +
  " Outputs one record for each row returned by the query.")
@Metadata(properties = {@MetadataProperty(key = Connector.PLUGIN_TYPE, value = SqlServerConnector.NAME)})
public class SqlServerSource extends AbstractDBSource<SqlServerSource.SqlServerSourceConfig> {

  private final SqlServerSourceConfig sqlServerSourceConfig;

  public SqlServerSource(SqlServerSourceConfig sqlServerSourceConfig) {
    super(sqlServerSourceConfig);
    this.sqlServerSourceConfig = sqlServerSourceConfig;
  }

  @Override
  protected String createConnectionString() {
    // The config assembles the JDBC URL from its connection host/port and database name.
    return this.sqlServerSourceConfig.getConnectionString();
  }

  @Override
  protected SchemaReader getSchemaReader() {
    // SQL Server specific type mapping (e.g. datetime/datetimeoffset handling).
    return new SqlServerSourceSchemaReader();
  }

  @Override
  protected Class<? extends DBWritable> getDBRecordType() {
    return SqlServerSourceDBRecord.class;
  }

  @Override
  protected LineageRecorder getLineageRecorder(BatchSourceContext context) {
    // Build a fully-qualified name of the form mssql://host:port/database for lineage tracking.
    String referenceName = this.sqlServerSourceConfig.getReferenceName();
    String fqn = DBUtils.constructFQN("mssql",
                                      this.sqlServerSourceConfig.getConnection().getHost(),
                                      this.sqlServerSourceConfig.getConnection().getPort(),
                                      this.sqlServerSourceConfig.database,
                                      referenceName);
    Asset asset = Asset.builder(referenceName).setFqn(fqn).build();
    return new LineageRecorder(context, asset);
  }

  /**
   * MSSQL source config.
   */
  public static class SqlServerSourceConfig extends AbstractDBSpecificSourceConfig {

    public static final String NAME_USE_CONNECTION = "useConnection";
    public static final String NAME_CONNECTION = "connection";

    @Name(NAME_USE_CONNECTION)
    @Nullable
    @Description("Whether to use an existing connection.")
    private Boolean useConnection;

    @Name(NAME_CONNECTION)
    @Macro
    @Nullable
    @Description("The existing connection to use.")
    private SqlServerConnectorConfig connection;

    @Name(DATABASE)
    @Description("Database name to connect to")
    @Macro
    public String database;

    @Name(SqlServerConstants.INSTANCE_NAME)
    @Description(SqlServerConstants.INSTANCE_NAME_DESCRIPTION)
    @Nullable
    public String instanceName;

    @Name(SqlServerConstants.QUERY_TIMEOUT)
    @Description(SqlServerConstants.QUERY_TIMEOUT_DESCRIPTION)
    @Nullable
    public Integer queryTimeout = -1;

    @Name(SqlServerConstants.CONNECT_TIMEOUT)
    @Description(SqlServerConstants.CONNECT_TIMEOUT_DESCRIPTION)
    @Nullable
    public Integer connectTimeout;

    @Name(SqlServerConstants.COLUMN_ENCRYPTION)
    @Description(SqlServerConstants.COLUMN_ENCRYPTION_DESCRIPTION)
    @Nullable
    public Boolean columnEncryption;

    @Name(SqlServerConstants.ENCRYPT)
    @Description(SqlServerConstants.ENCRYPT_DESCRIPTION)
    @Nullable
    public Boolean encrypt;

    @Name(SqlServerConstants.TRUST_SERVER_CERTIFICATE)
    @Description(SqlServerConstants.TRUST_SERVER_CERTIFICATE_DESCRIPTION)
    @Nullable
    public Boolean trustServerCertificate;

    @Name(SqlServerConstants.WORKSTATION_ID)
    @Description(SqlServerConstants.WORKSTATION_ID_DESCRIPTION)
    @Nullable
    public String workstationId;

    @Name(SqlServerConstants.FAILOVER_PARTNER)
    @Description(SqlServerConstants.FAILOVER_PARTNER_DESCRIPTION)
    @Nullable
    public String failoverPartner;

    @Name(SqlServerConstants.PACKET_SIZE)
    @Description(SqlServerConstants.PACKET_SIZE_DESCRIPTION)
    @Nullable
    public Integer packetSize;

    @Name(SqlServerConstants.CURRENT_LANGUAGE)
    @Description(SqlServerConstants.CURRENT_LANGUAGE_DESCRIPTION)
    @Nullable
    public String currentLanguage;

    @Override
    public String getConnectionString() {
      // Format the JDBC URL from the connection's host/port plus the configured database.
      return String.format(SqlServerConstants.SQL_SERVER_CONNECTION_STRING_FORMAT,
                           connection.getHost(), connection.getPort(), database);
    }

    @Override
    public Map<String, String> getDBSpecificArguments() {
      // Collect all SQL Server driver properties into the argument map understood by the driver.
      return SqlServerUtil.composeDbSpecificArgumentsMap(instanceName, connection.getAuthenticationType(), null,
                                                         connectTimeout, columnEncryption, encrypt,
                                                         trustServerCertificate, workstationId, failoverPartner,
                                                         packetSize, queryTimeout);
    }

    @Override
    protected AbstractDBSpecificConnectorConfig getConnection() {
      return connection;
    }

    @Override
    public List<String> getInitQueries() {
      // When no language override is configured there is nothing to run at session start.
      if (Strings.isNullOrEmpty(currentLanguage)) {
        return Collections.emptyList();
      }
      String setLanguage = String.format(SqlServerConstants.SET_LANGUAGE_QUERY_FORMAT, currentLanguage);
      return Collections.singletonList(setLanguage);
    }

    @Override
    public String getTransactionIsolationLevel() {
      return connection.getTransactionIsolationLevel();
    }

    @Override
    public void validate(FailureCollector collector) {
      ConfigUtil.validateConnection(this, useConnection, connection, collector);
      super.validate(collector);
    }

    @Override
    protected void validateField(FailureCollector collector, Schema.Field field, Schema actualFieldSchema,
                                 Schema expectedFieldSchema) {
      // we allow the case when actual type is Datetime but user manually set it to timestamp (datetime and datetime2)
      // or string (datetimeoffset). To make it compatible with old behavior that convert datetime to timestamp.
      // below validation is kind of loose, it's possible users try to manually map datetime to string or
      // map datetimeoffset to timestamp which is invalid. In such case runtime will still fail even validation passes.
      // But we don't have the original source type information here and don't want to do big refactoring here
      boolean actualIsDatetime = actualFieldSchema.getLogicalType() == Schema.LogicalType.DATETIME;
      boolean expectedIsTimestamp = expectedFieldSchema.getLogicalType() == Schema.LogicalType.TIMESTAMP_MICROS;
      boolean expectedIsString = expectedFieldSchema.getType() == Schema.Type.STRING;
      if (actualIsDatetime && (expectedIsTimestamp || expectedIsString)) {
        return;
      }
      super.validateField(collector, field, actualFieldSchema, expectedFieldSchema);
    }

    @Override
    public boolean canConnect() {
      // A connection attempt also requires the database name to be resolved (no unevaluated macro).
      return super.canConnect() && !containsMacro(DATABASE);
    }
  }
}