Skip to content

feat: Support MySQL full database sync #27

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 3 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 13 additions & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>

<flink.sql.cdc.version>2.4.2</flink.sql.cdc.version>
<checkstyle.version>8.14</checkstyle.version>
<junit.version>4.13.2</junit.version>
<scala.binary.version>2.12</scala.binary.version>
Expand Down Expand Up @@ -177,6 +177,18 @@
<version>${flink.version}</version>
<scope>${flink.scope}</scope>
</dependency>
<dependency>
<groupId>com.ververica</groupId>
<!-- NOTE(review): this PR is titled "Support MySQL full database sync" but the dependency
     added here is the Oracle CDC connector — confirm whether
     flink-sql-connector-mysql-cdc was intended instead. -->
<artifactId>flink-sql-connector-oracle-cdc</artifactId>
<version>${flink.sql.cdc.version}</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<artifactId>flink-shaded-guava</artifactId>
<groupId>org.apache.flink</groupId>
</exclusion>
</exclusions>
</dependency>
<!-- <dependency>-->
<!-- <groupId>org.apache.flink</groupId>-->
<!-- <artifactId>flink-table-planner_${scala.binary.version}</artifactId>-->
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,9 +46,9 @@ public DynamicTableSink createDynamicTableSink(Context context) {
Properties databendProperties =
getDatabendProperties(context.getCatalogTable().getOptions());
return new DatabendDynamicTableSink(
getDmlOptions(config),
getDmlOptions(config, databendProperties),
databendProperties,
getDmlOptions(config).getPrimaryKeys(),
getDmlOptions(config, databendProperties).getPrimaryKeys(),
catalogTable.getPartitionKeys().toArray(new String[0]),
context.getPhysicalRowDataType());
}
Expand All @@ -63,7 +63,7 @@ public DynamicTableSource createDynamicTableSource(Context context) {
Properties databendProperties =
getDatabendProperties(context.getCatalogTable().getOptions());
return new DatabendDynamicTableSource(
getReadOptions(config), databendProperties, context.getPhysicalRowDataType());
getReadOptions(config, databendProperties), databendProperties, context.getPhysicalRowDataType());
}

@Override
Expand Down Expand Up @@ -102,7 +102,7 @@ private void validateConfigOptions(ReadableConfig config) {
}
}

public DatabendDmlOptions getDmlOptions(ReadableConfig config) {
public DatabendDmlOptions getDmlOptions(ReadableConfig config, Properties databendProperties) {
return new DatabendDmlOptions.Builder()
.withUrl(config.get(URL))
.withUsername(config.get(USERNAME))
Expand All @@ -116,16 +116,18 @@ public DatabendDmlOptions getDmlOptions(ReadableConfig config) {
.withPrimaryKey(config.get(SINK_PRIMARY_KEYS).toArray(new String[0]))
.withIgnoreDelete(config.get(SINK_IGNORE_DELETE))
.withParallelism(config.get(SINK_PARALLELISM))
.withConnectionProperties(databendProperties)
.build();
}

private DatabendReadOptions getReadOptions(ReadableConfig config) {
private DatabendReadOptions getReadOptions(ReadableConfig config, Properties databendProperties) {
return new DatabendReadOptions.Builder()
.withUrl(config.get(URL))
.withUsername(config.get(USERNAME))
.withPassword(config.get(PASSWORD))
.withDatabaseName(config.get(DATABASE_NAME))
.withTableName(config.get(TABLE_NAME))
.withConnectionProperties(databendProperties)
.build();
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
package org.apache.flink.connector.databend.catalog.databend;

/**
 * Data model of a target table.
 *
 * <p>NOTE(review): the constant names mirror the Doris/StarRocks table models
 * (duplicate-key, unique-key, aggregate-key) — confirm the intended semantics
 * for Databend, as nothing in this file defines them.
 *
 * <p>Do not reorder constants: ordering would change {@code ordinal()} values.
 */
public enum DataModel {
DUPLICATE,
UNIQUE,
AGGREGATE
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
package org.apache.flink.connector.databend.catalog.databend;

import org.apache.flink.annotation.Public;
import org.apache.flink.connector.databend.internal.connection.DatabendConnectionProvider;
import org.apache.flink.connector.databend.internal.options.DatabendConnectionOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Serializable;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Helper for issuing system-level operations against a Databend instance.
 *
 * <p>Holds a {@link DatabendConnectionProvider} built from the supplied
 * {@link DatabendConnectionOptions} and offers quoting utilities for SQL
 * identifiers and property values.
 */
@Public
public class DatabendSystem implements Serializable {
    private static final long serialVersionUID = 1L;
    private static final Logger LOG = LoggerFactory.getLogger(DatabendSystem.class);

    /** Databases that are built in and must not be treated as user databases. */
    private static final List<String> builtinDatabases =
            Collections.singletonList("information_schema");

    private final DatabendConnectionProvider jdbcConnectionProvider;

    public DatabendSystem(DatabendConnectionOptions options) {
        this.jdbcConnectionProvider =
                new DatabendConnectionProvider(options, options.getConnectionProperties());
    }

    /** Back-quotes every name in the list; see {@link #identifier(String)}. */
    private List<String> identifier(List<String> name) {
        return name.stream().map(this::identifier).collect(Collectors.toList());
    }

    /** Wraps an identifier in back-quotes, e.g. {@code db} -&gt; {@code `db`}. */
    private String identifier(String name) {
        return String.format("`%s`", name);
    }

    /** Wraps a property value in single quotes, e.g. {@code v} -&gt; {@code 'v'}. */
    private String quoteProperties(String name) {
        return String.format("'%s'", name);
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
package org.apache.flink.connector.databend.catalog.databend;

/**
 * Names of Databend column types, as they appear in DDL / schema metadata.
 *
 * <p>Constants-only holder: it is never meant to be instantiated, so the
 * constructor is private (Effective Java, Item 4).
 */
public class DatabendType {
    public static final String BOOLEAN = "BOOLEAN";
    public static final String TINYINT = "TINYINT";
    public static final String SMALLINT = "SMALLINT";
    public static final String INT = "INT";
    public static final String BIGINT = "BIGINT";
    public static final String FLOAT = "FLOAT";
    public static final String DOUBLE = "DOUBLE";
    public static final String DECIMAL = "DECIMAL";
    public static final String DATE = "DATE";
    public static final String DATETIME = "DATETIME";
    public static final String VARCHAR = "VARCHAR";
    public static final String STRING = "STRING";
    public static final String BITMAP = "BITMAP";
    public static final String ARRAY = "ARRAY";
    public static final String JSON = "JSON";
    public static final String MAP = "MAP";

    /** Non-instantiable utility holder. */
    private DatabendType() {
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
package org.apache.flink.connector.databend.catalog.databend;

/**
 * Mutable description of a single table column: its name, the textual form of
 * its type, and an optional comment. Plain JavaBean with a no-arg constructor
 * so it can be populated reflectively.
 */
public class FieldSchema {
    private String name;
    private String typeString;
    private String comment;

    /** No-arg constructor; all fields start as {@code null}. */
    public FieldSchema() {
    }

    /**
     * @param name column name
     * @param typeString textual type, e.g. {@code "INT"}
     * @param comment column comment, may be {@code null}
     */
    public FieldSchema(String name, String typeString, String comment) {
        this.name = name;
        this.typeString = typeString;
        this.comment = comment;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getTypeString() {
        return typeString;
    }

    public void setTypeString(String typeString) {
        this.typeString = typeString;
    }

    public String getComment() {
        return comment;
    }

    public void setComment(String comment) {
        this.comment = comment;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
package org.apache.flink.connector.databend.catalog.databend;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Mutable description of a table to be created: target database/table names,
 * an optional table comment, the column definitions, key columns, the data
 * model, and free-form table properties.
 *
 * <p>Plain JavaBean; {@code keys}, {@code model} and {@code properties} have
 * non-null defaults (empty list, {@link DataModel#DUPLICATE}, empty map).
 */
public class TableSchema {
    private String database;
    private String table;
    private String tableComment;
    private Map<String, FieldSchema> fields;
    private List<String> keys = new ArrayList<>();
    private DataModel model = DataModel.DUPLICATE;
    private Map<String, String> properties = new HashMap<>();

    public String getDatabase() {
        return database;
    }

    public void setDatabase(String database) {
        this.database = database;
    }

    public String getTable() {
        return table;
    }

    public void setTable(String table) {
        this.table = table;
    }

    public String getTableComment() {
        return tableComment;
    }

    public void setTableComment(String tableComment) {
        this.tableComment = tableComment;
    }

    public Map<String, FieldSchema> getFields() {
        return fields;
    }

    public void setFields(Map<String, FieldSchema> fields) {
        this.fields = fields;
    }

    public List<String> getKeys() {
        return keys;
    }

    public void setKeys(List<String> keys) {
        this.keys = keys;
    }

    public DataModel getModel() {
        return model;
    }

    public void setModel(DataModel model) {
        this.model = model;
    }

    public Map<String, String> getProperties() {
        return properties;
    }

    public void setProperties(Map<String, String> properties) {
        this.properties = properties;
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
package org.apache.flink.connector.databend.exception;

/**
 * Unchecked exception signalling that creating a table failed.
 *
 * <p>Mirrors the standard {@link RuntimeException} constructor set so callers
 * can attach a message, a cause, or both.
 */
public class CreateTableException extends RuntimeException {

    /** Creates an exception with neither message nor cause. */
    public CreateTableException() {
        super();
    }

    /** @param message detail message describing the failed table creation */
    public CreateTableException(String message) {
        super(message);
    }

    /**
     * @param message detail message describing the failed table creation
     * @param cause underlying failure
     */
    public CreateTableException(String message, Throwable cause) {
        super(message, cause);
    }

    /** @param cause underlying failure */
    public CreateTableException(Throwable cause) {
        super(cause);
    }

    /**
     * Full-control constructor, mirroring
     * {@link RuntimeException#RuntimeException(String, Throwable, boolean, boolean)}.
     */
    protected CreateTableException(
            String message,
            Throwable cause,
            boolean enableSuppression,
            boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
package org.apache.flink.connector.databend.internal.options;

import javax.annotation.Nullable;
import java.io.Serializable;
import java.util.Optional;
import javax.annotation.Nullable;
import java.util.Properties;

/**
* Databend connection options.
Expand All @@ -21,13 +22,16 @@ public class DatabendConnectionOptions implements Serializable {

private final String tableName;

private final Properties connectionProperties;

public DatabendConnectionOptions(
String url, @Nullable String username, @Nullable String password, String databaseName, String tableName) {
String url, @Nullable String username, @Nullable String password, String databaseName, String tableName, Properties connectionProperties) {
this.url = url;
this.username = username;
this.password = password;
this.databaseName = databaseName;
this.tableName = tableName;
this.connectionProperties = connectionProperties;
}

public String getUrl() {
Expand All @@ -49,4 +53,8 @@ public String getDatabaseName() {
public String getTableName() {
return this.tableName;
}

public Properties getConnectionProperties() {
return this.connectionProperties;
}
}
Loading