

Merge pull request #1737 from running-elephant/pre-release
Pre release - 1.0.0-rc.0
scottsut authored Jul 25, 2022
2 parents bbd4d41 + 748cb37 commit 30a07e9
Showing 256 changed files with 59,624 additions and 2,663 deletions.
35 changes: 35 additions & 0 deletions .github/workflows/dev-ut-stage.js.yml
@@ -0,0 +1,35 @@
# This workflow will do a clean installation of node dependencies, cache/restore them, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions

name: Node.js CI

on:
  push:
    branches: [ "dev" ]
  pull_request:
    branches: [ "dev" ]

jobs:
  build:

    runs-on: ubuntu-latest

    strategy:
      matrix:
        node-version: [14.x, 16.x]
        # See supported Node.js release schedule at https://nodejs.org/en/about/releases/

    steps:
    - uses: actions/checkout@v3
    - name: Use Node.js ${{ matrix.node-version }}
      uses: actions/setup-node@v3
      with:
        node-version: ${{ matrix.node-version }}
        cache: 'npm'
        cache-dependency-path: '**/package-lock.json'
    - run: npm ci
      working-directory: ./frontend
    - run: npm run build --if-present
      working-directory: ./frontend
    - run: npm run test:ci
      working-directory: ./frontend
12 changes: 10 additions & 2 deletions config/jdbc-driver-ext.yml
@@ -1,10 +1,18 @@
#EXAMPLE:
# db-type: "example"
# name: "example"
# driver-class: "com.example.jdbc.Driver"
# literal-quote: "'"
# identifier-quote: "`"
# url-prefix: jdbc:example://
# driver-class: "com.example.jdbc.Driver" # optional
# url-prefix: "jdbc:example://" # optional
# sql-dialect: "datart.data.provider.calcite.dialect.ExampleSqlDialectSupport" # optional
# identifier-end-quote: "`" # optional
# literal-end-quote: "'" # optional
# identifier-escaped-quote: "`" # optional
# adapter-class: 'datart.data.provider.jdbc.adapters.ExampleDataProviderAdapter' # optional
# quote-identifiers: true # optional
# support-sql-limit: true # optional
#

IMPALA:
db-type: "impala"
2 changes: 1 addition & 1 deletion core/pom.xml
@@ -5,7 +5,7 @@
<parent>
<artifactId>datart-parent</artifactId>
<groupId>datart</groupId>
<version>1.0.0-beta.4</version>
<version>1.0.0-rc.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>

51 changes: 49 additions & 2 deletions core/src/main/java/datart/core/data/provider/Dataframe.java
@@ -23,8 +23,8 @@
import lombok.Data;

import java.io.Serializable;
import java.util.Collections;
import java.util.List;
import java.util.*;
import java.util.stream.Collectors;


@Data
@@ -62,4 +62,51 @@ public static Dataframe empty() {
return dataframe;
}

    // Split the dataset by table name according to the given column schema, restoring the original table structure
    public Dataframes splitByTable(Map<String, Column> newSchema) {
        Map<Integer, String> tableColumnIndex = new HashMap<>();
        for (int i = 0; i < columns.size(); i++) {
            Column column = columns.get(i);
            Column schemaColumn = newSchema.get(column.columnKey());
            tableColumnIndex.put(i, schemaColumn.tableName());
        }
        Map<String, List<List<Object>>> tableRows = newSchema
                .values()
                .stream()
                .map(Column::tableName)
                .distinct()
                .collect(Collectors.toMap(k -> k, v -> new ArrayList()));
        for (List<Object> row : rows) {
            int i = 0;
            Map<String, List<Object>> tableRowMap = new HashMap<>();
            for (Object item : row) {
                String tableName = tableColumnIndex.get(i);
                tableRowMap.computeIfAbsent(tableName, v -> new ArrayList<>()).add(item);
                i++;
            }
            for (String key : tableRowMap.keySet()) {
                tableRows.get(key).add(tableRowMap.get(key));
            }
        }
        Map<String, List<Column>> tableColumns = new HashMap<>();
        for (int i = 0; i < columns.size(); i++) {
            Column column = columns.get(i);
            Column newColumn = newSchema.get(column.columnKey());
            String tableName = newColumn.tableName();
            newColumn.setName(newColumn.columnName());
            tableColumns.computeIfAbsent(tableName, v -> new ArrayList<>())
                    .add(newColumn);
        }
        Dataframe[] dataframes = tableColumns.keySet().stream()
                .map(tableName -> {
                    Dataframe df = new Dataframe();
                    df.setName(tableName);
                    df.setColumns(tableColumns.get(tableName));
                    df.setRows(tableRows.get(tableName));
                    return df;
                }).toArray(Dataframe[]::new);
        return Dataframes.of(id, dataframes);
    }


}
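The splitByTable method above regroups a flat, joined result set back into one Dataframe per source table, driven by a map from column key to a Column that knows its original table. A minimal sketch of how a caller might consume the result, not part of the commit: the class and method names are illustrative, the import paths are assumed from the file paths in this diff, the getters come from Lombok's @Data on Dataframe, and size() is the accessor added to Dataframes just below.

```java
import java.util.Map;

import datart.core.data.provider.Column;
import datart.core.data.provider.Dataframe;
import datart.core.data.provider.Dataframes;

public class SplitByTableSketch {

    // `joined` is assumed to be the flat Dataframe produced by a cross-table query,
    // and `newSchema` maps each flat column key back to a Column carrying its
    // original table name; both are built elsewhere and passed in here.
    static void printSplit(Dataframe joined, Map<String, Column> newSchema) {
        Dataframes restored = joined.splitByTable(newSchema);
        for (Dataframe table : restored.getDataframes()) {
            System.out.println(table.getName() + ": "
                    + table.getColumns().size() + " columns, "
                    + table.getRows().size() + " rows");
        }
        System.out.println("tables restored: " + restored.size());
    }
}
```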
6 changes: 5 additions & 1 deletion core/src/main/java/datart/core/data/provider/Dataframes.java
@@ -30,7 +30,7 @@ public class Dataframes implements Serializable {

private final String key;

List<Dataframe> dataframes;
private final List<Dataframe> dataframes;

private Dataframes(String key) {
this.key = "DB" + key;
@@ -54,4 +54,8 @@ public static Dataframes of(String key, Dataframe... dataframes) {
public void add(Dataframe df) {
dataframes.add(df);
}

public int size() {
return dataframes.size();
}
}
@@ -28,7 +28,7 @@
public interface ShareMapperExt extends ShareMapper {

@Select({"<script>",
"SELECT * FROM `share` where viz_id = #{vizId} ORDER BY create_time",
"SELECT * FROM `share` where viz_id = #{vizId} AND create_by &lt;&gt; 'SCHEDULER' ORDER BY create_time",
"</script>"})
List<Share> selectByViz(String vizId);

2 changes: 1 addition & 1 deletion data-providers/data-provider-base/pom.xml
@@ -5,7 +5,7 @@
<parent>
<artifactId>datart-data-provider</artifactId>
<groupId>datart</groupId>
<version>1.0.0-beta.4</version>
<version>1.0.0-rc.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>

@@ -195,7 +195,7 @@ private void excludeColumns(Dataframe data, Set<SelectColumn> include) {
if (include
.stream()
.noneMatch(selectColumn ->
column.getName().equals(selectColumn.getColumnKey()) || column.getName().equals(selectColumn.getAlias()))) {
column.columnKey().equals(selectColumn.getColumnKey()) || column.columnKey().equals(selectColumn.getAlias()))) {
excludeIndex.add(i);
}
}
@@ -17,8 +17,10 @@
*/
package datart.data.provider.calcite;

import datart.core.base.exception.Exceptions;
import datart.core.data.provider.ScriptVariable;
import datart.data.provider.jdbc.SimpleVariablePlaceholder;
import datart.data.provider.script.SqlStringUtils;
import datart.data.provider.script.VariablePlaceholder;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.*;
@@ -121,16 +123,10 @@ private void createVariablePlaceholders(SqlCall logicExpressionCall, Set<SqlIden
}

logicExpressionCall = SpecialSqlCallConverter.convert(logicExpressionCall);
int startIndex = logicExpressionCall.getParserPosition().getColumnNum();
int endIndex = logicExpressionCall.getParserPosition().getEndColumnNum();
// Work around Calcite not counting the surrounding parentheses in the index, which shifts the reported positions
if (startIndex > 1 && srcSql.charAt(startIndex - 2) == '(') {
startIndex = startIndex - 1;
}
if (endIndex < srcSql.length() && srcSql.charAt(endIndex) == ')') {
endIndex = endIndex + 1;
}
String originalSqlFragment = srcSql.substring(startIndex - 1, endIndex).trim();
int startIndex = logicExpressionCall.getParserPosition().getColumnNum() - 1;
int endIndex = logicExpressionCall.getParserPosition().getEndColumnNum() - 1;

String originalSqlFragment = fixMissedParentheses(srcSql, startIndex, endIndex);

List<ScriptVariable> variables = new LinkedList<>();
for (SqlIdentifier identifier : variableIdentifier) {
@@ -145,4 +141,50 @@ private void createVariablePlaceholders(SqlCall logicExpressionCall, Set<SqlIden
variablePlaceholders.add(new VariablePlaceholder(variables, sqlDialect, logicExpressionCall, originalSqlFragment));
}

    // Handle the index misalignment caused by Calcite, in some cases, not counting the surrounding parentheses in the index
    private String fixMissedParentheses(String srcSql, int startIndex, int endIndex) {

        String originalSqlFragment = srcSql.substring(startIndex, endIndex + 1).trim();

        char[] missedParentheses = SqlStringUtils.findMissedParentheses(originalSqlFragment);
        if (missedParentheses.length != 0) {
            int left = 0;
            int right = 0;
            for (char parenthesis : missedParentheses) {
                if (parenthesis == '(') {
                    left++;
                } else {
                    right++;
                }
            }
            while (left != 0) {
                startIndex--;
                if (startIndex < 0) {
                    Exceptions.msg("There are mismatched parentheses nearby " + originalSqlFragment);
                }
                if (srcSql.charAt(startIndex) == ' ') {
                    continue;
                }
                if (srcSql.charAt(startIndex) == '(') {
                    left--;
                }
            }
            while (right != 0) {
                endIndex++;
                if (endIndex >= srcSql.length()) {
                    Exceptions.msg("There are mismatched parentheses nearby " + originalSqlFragment);
                }
                if (srcSql.charAt(endIndex) == ' ') {
                    continue;
                }
                if (srcSql.charAt(endIndex) == ')') {
                    right--;
                } else {
                    Exceptions.msg("There are mismatched parentheses nearby " + originalSqlFragment);
                }
            }
        }
        return srcSql.substring(startIndex, endIndex + 1).trim();
    }

}
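The new fixMissedParentheses relies on SqlStringUtils.findMissedParentheses, which is not shown in this diff; from the way its result is consumed, it appears to return the parentheses a fragment still needs in order to balance ('(' meaning the fragment must grow to the left, ')' to the right). A self-contained sketch of that assumed contract, purely for illustration and not part of the commit:

```java
public class ParenthesesSketch {

    // Simplified stand-in for the assumed behaviour of SqlStringUtils.findMissedParentheses:
    // returns '(' for every unmatched ')' and ')' for every unmatched '(' in the fragment.
    static char[] missedParentheses(String fragment) {
        int unmatchedOpen = 0;   // '(' still waiting for a ')'
        int unmatchedClose = 0;  // ')' that never had a '('
        for (char c : fragment.toCharArray()) {
            if (c == '(') {
                unmatchedOpen++;
            } else if (c == ')') {
                if (unmatchedOpen > 0) {
                    unmatchedOpen--;
                } else {
                    unmatchedClose++;
                }
            }
        }
        char[] missing = new char[unmatchedClose + unmatchedOpen];
        int i = 0;
        while (unmatchedClose-- > 0) {
            missing[i++] = '(';  // fragment needs to be widened to the left
        }
        while (unmatchedOpen-- > 0) {
            missing[i++] = ')';  // fragment needs to be widened to the right
        }
        return missing;
    }

    public static void main(String[] args) {
        // Fragments cut off mid-expression, as the reported parser positions might yield them.
        System.out.println(new String(missedParentheses("status = 'ok' AND 1 = 1)")));  // prints: (
        System.out.println(new String(missedParentheses("(status = 'ok' AND 1 = 1")));  // prints: )
        System.out.println(missedParentheses("status = 'ok'").length);                  // prints: 0
    }
}
```

createVariablePlaceholders then widens startIndex/endIndex in srcSql until those missing characters are found, replacing the old fixed one-character adjustment.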
@@ -52,6 +52,9 @@ public QueryScriptProcessResult process(QueryScript queryScript) {
SqlNode conditionNode = null;
if (!CollectionUtils.isEmpty(tableJoin.getConditions())) {
for (JoinCondition joinCondition : tableJoin.getConditions()) {
if (!joinCondition.isValid()) {
continue;
}
SqlBasicCall condition = new SqlBasicCall(SqlStdOperatorTable.EQUALS
, new SqlNode[]{SqlNodeUtils.createSqlIdentifier(joinCondition.getLeft())
, SqlNodeUtils.createSqlIdentifier(joinCondition.getRight())}
@@ -48,7 +48,9 @@ public static List<VariablePlaceholder> resolve(SqlDialect sqlDialect, String sr
while (matcher.find()) {
String group = matcher.group();
ScriptVariable scriptVariable = variableMap.get(group);
variablePlaceholderMap.put(group, scriptVariable);
if (scriptVariable != null) {
variablePlaceholderMap.put(group, scriptVariable);
}
}
if (variablePlaceholderMap.isEmpty()) {
return Collections.emptyList();
@@ -21,6 +21,7 @@
import datart.core.base.consts.Const;
import datart.core.base.exception.Exceptions;
import datart.core.common.Application;
import datart.core.common.UUIDGenerator;
import datart.core.data.provider.*;
import datart.data.provider.calcite.dialect.H2Dialect;
import datart.data.provider.jdbc.DataTypeUtils;
@@ -36,8 +37,8 @@
import org.h2.tools.SimpleResultSet;

import java.sql.*;
import java.util.*;
import java.util.Date;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

@Slf4j
@@ -171,9 +172,12 @@ public static Dataframe executeLocalQuery(QueryScript queryScript, ExecuteParam
* @return the result of executing the query script with the given execute params
*/
public static Dataframe executeLocalQuery(QueryScript queryScript, ExecuteParam executeParam, Dataframes dataframes, boolean persistent, Date expire) throws Exception {
if (queryScript == null) {
if (queryScript == null || (dataframes.size() == 1 && dataframes.getDataframes().get(0).getName() == null)) {
// Query the given dataframe directly as a table, generating a default SQL statement that selects all of its data
queryScript = new QueryScript();
if (dataframes.getDataframes().get(0).getName() == null) {
dataframes.getDataframes().get(0).setName("Q" + UUIDGenerator.generate());
}
queryScript.setScript(String.format(SELECT_START_SQL, dataframes.getDataframes().get(0).getName()));
queryScript.setVariables(Collections.emptyList());
queryScript.setSourceId(dataframes.getKey());
@@ -268,6 +272,7 @@ public static void clearCache(String cacheKey) throws SQLException {
}

private static Dataframe execute(Connection connection, QueryScript queryScript, ExecuteParam executeParam) throws Exception {

SqlScriptRender render = new SqlScriptRender(queryScript
, executeParam
, SQL_DIALECT);
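With the guard added to executeLocalQuery above, a caller no longer has to name a single ad-hoc Dataframe itself: a null script plus one unnamed frame now gets an auto-generated "Q" + UUID table name and a default select-all script. A hedged sketch of such a call, not part of the commit: the class and method names and the import paths are assumptions drawn from the file paths in this diff, and the Dataframe is presumed to be populated elsewhere.

```java
import datart.core.data.provider.Dataframe;
import datart.core.data.provider.Dataframes;
import datart.core.data.provider.ExecuteParam;
import datart.data.provider.local.LocalDB;

public class LocalQuerySketch {

    // `df` carries columns and rows but deliberately no name; `executeParam` is built by the caller.
    static Dataframe queryWithoutScript(String sourceId, Dataframe df, ExecuteParam executeParam) throws Exception {
        Dataframes frames = Dataframes.of(sourceId, df);
        // null script + single unnamed frame: LocalDB now names the frame and issues a select-all.
        return LocalDB.executeLocalQuery(null, executeParam, frames, false, null);
    }
}
```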
@@ -27,4 +27,8 @@ public class JoinCondition {

private String[] right;

public boolean isValid() {
return left != null && left.length > 0 && right != null && right.length > 0;
}

}
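JoinCondition.isValid() is what the query-script processor above now uses to skip half-filled join conditions before turning them into ON clauses. An illustrative check follows; the import path and the Lombok-generated setters are assumptions, since only getLeft/getRight and isValid appear in this diff.

```java
// The package of JoinCondition is collapsed in this view; the import is an assumption.
import datart.data.provider.jdbc.JoinCondition;

public class JoinConditionSketch {

    public static void main(String[] args) {
        JoinCondition complete = new JoinCondition();
        complete.setLeft(new String[]{"orders", "user_id"});   // setters assumed to be Lombok-generated
        complete.setRight(new String[]{"users", "id"});

        JoinCondition halfFilled = new JoinCondition();
        halfFilled.setLeft(new String[]{"orders", "user_id"}); // right side never provided

        System.out.println(complete.isValid());    // true  -> becomes an ON clause
        System.out.println(halfFilled.isValid());  // false -> skipped by the new guard
    }
}
```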