Commit

Fix: updated datasource configuration not taking effect when building the job JSON
weiye committed Dec 25, 2019
1 parent f91a6ca commit 0d71fe7
Showing 4 changed files with 98 additions and 8 deletions.
```diff
@@ -6,6 +6,7 @@
 import com.baomidou.mybatisplus.extension.api.ApiController;
 import com.baomidou.mybatisplus.extension.api.R;
 import com.wugui.datax.admin.service.IJobJdbcDatasourceService;
+import com.wugui.datax.admin.tool.query.BaseQueryTool;
 import com.wugui.datax.admin.util.PageUtils;
 import com.wugui.datax.admin.entity.JobJdbcDatasource;
 import io.swagger.annotations.Api;
@@ -131,6 +132,7 @@ public R<Boolean> insert(@RequestBody JobJdbcDatasource entity) {
     @PutMapping
     @ApiOperation("修改数据")
     public R<Boolean> update(@RequestBody JobJdbcDatasource entity) {
+        BaseQueryTool.CREATED_CONNECTIONS.remove(entity.getDatasourceName());
         return success(this.jobJdbcDatasourceService.updateById(entity));
     }
 
```
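The functional change here is the cache eviction in the `update` endpoint: `BaseQueryTool` keeps already-built connections in the static `CREATED_CONNECTIONS` collection, keyed by datasource name, so editing a datasource used to leave a stale connection that the JSON-building step kept reusing. Below is a minimal sketch of that pattern, assuming the field is a `ConcurrentHashMap` and inventing a `getOrCreate` helper for illustration; neither detail is shown in this diff.

```java
import java.sql.Connection;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Supplier;

// Sketch of the caching pattern implied by the diff; the map type and the
// helper names are assumptions, not the project's actual code.
class ConnectionCacheSketch {

    // Cached connections, keyed by datasource name.
    static final Map<String, Connection> CREATED_CONNECTIONS = new ConcurrentHashMap<>();

    // Returns the cached connection, building one only on a cache miss.
    // Without eviction, this keeps serving a connection built from the old
    // configuration even after the datasource row has been updated.
    static Connection getOrCreate(String datasourceName, Supplier<Connection> factory) {
        return CREATED_CONNECTIONS.computeIfAbsent(datasourceName, k -> factory.get());
    }

    // What the PUT endpoint now does before persisting the new configuration.
    static void evict(String datasourceName) {
        CREATED_CONNECTIONS.remove(datasourceName);
    }
}
```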
```diff
@@ -12,7 +12,6 @@
 import com.wugui.datax.admin.tool.meta.DatabaseInterface;
 import com.wugui.datax.admin.tool.meta.DatabaseMetaFactory;
 import com.zaxxer.hikari.HikariDataSource;
-import groovy.util.logging.Slf4j;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -30,7 +29,6 @@
  * @Version 1.0
  * @since 2019/7/18 9:22
  */
-@Slf4j
 public abstract class BaseQueryTool implements QueryToolInterface {
 
     protected static final Logger logger = LoggerFactory.getLogger(BaseQueryTool.class);
@@ -39,16 +37,15 @@ public abstract class BaseQueryTool implements QueryToolInterface {
     /**
      * used to build the query statements
      */
-    protected DatabaseInterface sqlBuilder;
+    private DatabaseInterface sqlBuilder;
 
-    protected DataSource datasource;
-
-    protected Connection connection;
-
+    private DataSource datasource;
+
+    private Connection connection;
     /**
      * current database (schema) name
      */
-    protected String currentSchema;
+    private String currentSchema;
 
     /**
      * constructor
@@ -68,6 +65,7 @@ public abstract class BaseQueryTool implements QueryToolInterface {
         dataSource.setIdleTimeout(35000);
         dataSource.setMinimumIdle(0);
         dataSource.setConnectionTimeout(30000);
+        dataSource.setConnectionTestQuery("SELECT 1");
         // set read-only
         dataSource.setReadOnly(true);
         this.datasource = dataSource;
```
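For context, here are the pool settings touched in the last hunk, gathered into one self-contained builder. This is only a sketch under the assumption that the tool needs short-lived, read-only metadata connections; the URL and credentials are placeholder parameters, not values from the commit.

```java
import com.zaxxer.hikari.HikariDataSource;
import javax.sql.DataSource;

public class PoolConfigSketch {

    public static DataSource build(String jdbcUrl, String user, String password) {
        HikariDataSource ds = new HikariDataSource();
        ds.setJdbcUrl(jdbcUrl);
        ds.setUsername(user);
        ds.setPassword(password);
        ds.setIdleTimeout(35000);        // retire idle connections after 35 s
        ds.setMinimumIdle(0);            // allow the pool to shrink to zero
        ds.setConnectionTimeout(30000);  // give up after 30 s waiting for a connection
        ds.setConnectionTestQuery("SELECT 1"); // validate each connection before handing it out
        ds.setReadOnly(true);            // this tool only reads metadata, so read-only
        return ds;
    }
}
```

Adding `setConnectionTestQuery` complements the cache eviction: even if a cached pool survives a configuration change, a dead connection is detected before it is handed to a caller.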
3 changes: 2 additions & 1 deletion datax-admin/src/main/resources/application.yml
```diff
@@ -8,8 +8,9 @@ spring:
     url: jdbc:mysql://localhost:3306/datax_web?serverTimezone=Asia/Shanghai&useLegacyDatetimeCode=false&useSSL=false&nullNamePatternMatchesAll=true&useUnicode=true&characterEncoding=UTF-8
     driver-class-name: com.mysql.jdbc.Driver
 
-    ## minimum number of idle connections
+
     hikari:
+      ## minimum number of idle connections
       minimum-idle: 5
       ## maximum idle lifetime of a connection, default 600000 (10 minutes)
       idle-timeout: 180000
```
89 changes: 89 additions & 0 deletions doc/datax-web/mysql2hive.md
@@ -0,0 +1,89 @@
# mysql2hive extraction

## DataX job configuration file

datax.json

```json
{
  "job": {
    "content": [
      {
        "reader": {
          "name": "mysqlreader",
          "parameter": {
            "connection": [
              {
                "jdbcUrl": [
                  "jdbc:mysql://127.0.0.1:3306/manual_order?useUnicode=true&characterEncoding=utf-8&useSSL=false&rewriteBatchedStatements=true"
                ],
                "querySql": [
                  "select no,name,code,status,province,city,industy,score from test where id < 1000000"
                ]
              }
            ],
            "password": "root",
            "username": "root"
          }
        },
        "writer": {
          "name": "hdfswriter",
          "parameter": {
            "defaultFS": "hdfs://localhost:9000",
            "fileType": "text",
            "path": "/user/hive/warehouse/offline.db/fgw_company_evaluate_gg/datety=2019-12-24",
            "fileName": "test",
            "column": [
              { "name": "no", "type": "string" },
              { "name": "name", "type": "string" },
              { "name": "code", "type": "string" },
              { "name": "status", "type": "string" },
              { "name": "province", "type": "string" },
              { "name": "city", "type": "string" },
              { "name": "industy", "type": "string" },
              { "name": "score", "type": "double" }
            ],
            "writeMode": "append",
            "fieldDelimiter": ","
          }
        }
      }
    ],
    "setting": {
      "speed": {
        "channel": 2
      }
    }
  }
}
```
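A job file like this is normally launched with DataX's bundled runner, e.g. `python {DATAX_HOME}/bin/datax.py datax.json` (the `{DATAX_HOME}` placeholder is illustrative). The notes below concern what happens once hdfswriter starts writing.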

- The Hive partition must be created ahead of time (e.g. `ALTER TABLE offline.fgw_company_evaluate_gg ADD PARTITION (datety='2019-12-24')`); the partition value is part of `path`, as in `datety=2019-12-24` above.
- On Windows, when DataX runs hdfswriter, the separator it appends after `hdfs://localhost:9000/user/hive/warehouse/offline.db` follows the operating system's convention, so a backslash is used and the temporary file path becomes `hdfs://localhost:9000/user/hive/warehouse/db\...`. During cleanup, nothing after the `\` is recognized, so the deletion can wipe the whole database directory (see the sketch below).

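The second note above describes a path-separator bug. Here is a minimal sketch of the failure mode; the `tmp_datax` suffix is made up for illustration:

```java
import java.io.File;

public class HdfsPathSketch {
    public static void main(String[] args) {
        String base = "hdfs://localhost:9000/user/hive/warehouse/offline.db";

        // WRONG on Windows: File.separator is '\', which must never appear
        // in an HDFS path, and truncates the path at cleanup time.
        String broken = base + File.separator + "tmp_datax";

        // HDFS URIs always use '/', regardless of the client OS.
        String correct = base + "/" + "tmp_datax";

        System.out.println(broken);   // ...offline.db\tmp_datax when run on Windows
        System.out.println(correct);  // ...offline.db/tmp_datax everywhere
    }
}
```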
