Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
N
nnjcy-data-model
Overview
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
梁家彪
nnjcy-data-model
Commits
353d8ce0
Commit
353d8ce0
authored
Sep 05, 2023
by
wqc
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
元数据接口修改
parent
c3f0922c
Show whitespace changes
Inline
Side-by-side
Showing
10 changed files
with
856 additions
and
37 deletions
+856
-37
dataoperation-server/src/main/java/com/zq/dataoperation/service/CommonQueryService.java
+7
-37
dataoperation-server/src/main/java/com/zq/dataoperation/utils/DataTypeEnum.java
+171
-0
dataoperation-server/src/main/java/com/zq/dataoperation/utils/SqlUtils.java
+237
-0
spider-flow-web/src/main/java/com/zq/spiderflow/controller/MetaDataController.java
+10
-0
spider-flow-web/src/main/java/com/zq/spiderflow/dao/MetaDataMappingDao.java
+3
-0
spider-flow-web/src/main/java/com/zq/spiderflow/service/MetaDataService.java
+8
-0
spider-flow-web/src/main/java/com/zq/spiderflow/util/DataTypeEnum.java
+171
-0
spider-flow-web/src/main/java/com/zq/spiderflow/util/SqlUtils.java
+237
-0
spider-flow-web/src/main/java/com/zq/spiderflow/vo/MetaDataReq.java
+2
-0
spider-flow-web/src/main/resources/mapper/MetaDataMappingMapper.xml
+10
-0
No files found.
dataoperation-server/src/main/java/com/zq/dataoperation/service/CommonQueryService.java
View file @
353d8ce0
...
...
@@ -16,6 +16,7 @@ import com.zq.dataoperation.dao.QueryDbDao;
import
com.zq.dataoperation.entity.CommonQuerySetting
;
import
com.zq.dataoperation.entity.QueryDb
;
import
com.zq.dataoperation.utils.ConnectionUtil
;
import
com.zq.dataoperation.utils.SqlUtils
;
import
com.zq.spiderflow.dao.MetaDataMappingDao
;
import
com.zq.spiderflow.entity.MetaData
;
import
com.zq.spiderflow.entity.MetaDataMapping
;
...
...
@@ -101,46 +102,15 @@ public class CommonQueryService extends ServiceImpl<CommonQuerySettingDao, Commo
}
public
Object
checkConnect
(
QueryDb
queryDb
)
{
DruidDataSource
druidDataSource
=
DruidDataSourceBuilder
.
create
().
build
();
String
url
=
""
;
String
className
=
""
;
switch
(
queryDb
.
getDbType
())
{
case
1
:
className
=
"com.mysql.cj.jdbc.Driver"
;
url
=
"jdbc:mysql://"
+
queryDb
.
getDbIp
()
+
":"
+
queryDb
.
getDbPort
()
+
"/"
+
queryDb
.
getDbName
()
+
"?characterEncoding=utf8&useSSL=false&serverTimezone=GMT%2B8&autoReconnect=true"
;
break
;
case
2
:
className
=
"oracle.jdbc.driver.OracleDriver"
;
url
=
"jdbc:oracle:thin:@//"
+
queryDb
.
getDbIp
()
+
":"
+
queryDb
.
getDbPort
()
+
"/"
+
queryDb
.
getDbName
();
break
;
case
3
:
className
=
"com.microsoft.sqlserver.jdbc.SQLServerDriver"
;
url
=
"jdbc:sqlserver://"
+
queryDb
.
getDbIp
()
+
":"
+
queryDb
.
getDbPort
()
+
"/"
+
queryDb
.
getDbName
();
break
;
default
:
throw
new
BusinessException
(
"不支持的数据库"
);
}
QueryDb
queryDb1
=
queryDbDao
.
selectById
(
queryDb
.
getId
());
//jdbc:mysql://47.107.148.253:3306/nnjcy_data_model
String
jdbcUrl
=
queryDb1
.
getDbIp
();
try
{
druidDataSource
.
setDriverClassName
(
className
);
druidDataSource
.
setUrl
(
url
);
druidDataSource
.
setUsername
(
queryDb
.
getUsername
());
druidDataSource
.
setPassword
(
queryDb
.
getPassword
());
druidDataSource
.
init
();
// if (druidDataSource.isInited()==true) {
// return ResultVo.success("连接成功");
// }else {
// return ResultVo.success("连接失败");
// }
// connectionMap.put(queryDb.getId(), connection);
if
(
druidDataSource
.
getConnection
().
isValid
(
10000
))
{
return
ResultVo
.
success
(
"连接成功"
);
}
else
if
(
druidDataSource
.
getConnection
().
isClosed
())
{
return
ResultVo
.
success
(
"连接失败"
);
}
return
SqlUtils
.
testConnection
(
jdbcUrl
,
queryDb1
.
getUsername
(),
queryDb1
.
getPassword
());
}
catch
(
Exception
e
)
{
return
ResultVo
.
fail
(
e
.
getMessage
());
log
.
error
(
e
.
getMessage
());
return
false
;
}
return
ResultVo
.
success
();
}
public
List
<
Map
>
runSelect
(
Map
<
String
,
Object
>
body
)
throws
Exception
{
...
...
dataoperation-server/src/main/java/com/zq/dataoperation/utils/DataTypeEnum.java
0 → 100644
View file @
353d8ce0
/*
* <<
* Davinci
* ==
* Copyright (C) 2016 - 2019 EDP
* ==
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* >>
*
*/
package
com
.
zq
.
dataoperation
.
utils
;
import
lombok.extern.slf4j.Slf4j
;
/**
 * Supported JDBC data source types.
 * <p>
 * Each constant carries the URL "feature" token (the scheme segment right
 * after {@code jdbc:}), a human-readable description, the driver class name,
 * and the quoting characters used for keywords and aliases when SQL is
 * generated for that dialect.
 *
 * @author /
 */
public enum DataTypeEnum {

    /** mysql */
    MYSQL("mysql", "mysql", "com.mysql.jdbc.Driver", "`", "`", "'", "'"),

    /** oracle */
    ORACLE("oracle", "oracle", "oracle.jdbc.driver.OracleDriver", "\"", "\"", "\"", "\""),

    /** sql server */
    SQLSERVER("sqlserver", "sqlserver", "com.microsoft.sqlserver.jdbc.SQLServerDriver", "\"", "\"", "\"", "\""),

    /** h2 */
    H2("h2", "h2", "org.h2.Driver", "`", "`", "\"", "\""),

    /** hbase phoenix */
    PHOENIX("phoenix", "hbase phoenix", "org.apache.phoenix.jdbc.PhoenixDriver", "", "", "\"", "\""),

    /** mongodb */
    MONGODB("mongo", "mongodb", "mongodb.jdbc.MongoDriver", "`", "`", "\"", "\""),

    /** elasticsearch via sql4es */
    ELASTICSEARCH("sql4es", "elasticsearch", "nl.anchormen.sql4es.jdbc.ESDriver", "", "", "'", "'"),

    /** presto */
    PRESTO("presto", "presto", "com.facebook.presto.jdbc.PrestoDriver", "", "", "\"", "\""),

    /** moonbox */
    MOONBOX("moonbox", "moonbox", "moonbox.jdbc.MbDriver", "`", "`", "`", "`"),

    /** cassandra */
    CASSANDRA("cassandra", "cassandra", "com.github.adejanovski.cassandra.jdbc.CassandraDriver", "", "", "'", "'"),

    /** clickhouse */
    CLICKHOUSE("clickhouse", "clickhouse", "ru.yandex.clickhouse.ClickHouseDriver", "", "", "\"", "\""),

    /** kylin */
    KYLIN("kylin", "kylin", "org.apache.kylin.jdbc.Driver", "\"", "\"", "\"", "\""),

    /** vertica */
    VERTICA("vertica", "vertica", "com.vertica.jdbc.Driver", "", "", "'", "'"),

    /** sap hana */
    HANA("sap", "sap hana", "com.sap.db.jdbc.Driver", "", "", "'", "'"),

    /** impala */
    IMPALA("impala", "impala", "com.cloudera.impala.jdbc41.Driver", "", "", "'", "'");

    private final String feature;
    private final String desc;
    private final String driver;
    private final String keywordPrefix;
    private final String keywordSuffix;
    private final String aliasPrefix;
    private final String aliasSuffix;

    private static final String JDBC_URL_PREFIX = "jdbc:";

    DataTypeEnum(String feature, String desc, String driver, String keywordPrefix,
                 String keywordSuffix, String aliasPrefix, String aliasSuffix) {
        this.feature = feature;
        this.desc = desc;
        this.driver = driver;
        this.keywordPrefix = keywordPrefix;
        this.keywordSuffix = keywordSuffix;
        this.aliasPrefix = aliasPrefix;
        this.aliasSuffix = aliasSuffix;
    }

    /**
     * Resolve the data source type from a JDBC URL.
     *
     * @param jdbcUrl full JDBC URL, e.g. {@code jdbc:mysql://host:3306/db}
     * @return the matching type, or {@code null} when the URL scheme is unknown
     * @throws RuntimeException when the scheme is recognized but the driver
     *         class is not present on the classpath
     */
    public static DataTypeEnum urlOf(String jdbcUrl) {
        String url = jdbcUrl.toLowerCase().trim();
        for (DataTypeEnum dataTypeEnum : values()) {
            if (url.startsWith(JDBC_URL_PREFIX + dataTypeEnum.feature)) {
                try {
                    // Fail fast when the driver jar is missing. (Class.forName
                    // never returns null, so the old null-check was dead code.)
                    Class.forName(dataTypeEnum.getDriver());
                } catch (ClassNotFoundException e) {
                    // Preserve the cause — it was dropped before.
                    throw new RuntimeException("Unable to get driver instance: " + jdbcUrl, e);
                }
                return dataTypeEnum;
            }
        }
        return null;
    }

    public String getFeature() {
        return feature;
    }

    public String getDesc() {
        return desc;
    }

    public String getDriver() {
        return driver;
    }

    public String getKeywordPrefix() {
        return keywordPrefix;
    }

    public String getKeywordSuffix() {
        return keywordSuffix;
    }

    public String getAliasPrefix() {
        return aliasPrefix;
    }

    public String getAliasSuffix() {
        return aliasSuffix;
    }
}
dataoperation-server/src/main/java/com/zq/dataoperation/utils/SqlUtils.java
0 → 100644
View file @
353d8ce0
/*
* Copyright 2019-2020 Zheng Jie
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package
com
.
zq
.
dataoperation
.
utils
;
import
cn.hutool.crypto.SecureUtil
;
import
com.alibaba.druid.pool.DruidDataSource
;
import
com.alibaba.druid.util.StringUtils
;
import
com.google.common.collect.Lists
;
import
lombok.extern.slf4j.Slf4j
;
import
javax.sql.DataSource
;
import
java.io.BufferedReader
;
import
java.io.File
;
import
java.io.FileInputStream
;
import
java.io.InputStreamReader
;
import
java.nio.charset.StandardCharsets
;
import
java.sql.*
;
import
java.util.HashMap
;
import
java.util.List
;
import
java.util.Map
;
/**
* @author /
*/
@Slf4j
public
class
SqlUtils
{
public
static
final
String
COLON
=
":"
;
private
static
volatile
Map
<
String
,
DruidDataSource
>
map
=
new
HashMap
<>();
private
static
String
getKey
(
String
jdbcUrl
,
String
username
,
String
password
)
{
StringBuilder
sb
=
new
StringBuilder
();
if
(!
StringUtils
.
isEmpty
(
username
))
{
sb
.
append
(
username
);
}
if
(!
StringUtils
.
isEmpty
(
password
))
{
sb
.
append
(
COLON
).
append
(
password
);
}
sb
.
append
(
COLON
).
append
(
jdbcUrl
.
trim
());
return
SecureUtil
.
md5
(
sb
.
toString
());
}
/**
* 获取数据源
*
* @param jdbcUrl /
* @param userName /
* @param password /
* @return DataSource
*/
private
static
DataSource
getDataSource
(
String
jdbcUrl
,
String
userName
,
String
password
)
{
String
key
=
getKey
(
jdbcUrl
,
userName
,
password
);
if
(!
map
.
containsKey
(
key
)
||
null
==
map
.
get
(
key
))
{
DruidDataSource
druidDataSource
=
new
DruidDataSource
();
String
className
;
try
{
className
=
DriverManager
.
getDriver
(
jdbcUrl
.
trim
()).
getClass
().
getName
();
}
catch
(
SQLException
e
)
{
throw
new
RuntimeException
(
"Get class name error: ="
+
jdbcUrl
);
}
if
(
StringUtils
.
isEmpty
(
className
))
{
DataTypeEnum
dataTypeEnum
=
DataTypeEnum
.
urlOf
(
jdbcUrl
);
if
(
null
==
dataTypeEnum
)
{
throw
new
RuntimeException
(
"Not supported data type: jdbcUrl="
+
jdbcUrl
);
}
druidDataSource
.
setDriverClassName
(
dataTypeEnum
.
getDriver
());
}
else
{
druidDataSource
.
setDriverClassName
(
className
);
}
druidDataSource
.
setUrl
(
jdbcUrl
);
druidDataSource
.
setUsername
(
userName
);
druidDataSource
.
setPassword
(
password
);
// 配置获取连接等待超时的时间
druidDataSource
.
setMaxWait
(
3000
);
// 配置初始化大小、最小、最大
druidDataSource
.
setInitialSize
(
1
);
druidDataSource
.
setMinIdle
(
1
);
druidDataSource
.
setMaxActive
(
1
);
// 配置间隔多久才进行一次检测需要关闭的空闲连接,单位是毫秒
druidDataSource
.
setTimeBetweenEvictionRunsMillis
(
50000
);
// 配置一旦重试多次失败后等待多久再继续重试连接,单位是毫秒
druidDataSource
.
setTimeBetweenConnectErrorMillis
(
18000
);
// 配置一个连接在池中最小生存的时间,单位是毫秒
druidDataSource
.
setMinEvictableIdleTimeMillis
(
300000
);
// 这个特性能解决 MySQL 服务器8小时关闭连接的问题
druidDataSource
.
setMaxEvictableIdleTimeMillis
(
25200000
);
try
{
druidDataSource
.
init
();
}
catch
(
SQLException
e
)
{
log
.
error
(
"Exception during pool initialization"
,
e
);
throw
new
RuntimeException
(
e
.
getMessage
());
}
map
.
put
(
key
,
druidDataSource
);
}
return
map
.
get
(
key
);
}
private
static
Connection
getConnection
(
String
jdbcUrl
,
String
userName
,
String
password
)
{
DataSource
dataSource
=
getDataSource
(
jdbcUrl
,
userName
,
password
);
Connection
connection
=
null
;
try
{
connection
=
dataSource
.
getConnection
();
}
catch
(
Exception
ignored
)
{
}
try
{
int
timeOut
=
5
;
if
(
null
==
connection
||
connection
.
isClosed
()
||
!
connection
.
isValid
(
timeOut
))
{
log
.
info
(
"connection is closed or invalid, retry get connection!"
);
connection
=
dataSource
.
getConnection
();
}
}
catch
(
Exception
e
)
{
log
.
error
(
"create connection error, jdbcUrl: {}"
,
jdbcUrl
);
throw
new
RuntimeException
(
"create connection error, jdbcUrl: "
+
jdbcUrl
);
}
return
connection
;
}
private
static
void
releaseConnection
(
Connection
connection
)
{
if
(
null
!=
connection
)
{
try
{
connection
.
close
();
}
catch
(
Exception
e
)
{
e
.
printStackTrace
();
log
.
error
(
"connection close error:"
+
e
.
getMessage
());
}
}
}
public
static
void
closeResult
(
ResultSet
rs
)
{
if
(
rs
!=
null
)
{
try
{
rs
.
close
();
}
catch
(
Exception
e
)
{
e
.
printStackTrace
();
}
}
}
public
static
boolean
testConnection
(
String
jdbcUrl
,
String
userName
,
String
password
)
{
Connection
connection
=
null
;
try
{
connection
=
getConnection
(
jdbcUrl
,
userName
,
password
);
if
(
null
!=
connection
)
{
return
true
;
}
}
catch
(
Exception
e
)
{
log
.
info
(
"Get connection failed:"
+
e
.
getMessage
());
}
finally
{
releaseConnection
(
connection
);
}
return
false
;
}
public
static
String
executeFile
(
String
jdbcUrl
,
String
userName
,
String
password
,
File
sqlFile
)
{
Connection
connection
=
getConnection
(
jdbcUrl
,
userName
,
password
);
try
{
batchExecute
(
connection
,
readSqlList
(
sqlFile
));
}
catch
(
Exception
e
)
{
log
.
error
(
"sql脚本执行发生异常:{}"
,
e
.
getMessage
());
return
e
.
getMessage
();
}
finally
{
releaseConnection
(
connection
);
}
return
"success"
;
}
/**
* 批量执行sql
*
* @param connection /
* @param sqlList /
*/
public
static
void
batchExecute
(
Connection
connection
,
List
<
String
>
sqlList
)
throws
SQLException
{
Statement
st
=
connection
.
createStatement
();
for
(
String
sql
:
sqlList
)
{
if
(
sql
.
endsWith
(
";"
))
{
sql
=
sql
.
substring
(
0
,
sql
.
length
()
-
1
);
}
st
.
addBatch
(
sql
);
}
st
.
executeBatch
();
}
/**
* 将文件中的sql语句以;为单位读取到列表中
*
* @param sqlFile /
* @return /
* @throws Exception e
*/
private
static
List
<
String
>
readSqlList
(
File
sqlFile
)
throws
Exception
{
List
<
String
>
sqlList
=
Lists
.
newArrayList
();
StringBuilder
sb
=
new
StringBuilder
();
try
(
BufferedReader
reader
=
new
BufferedReader
(
new
InputStreamReader
(
new
FileInputStream
(
sqlFile
),
StandardCharsets
.
UTF_8
)))
{
String
tmp
;
while
((
tmp
=
reader
.
readLine
())
!=
null
)
{
log
.
info
(
"line:{}"
,
tmp
);
if
(
tmp
.
endsWith
(
";"
))
{
sb
.
append
(
tmp
);
sqlList
.
add
(
sb
.
toString
());
sb
.
delete
(
0
,
sb
.
length
());
}
else
{
sb
.
append
(
tmp
);
}
}
if
(!
""
.
endsWith
(
sb
.
toString
().
trim
()))
{
sqlList
.
add
(
sb
.
toString
());
}
}
return
sqlList
;
}
}
spider-flow-web/src/main/java/com/zq/spiderflow/controller/MetaDataController.java
View file @
353d8ce0
...
...
@@ -46,6 +46,16 @@ public class MetaDataController {
}
/**
 * Query the metadata entries mapped to a physical table.
 * <p>
 * NOTE(review): the swagger text says "get all metadata" (获取全部元数据),
 * but the service filters by {@code req.tableSqlName} — the description
 * looks stale; confirm and update the annotation text separately.
 *
 * @param req request body; {@code tableSqlName} selects the physical table
 * @return ResultVo wrapping the list of matching metadata rows
 */
@ApiOperation("获取全部元数据")
@PostMapping("/getMetaByTable")
public ResultVo getMetaByTable(@RequestBody MetaDataReq req) {
    return ResultVo.success(metaDataService.getMetaByTable(req));
}
/**
* 根据id获取元数据
* @param id
* @return
...
...
spider-flow-web/src/main/java/com/zq/spiderflow/dao/MetaDataMappingDao.java
View file @
353d8ce0
package
com
.
zq
.
spiderflow
.
dao
;
import
com.baomidou.mybatisplus.core.mapper.BaseMapper
;
import
com.zq.spiderflow.vo.MetaDataReq
;
import
org.springframework.stereotype.Repository
;
import
com.zq.spiderflow.entity.MetaDataMapping
;
...
...
@@ -16,4 +17,6 @@ public interface MetaDataMappingDao extends BaseMapper<MetaDataMapping> {
    /**
     * List mappings together with their SQL table names.
     * NOTE(review): exact query lives in MetaDataMappingMapper.xml — confirm.
     */
    List<MetaDataMapping> getSqlName();

    /**
     * Mappings whose metadata belongs to the given category.
     *
     * @param id meta_data_category id
     */
    List<MetaDataMapping> selectByCateId(Long id);

    /**
     * Metadata rows mapped to the given physical table (the mapper XML joins
     * meta_data to meta_data_mapping on metadata_id).
     *
     * @param tableSqlName physical table name (meta_data_mapping.table_sqlname)
     */
    List<MetaDataReq> getMetaByTable(String tableSqlName);
}
spider-flow-web/src/main/java/com/zq/spiderflow/service/MetaDataService.java
View file @
353d8ce0
...
...
@@ -157,6 +157,14 @@ public class MetaDataService extends ServiceImpl<MetaDataDao, MetaData> {
if
(
StringUtils
.
isNotBlank
(
req
.
getMetaName
()))
{
queryWrapper
.
like
(
MetaData:
:
getMetaName
,
req
.
getMetaName
());
}
if
(
StringUtils
.
isNotBlank
(
req
.
getEnglishName
()))
{
queryWrapper
.
like
(
MetaData:
:
getEnglishName
,
req
.
getEnglishName
());
}
return
PagingUtils
.
paging
(
req
,
metaDataDao
,
queryWrapper
,
MetaData
.
class
);
}
/**
 * List the metadata entries mapped to the physical table named in the request.
 *
 * @param req request carrying the physical table name ({@code tableSqlName})
 * @return metadata rows joined with their table mapping
 */
public List<MetaDataReq> getMetaByTable(MetaDataReq req) {
    // Straight pass-through to the mapper; no post-processing needed.
    return metaDataMappingDao.getMetaByTable(req.getTableSqlName());
}
}
spider-flow-web/src/main/java/com/zq/spiderflow/util/DataTypeEnum.java
0 → 100644
View file @
353d8ce0
/*
* <<
* Davinci
* ==
* Copyright (C) 2016 - 2019 EDP
* ==
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* >>
*
*/
package
com
.
zq
.
spiderflow
.
util
;
import
lombok.extern.slf4j.Slf4j
;
/**
 * Supported JDBC data source types.
 * <p>
 * Each constant carries the URL "feature" token (the scheme segment right
 * after {@code jdbc:}), a human-readable description, the driver class name,
 * and the quoting characters used for keywords and aliases when SQL is
 * generated for that dialect.
 *
 * @author /
 */
public enum DataTypeEnum {

    /** mysql */
    MYSQL("mysql", "mysql", "com.mysql.jdbc.Driver", "`", "`", "'", "'"),

    /** oracle */
    ORACLE("oracle", "oracle", "oracle.jdbc.driver.OracleDriver", "\"", "\"", "\"", "\""),

    /** sql server */
    SQLSERVER("sqlserver", "sqlserver", "com.microsoft.sqlserver.jdbc.SQLServerDriver", "\"", "\"", "\"", "\""),

    /** h2 */
    H2("h2", "h2", "org.h2.Driver", "`", "`", "\"", "\""),

    /** hbase phoenix */
    PHOENIX("phoenix", "hbase phoenix", "org.apache.phoenix.jdbc.PhoenixDriver", "", "", "\"", "\""),

    /** mongodb */
    MONGODB("mongo", "mongodb", "mongodb.jdbc.MongoDriver", "`", "`", "\"", "\""),

    /** elasticsearch via sql4es */
    ELASTICSEARCH("sql4es", "elasticsearch", "nl.anchormen.sql4es.jdbc.ESDriver", "", "", "'", "'"),

    /** presto */
    PRESTO("presto", "presto", "com.facebook.presto.jdbc.PrestoDriver", "", "", "\"", "\""),

    /** moonbox */
    MOONBOX("moonbox", "moonbox", "moonbox.jdbc.MbDriver", "`", "`", "`", "`"),

    /** cassandra */
    CASSANDRA("cassandra", "cassandra", "com.github.adejanovski.cassandra.jdbc.CassandraDriver", "", "", "'", "'"),

    /** clickhouse */
    CLICKHOUSE("clickhouse", "clickhouse", "ru.yandex.clickhouse.ClickHouseDriver", "", "", "\"", "\""),

    /** kylin */
    KYLIN("kylin", "kylin", "org.apache.kylin.jdbc.Driver", "\"", "\"", "\"", "\""),

    /** vertica */
    VERTICA("vertica", "vertica", "com.vertica.jdbc.Driver", "", "", "'", "'"),

    /** sap hana */
    HANA("sap", "sap hana", "com.sap.db.jdbc.Driver", "", "", "'", "'"),

    /** impala */
    IMPALA("impala", "impala", "com.cloudera.impala.jdbc41.Driver", "", "", "'", "'");

    private final String feature;
    private final String desc;
    private final String driver;
    private final String keywordPrefix;
    private final String keywordSuffix;
    private final String aliasPrefix;
    private final String aliasSuffix;

    private static final String JDBC_URL_PREFIX = "jdbc:";

    DataTypeEnum(String feature, String desc, String driver, String keywordPrefix,
                 String keywordSuffix, String aliasPrefix, String aliasSuffix) {
        this.feature = feature;
        this.desc = desc;
        this.driver = driver;
        this.keywordPrefix = keywordPrefix;
        this.keywordSuffix = keywordSuffix;
        this.aliasPrefix = aliasPrefix;
        this.aliasSuffix = aliasSuffix;
    }

    /**
     * Resolve the data source type from a JDBC URL.
     *
     * @param jdbcUrl full JDBC URL, e.g. {@code jdbc:mysql://host:3306/db}
     * @return the matching type, or {@code null} when the URL scheme is unknown
     * @throws RuntimeException when the scheme is recognized but the driver
     *         class is not present on the classpath
     */
    public static DataTypeEnum urlOf(String jdbcUrl) {
        String url = jdbcUrl.toLowerCase().trim();
        for (DataTypeEnum dataTypeEnum : values()) {
            if (url.startsWith(JDBC_URL_PREFIX + dataTypeEnum.feature)) {
                try {
                    // Fail fast when the driver jar is missing. (Class.forName
                    // never returns null, so the old null-check was dead code.)
                    Class.forName(dataTypeEnum.getDriver());
                } catch (ClassNotFoundException e) {
                    // Preserve the cause — it was dropped before.
                    throw new RuntimeException("Unable to get driver instance: " + jdbcUrl, e);
                }
                return dataTypeEnum;
            }
        }
        return null;
    }

    public String getFeature() {
        return feature;
    }

    public String getDesc() {
        return desc;
    }

    public String getDriver() {
        return driver;
    }

    public String getKeywordPrefix() {
        return keywordPrefix;
    }

    public String getKeywordSuffix() {
        return keywordSuffix;
    }

    public String getAliasPrefix() {
        return aliasPrefix;
    }

    public String getAliasSuffix() {
        return aliasSuffix;
    }
}
spider-flow-web/src/main/java/com/zq/spiderflow/util/SqlUtils.java
0 → 100644
View file @
353d8ce0
/*
* Copyright 2019-2020 Zheng Jie
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package
com
.
zq
.
spiderflow
.
util
;
import
cn.hutool.crypto.SecureUtil
;
import
com.alibaba.druid.pool.DruidDataSource
;
import
com.alibaba.druid.util.StringUtils
;
import
com.google.common.collect.Lists
;
import
lombok.extern.slf4j.Slf4j
;
import
javax.sql.DataSource
;
import
java.io.BufferedReader
;
import
java.io.File
;
import
java.io.FileInputStream
;
import
java.io.InputStreamReader
;
import
java.nio.charset.StandardCharsets
;
import
java.sql.*
;
import
java.util.HashMap
;
import
java.util.List
;
import
java.util.Map
;
/**
* @author /
*/
@Slf4j
public
class
SqlUtils
{
public
static
final
String
COLON
=
":"
;
private
static
volatile
Map
<
String
,
DruidDataSource
>
map
=
new
HashMap
<>();
private
static
String
getKey
(
String
jdbcUrl
,
String
username
,
String
password
)
{
StringBuilder
sb
=
new
StringBuilder
();
if
(!
StringUtils
.
isEmpty
(
username
))
{
sb
.
append
(
username
);
}
if
(!
StringUtils
.
isEmpty
(
password
))
{
sb
.
append
(
COLON
).
append
(
password
);
}
sb
.
append
(
COLON
).
append
(
jdbcUrl
.
trim
());
return
SecureUtil
.
md5
(
sb
.
toString
());
}
/**
* 获取数据源
*
* @param jdbcUrl /
* @param userName /
* @param password /
* @return DataSource
*/
private
static
DataSource
getDataSource
(
String
jdbcUrl
,
String
userName
,
String
password
)
{
String
key
=
getKey
(
jdbcUrl
,
userName
,
password
);
if
(!
map
.
containsKey
(
key
)
||
null
==
map
.
get
(
key
))
{
DruidDataSource
druidDataSource
=
new
DruidDataSource
();
String
className
;
try
{
className
=
DriverManager
.
getDriver
(
jdbcUrl
.
trim
()).
getClass
().
getName
();
}
catch
(
SQLException
e
)
{
throw
new
RuntimeException
(
"Get class name error: ="
+
jdbcUrl
);
}
if
(
StringUtils
.
isEmpty
(
className
))
{
DataTypeEnum
dataTypeEnum
=
DataTypeEnum
.
urlOf
(
jdbcUrl
);
if
(
null
==
dataTypeEnum
)
{
throw
new
RuntimeException
(
"Not supported data type: jdbcUrl="
+
jdbcUrl
);
}
druidDataSource
.
setDriverClassName
(
dataTypeEnum
.
getDriver
());
}
else
{
druidDataSource
.
setDriverClassName
(
className
);
}
druidDataSource
.
setUrl
(
jdbcUrl
);
druidDataSource
.
setUsername
(
userName
);
druidDataSource
.
setPassword
(
password
);
// 配置获取连接等待超时的时间
druidDataSource
.
setMaxWait
(
3000
);
// 配置初始化大小、最小、最大
druidDataSource
.
setInitialSize
(
1
);
druidDataSource
.
setMinIdle
(
1
);
druidDataSource
.
setMaxActive
(
1
);
// 配置间隔多久才进行一次检测需要关闭的空闲连接,单位是毫秒
druidDataSource
.
setTimeBetweenEvictionRunsMillis
(
50000
);
// 配置一旦重试多次失败后等待多久再继续重试连接,单位是毫秒
druidDataSource
.
setTimeBetweenConnectErrorMillis
(
18000
);
// 配置一个连接在池中最小生存的时间,单位是毫秒
druidDataSource
.
setMinEvictableIdleTimeMillis
(
300000
);
// 这个特性能解决 MySQL 服务器8小时关闭连接的问题
druidDataSource
.
setMaxEvictableIdleTimeMillis
(
25200000
);
try
{
druidDataSource
.
init
();
}
catch
(
SQLException
e
)
{
log
.
error
(
"Exception during pool initialization"
,
e
);
throw
new
RuntimeException
(
e
.
getMessage
());
}
map
.
put
(
key
,
druidDataSource
);
}
return
map
.
get
(
key
);
}
private
static
Connection
getConnection
(
String
jdbcUrl
,
String
userName
,
String
password
)
{
DataSource
dataSource
=
getDataSource
(
jdbcUrl
,
userName
,
password
);
Connection
connection
=
null
;
try
{
connection
=
dataSource
.
getConnection
();
}
catch
(
Exception
ignored
)
{
}
try
{
int
timeOut
=
5
;
if
(
null
==
connection
||
connection
.
isClosed
()
||
!
connection
.
isValid
(
timeOut
))
{
log
.
info
(
"connection is closed or invalid, retry get connection!"
);
connection
=
dataSource
.
getConnection
();
}
}
catch
(
Exception
e
)
{
log
.
error
(
"create connection error, jdbcUrl: {}"
,
jdbcUrl
);
throw
new
RuntimeException
(
"create connection error, jdbcUrl: "
+
jdbcUrl
);
}
return
connection
;
}
private
static
void
releaseConnection
(
Connection
connection
)
{
if
(
null
!=
connection
)
{
try
{
connection
.
close
();
}
catch
(
Exception
e
)
{
e
.
printStackTrace
();
log
.
error
(
"connection close error:"
+
e
.
getMessage
());
}
}
}
public
static
void
closeResult
(
ResultSet
rs
)
{
if
(
rs
!=
null
)
{
try
{
rs
.
close
();
}
catch
(
Exception
e
)
{
e
.
printStackTrace
();
}
}
}
public
static
boolean
testConnection
(
String
jdbcUrl
,
String
userName
,
String
password
)
{
Connection
connection
=
null
;
try
{
connection
=
getConnection
(
jdbcUrl
,
userName
,
password
);
if
(
null
!=
connection
)
{
return
true
;
}
}
catch
(
Exception
e
)
{
log
.
info
(
"Get connection failed:"
+
e
.
getMessage
());
}
finally
{
releaseConnection
(
connection
);
}
return
false
;
}
public
static
String
executeFile
(
String
jdbcUrl
,
String
userName
,
String
password
,
File
sqlFile
)
{
Connection
connection
=
getConnection
(
jdbcUrl
,
userName
,
password
);
try
{
batchExecute
(
connection
,
readSqlList
(
sqlFile
));
}
catch
(
Exception
e
)
{
log
.
error
(
"sql脚本执行发生异常:{}"
,
e
.
getMessage
());
return
e
.
getMessage
();
}
finally
{
releaseConnection
(
connection
);
}
return
"success"
;
}
/**
* 批量执行sql
*
* @param connection /
* @param sqlList /
*/
public
static
void
batchExecute
(
Connection
connection
,
List
<
String
>
sqlList
)
throws
SQLException
{
Statement
st
=
connection
.
createStatement
();
for
(
String
sql
:
sqlList
)
{
if
(
sql
.
endsWith
(
";"
))
{
sql
=
sql
.
substring
(
0
,
sql
.
length
()
-
1
);
}
st
.
addBatch
(
sql
);
}
st
.
executeBatch
();
}
/**
* 将文件中的sql语句以;为单位读取到列表中
*
* @param sqlFile /
* @return /
* @throws Exception e
*/
private
static
List
<
String
>
readSqlList
(
File
sqlFile
)
throws
Exception
{
List
<
String
>
sqlList
=
Lists
.
newArrayList
();
StringBuilder
sb
=
new
StringBuilder
();
try
(
BufferedReader
reader
=
new
BufferedReader
(
new
InputStreamReader
(
new
FileInputStream
(
sqlFile
),
StandardCharsets
.
UTF_8
)))
{
String
tmp
;
while
((
tmp
=
reader
.
readLine
())
!=
null
)
{
log
.
info
(
"line:{}"
,
tmp
);
if
(
tmp
.
endsWith
(
";"
))
{
sb
.
append
(
tmp
);
sqlList
.
add
(
sb
.
toString
());
sb
.
delete
(
0
,
sb
.
length
());
}
else
{
sb
.
append
(
tmp
);
}
}
if
(!
""
.
endsWith
(
sb
.
toString
().
trim
()))
{
sqlList
.
add
(
sb
.
toString
());
}
}
return
sqlList
;
}
}
spider-flow-web/src/main/java/com/zq/spiderflow/vo/MetaDataReq.java
View file @
353d8ce0
...
...
@@ -57,4 +57,6 @@ public class MetaDataReq extends PageReqVo {
    /** Metadata category id — presumably meta_data_category.id; confirm against dao. */
    private Long categoryId;
    /** Id of the data source the metadata is queried against — TODO confirm. */
    private Long queryDbId;
    /** Physical table name; used by getMetaByTable to filter meta_data_mapping.table_sqlname. */
    private String tableSqlName;
}
spider-flow-web/src/main/resources/mapper/MetaDataMappingMapper.xml
View file @
353d8ce0
...
...
@@ -43,4 +43,14 @@
LEFT JOIN meta_data_category y ON m.category_id = y.id
WHERE y.id=#{id}
</select>
    <!-- Metadata rows mapped to one physical table: join meta_data to
         meta_data_mapping on metadata_id and filter on table_sqlname.
         Result is hydrated into the request/response VO MetaDataReq. -->
    <select id="getMetaByTable" resultType="com.zq.spiderflow.vo.MetaDataReq">
        SELECT
        a.*,
        p.table_sqlname
        FROM
        meta_data a
        LEFT JOIN meta_data_mapping p ON a.id = p.metadata_id
        WHERE
        p.table_sqlname = #{tableSqlName}
    </select>
</mapper>
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment