Druid calls this method, but it belongs to the Hive JDBC driver, which simply never implemented it:
java.sql.SQLException: Method not supported
at org.apache.hive.jdbc.HiveStatement.getMaxFieldSize(HiveStatement.java:498)
at com.alibaba.druid.filter.FilterChainImpl.statement_getMaxFieldSize(FilterChainImpl.java:2915)
at com.alibaba.druid.filter.FilterAdapter.statement_getMaxFieldSize(FilterAdapter.java:2562)
at com.alibaba.druid.filter.FilterChainImpl.statement_getMaxFieldSize(FilterChainImpl.java:2913)
at com.alibaba.druid.proxy.jdbc.StatementProxyImpl.getMaxFieldSize(StatementProxyImpl.java:317)
at com.alibaba.druid.pool.DruidPooledPreparedStatement.<init>(DruidPooledPreparedStatement.java:80)
at com.alibaba.druid.pool.DruidPooledConnection.prepareStatement(DruidPooledConnection.java:359)
at org.springframework.jdbc.core.JdbcTemplate$SimplePreparedStatementCreator.createPreparedStatement(JdbcTemplate.java:1556)
at org.springframework.jdbc.core.JdbcTemplate.execute(JdbcTemplate.java:615)
at org.springframework.jdbc.core.JdbcTemplate.query(JdbcTemplate.java:669)
at org.springframework.jdbc.core.JdbcTemplate.query(JdbcTemplate.java:700)
at org.springframework.jdbc.core.JdbcTemplate.query(JdbcTemplate.java:712)
at org.springframework.jdbc.core.JdbcTemplate.query(JdbcTemplate.java:763)
at com.hcttop.veem.service.impl.NbcServiceImpl.pivotQuery(NbcServiceImpl.java:224)
at com.hcttop.veem.controller.NroadmobileeqinfoController.pivotQuery(NroadmobileeqinfoController.java:63)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.springframework.web.method.support.InvocableHandlerMetho
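Reading the trace bottom-up: when Druid wraps a freshly prepared statement, DruidPooledPreparedStatement's constructor asks it for its max field size, the call runs through Druid's filter chain, and hive-jdbc's HiveStatement.getMaxFieldSize simply throws SQLException("Method not supported"). Whether this surfaces as a thrown exception or only as an error logged by the pool, one way to silence it without touching the driver is a custom Druid filter that intercepts exactly the chain call visible in the trace and falls back to a default. This is only a sketch under that assumption; HiveUnsupportedMethodFilter is my own name, not anything from this thread:

    import java.sql.SQLException;

    import com.alibaba.druid.filter.FilterAdapter;
    import com.alibaba.druid.filter.FilterChain;
    import com.alibaba.druid.proxy.jdbc.StatementProxy;

    // Hypothetical workaround: catch the unsupported call at the filter-chain
    // level and report 0, which JDBC defines as "no limit".
    public class HiveUnsupportedMethodFilter extends FilterAdapter {

        @Override
        public int statement_getMaxFieldSize(FilterChain chain, StatementProxy statement) throws SQLException {
            try {
                return chain.statement_getMaxFieldSize(statement);
            } catch (SQLException e) {
                // hive-jdbc 2.1.1 throws "Method not supported" here
                return 0;
            }
        }
    }

The filter would then be registered on the Hive datasource with setProxyFilters (see the sketch after the configuration class below).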
Dependencies:
<dependencies>
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
        <version>2.1.1</version>
        <exclusions>
            <exclusion>
                <groupId>org.eclipse.jetty.aggregate</groupId>
                <artifactId>*</artifactId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>com.alibaba</groupId>
        <artifactId>druid</artifactId>
        <version>1.1.22</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.7.1</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-mapreduce-client-core</artifactId>
        <version>2.7.1</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-mapreduce-client-common</artifactId>
        <version>2.7.1</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.7.1</version>
    </dependency>
</dependencies>
yml file:
spring:
  datasource:
    mysqlMain: # primary MySQL datasource
      password: Interface@123
      type: com.alibaba.druid.pool.DruidDataSource
      driver-class-name: com.mysql.cj.jdbc.Driver
      url: jdbc:mysql://10.10.20.10:3306/xz_environment_database?useUnicode=true&characterEncoding=utf-8&serverTimezone=GMT%2B8
      username: user_interface
    hive: # Hive datasource
      type: com.alibaba.druid.pool.DruidDataSource
      url: jdbc:hive2://172.16.127.12:10000/xz_environment_database
      username:
      password:
      driver-class-name: org.apache.hive.jdbc.HiveDriver
    common-config: # shared pool settings, applied to every datasource
      initialSize: 1
      minIdle: 1
      maxIdle: 5
      maxActive: 50
      maxWait: 10000
      timeBetweenEvictionRunsMillis: 10000
      minEvictableIdleTimeMillis: 300000
      validationQuery: select 'x'
      testWhileIdle: true
      testOnBorrow: false
      testOnReturn: false
      poolPreparedStatements: true
      maxOpenPreparedStatements: 20
      filters: stat
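Note that common-config applies poolPreparedStatements: true to the Hive pool as well. Judging by the stack trace, it is the pooled-statement path in DruidPooledPreparedStatement that reads getMaxFieldSize, so this shared setting looks like the trigger; a Hive-only override is sketched after the configuration class below.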
Datasource configuration class:
@Configuration
@EnableConfigurationProperties({DataSourceProperties.class, DataSourceCommonProperties.class}) // register the properties classes in the bean container so @ConfigurationProperties takes effect
public class HiveDruidConfig {

    private static Logger logger = LoggerFactory.getLogger(HiveDruidConfig.class);

    @Autowired
    private DataSourceProperties dataSourceProperties;

    @Autowired
    private DataSourceCommonProperties dataSourceCommonProperties;

    @Bean("hiveDruidDataSource") // create the bean instance
    @Qualifier("hiveDruidDataSource") // qualifier for injection
    public DataSource dataSource() {
        DruidDataSource datasource = new DruidDataSource();
        // datasource-specific settings
        datasource.setUrl(dataSourceProperties.getHive().get("url"));
        datasource.setUsername(dataSourceProperties.getHive().get("username"));
        datasource.setPassword(dataSourceProperties.getHive().get("password"));
        datasource.setDriverClassName(dataSourceProperties.getHive().get("driver-class-name"));
        // shared pool settings
        datasource.setInitialSize(dataSourceCommonProperties.getInitialSize());
        datasource.setMinIdle(dataSourceCommonProperties.getMinIdle());
        datasource.setMaxActive(dataSourceCommonProperties.getMaxActive());
        datasource.setMaxWait(dataSourceCommonProperties.getMaxWait());
        datasource.setTimeBetweenEvictionRunsMillis(dataSourceCommonProperties.getTimeBetweenEvictionRunsMillis());
        datasource.setMinEvictableIdleTimeMillis(dataSourceCommonProperties.getMinEvictableIdleTimeMillis());
        datasource.setValidationQuery(dataSourceCommonProperties.getValidationQuery());
        datasource.setTestWhileIdle(dataSourceCommonProperties.isTestWhileIdle());
        datasource.setTestOnBorrow(dataSourceCommonProperties.isTestOnBorrow());
        datasource.setTestOnReturn(dataSourceCommonProperties.isTestOnReturn());
        datasource.setPoolPreparedStatements(dataSourceCommonProperties.isPoolPreparedStatements());
        try {
            datasource.setFilters(dataSourceCommonProperties.getFilters());
        } catch (SQLException e) {
            logger.error("Druid configuration initialization filter error.", e);
        }
        return datasource;
    }
}
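Given the above, a commonly suggested workaround (an assumption on my part, not something confirmed in this thread) is to override the shared PSCache setting for the Hive pool only, or to register the fallback filter from earlier. A minimal helper sketch; the class and method names are hypothetical:

    import java.util.Collections;

    import com.alibaba.druid.filter.Filter;
    import com.alibaba.druid.pool.DruidDataSource;

    // Hypothetical helper: call one of these on the Hive datasource at the end
    // of dataSource() above, before returning it.
    public final class HivePoolTweaks {

        // Disable PSCache for this pool only, overriding common-config, so the
        // pooled-statement path that queries getMaxFieldSize is never taken.
        public static void disablePsCache(DruidDataSource datasource) {
            datasource.setPoolPreparedStatements(false);
        }

        // Alternative: keep PSCache but swallow the unsupported call with the
        // HiveUnsupportedMethodFilter sketched under the stack trace above.
        public static void addFallbackFilter(DruidDataSource datasource) {
            datasource.setProxyFilters(Collections.<Filter>singletonList(new HiveUnsupportedMethodFilter()));
        }
    }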
/**
 * Extended, shared pool settings applied to all datasources.
 * @author wdd
 * @date 2019-11-04
 */
@Data
@ConfigurationProperties(prefix = DataSourceCommonProperties.DS, ignoreUnknownFields = false)
public class DataSourceCommonProperties {

    final static String DS = "spring.datasource.common-config";

    private int initialSize = 10;
    private int minIdle;
    private int maxIdle;
    private int maxActive;
    private int maxWait;
    private int timeBetweenEvictionRunsMillis;
    private int minEvictableIdleTimeMillis;
    private String validationQuery;
    private boolean testWhileIdle;
    private boolean testOnBorrow;
    private boolean testOnReturn;
    private boolean poolPreparedStatements;
    private int maxOpenPreparedStatements;
    private String filters;
    private String mapperLocations;
    private String typeAliasPackage;
}
/**
 * Unified properties holder that reads the datasource sections from the config file.
 * @author wdd
 * @date 2019-11-04
 */
@Data
@ConfigurationProperties(prefix = DataSourceProperties.DS, ignoreUnknownFields = false)
public class DataSourceProperties {

    final static String DS = "spring.datasource";

    private Map<String, String> mysqlMain;
    private Map<String, String> hive;
    private Map<String, String> commonConfig;
}
/**
 * Wires the Hive datasource into a JdbcTemplate.
 * @author wdd
 * @date 2019-11-04
 */
@Repository
public class HiveJdbcBaseDaoImpl {

    private JdbcTemplate jdbcTemplate;

    public JdbcTemplate getJdbcTemplate() {
        return jdbcTemplate;
    }

    @Autowired
    public void setJdbcTemplate(@Qualifier("hiveDruidDataSource") DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }
}
/**
 * Wires the MySQL datasource into a JdbcTemplate.
 * @author wdd
 * @date 2019-11-04
 */
@Repository
public class MysqlMainJdbcBaseDaoImpl {

    private JdbcTemplate jdbcTemplate;

    public JdbcTemplate getJdbcTemplate() {
        return jdbcTemplate;
    }

    @Autowired
    public void setJdbcTemplate(@Qualifier("mysqlDruidDataSource") DataSource dataSource) {
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }
}
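A side note: the mysqlDruidDataSource bean that MysqlMainJdbcBaseDaoImpl injects is never shown in the post, and the context would fail to start unless it is defined elsewhere. Presumably it mirrors the Hive bean, roughly like this (hypothetical sketch inside HiveDruidConfig, reusing the mysqlMain map from DataSourceProperties):

    @Bean("mysqlDruidDataSource")
    @Qualifier("mysqlDruidDataSource")
    public DataSource mysqlDataSource() {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(dataSourceProperties.getMysqlMain().get("url"));
        datasource.setUsername(dataSourceProperties.getMysqlMain().get("username"));
        datasource.setPassword(dataSourceProperties.getMysqlMain().get("password"));
        datasource.setDriverClassName(dataSourceProperties.getMysqlMain().get("driver-class-name"));
        // ...same common-config setters as in the Hive bean...
        return datasource;
    }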
Calling code:

    List<PivotExcel> hiveList = hiveJdbcBaseDaoImpl.getJdbcTemplate().query(sql, new Object[]{}, new BeanPropertyRowMapper<PivotExcel>(PivotExcel.class));

which again fails with:

java.sql.SQLException: Method not supported
Is the error only reported by the utility class? Is there no error message from your own custom method?
I ran into the same problem. OP, did you ever solve it?