作者:相信奇迹的发生 | 来源:互联网 | 2023-07-28 19:37
本文整理了 Java 中 org.apache.hadoop.mapreduce.lib.db.DBConfiguration.getOutputFieldNames() 方法的一些代码示例，展示了 DBConfiguration.getOutputFieldNames() 的具体用法。这些代码示例主要来源于 GitHub / Stack Overflow / Maven 等平台，是从一些精选项目中提取出来的代码，具有较强的参考意义，能在一定程度上帮助到你。DBConfiguration.getOutputFieldNames() 方法的具体详情如下：
包路径:org.apache.hadoop.mapreduce.lib.db.DBConfiguration
类名称:DBConfiguration
方法名:getOutputFieldNames
DBConfiguration.getOutputFieldNames介绍
暂无
代码示例
代码示例来源:origin: io.hops/hadoop-mapreduce-client-core
/** {@inheritDoc} */
public RecordWriter getRecordWriter(TaskAttemptContext context)
    throws IOException {
  // Pull the output table/field configuration from the job. When no explicit
  // field names were configured, fall back to a placeholder array sized by
  // the configured field count (constructQuery only needs the length then).
  DBConfiguration dbConf = new DBConfiguration(context.getConfiguration());
  String tableName = dbConf.getOutputTableName();
  String[] fieldNames = dbConf.getOutputFieldNames();
  if (fieldNames == null) {
    fieldNames = new String[dbConf.getOutputFieldCount()];
  }
  try {
    Connection connection = dbConf.getConnection();
    DatabaseMetaData dbMeta = connection.getMetaData();
    // Cache the vendor name so constructQuery() can emit dialect-specific SQL.
    this.dbProductName = dbMeta.getDatabaseProductName().toUpperCase();
    PreparedStatement statement =
        connection.prepareStatement(constructQuery(tableName, fieldNames));
    return new DBRecordWriter(connection, statement);
  } catch (Exception ex) {
    // Preserve the cause (not just the message) so the underlying
    // SQL/driver stack trace is not lost.
    throw new IOException(ex);
  }
}
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
/** {@inheritDoc} */
public RecordWriter getRecordWriter(TaskAttemptContext context)
    throws IOException {
  // Pull the output table/field configuration from the job. When no explicit
  // field names were configured, fall back to a placeholder array sized by
  // the configured field count (constructQuery only needs the length then).
  DBConfiguration dbConf = new DBConfiguration(context.getConfiguration());
  String tableName = dbConf.getOutputTableName();
  String[] fieldNames = dbConf.getOutputFieldNames();
  if (fieldNames == null) {
    fieldNames = new String[dbConf.getOutputFieldCount()];
  }
  try {
    Connection connection = dbConf.getConnection();
    PreparedStatement statement =
        connection.prepareStatement(constructQuery(tableName, fieldNames));
    return new DBRecordWriter(connection, statement);
  } catch (Exception ex) {
    // Preserve the cause (not just the message) so the underlying
    // SQL/driver stack trace is not lost.
    throw new IOException(ex);
  }
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core
/** {@inheritDoc} */
public RecordWriter getRecordWriter(TaskAttemptContext context)
    throws IOException {
  // Read the target table and column list from the job configuration.
  // A null column list means only the count was configured; use an
  // equally-sized placeholder array so constructQuery can build the SQL.
  DBConfiguration dbConf = new DBConfiguration(context.getConfiguration());
  String tableName = dbConf.getOutputTableName();
  String[] fieldNames = dbConf.getOutputFieldNames();
  if (fieldNames == null) {
    fieldNames = new String[dbConf.getOutputFieldCount()];
  }
  try {
    Connection connection = dbConf.getConnection();
    PreparedStatement statement =
        connection.prepareStatement(constructQuery(tableName, fieldNames));
    return new DBRecordWriter(connection, statement);
  } catch (Exception ex) {
    // Wrap with the cause attached so callers see the full stack trace,
    // not only the (possibly null) message string.
    throw new IOException(ex);
  }
}
代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-core
/** {@inheritDoc} */
public RecordWriter getRecordWriter(TaskAttemptContext context)
    throws IOException {
  // Read the target table and column list from the job configuration.
  // A null column list means only the count was configured; use an
  // equally-sized placeholder array so constructQuery can build the SQL.
  DBConfiguration dbConf = new DBConfiguration(context.getConfiguration());
  String tableName = dbConf.getOutputTableName();
  String[] fieldNames = dbConf.getOutputFieldNames();
  if (fieldNames == null) {
    fieldNames = new String[dbConf.getOutputFieldCount()];
  }
  try {
    Connection connection = dbConf.getConnection();
    PreparedStatement statement =
        connection.prepareStatement(constructQuery(tableName, fieldNames));
    return new DBRecordWriter(connection, statement);
  } catch (Exception ex) {
    // Wrap with the cause attached so callers see the full stack trace,
    // not only the (possibly null) message string.
    throw new IOException(ex);
  }
}
代码示例来源:origin: org.apache.hadoop/hadoop-mapred
/** {@inheritDoc} */
public RecordWriter getRecordWriter(TaskAttemptContext context)
    throws IOException {
  // Resolve output table and columns from the job's DBConfiguration.
  // getOutputFieldNames() may legitimately return null when only a field
  // count was configured; substitute a placeholder array of that size.
  DBConfiguration dbConf = new DBConfiguration(context.getConfiguration());
  String tableName = dbConf.getOutputTableName();
  String[] fieldNames = dbConf.getOutputFieldNames();
  if (fieldNames == null) {
    fieldNames = new String[dbConf.getOutputFieldCount()];
  }
  try {
    Connection connection = dbConf.getConnection();
    PreparedStatement statement =
        connection.prepareStatement(constructQuery(tableName, fieldNames));
    return new DBRecordWriter(connection, statement);
  } catch (Exception ex) {
    // Chain the original exception instead of flattening it to a message,
    // so diagnosis of JDBC failures is possible.
    throw new IOException(ex);
  }
}
代码示例来源:origin: org.apache.hadoop/hadoop-mapred-test
/**
 * Verifies DBOutputFormat.setOutput round-trips through DBConfiguration:
 * first with explicit field names, then with only a field count (in which
 * case getOutputFieldNames() must return null and the count must match).
 */
public void testSetOutput() throws IOException {
  Job job = Job.getInstance(new Configuration());
  DBOutputFormat.setOutput(job, "hadoop_output", fieldNames);
  DBConfiguration dbConf = new DBConfiguration(job.getConfiguration());
  String actual = format.constructQuery(dbConf.getOutputTableName(),
      dbConf.getOutputFieldNames());
  assertEquals(expected, actual);

  // Count-only configuration: no names are stored, only how many.
  job = Job.getInstance(new Configuration());
  dbConf = new DBConfiguration(job.getConfiguration());
  DBOutputFormat.setOutput(job, "hadoop_output", nullFieldNames.length);
  assertNull(dbConf.getOutputFieldNames());
  assertEquals(nullFieldNames.length, dbConf.getOutputFieldCount());
  actual = format.constructQuery(dbConf.getOutputTableName(),
      new String[dbConf.getOutputFieldCount()]);
  assertEquals(nullExpected, actual);
}
代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-jobclient
/**
 * Verifies DBOutputFormat.setOutput round-trips through DBConfiguration:
 * first with explicit field names, then with only a field count (in which
 * case getOutputFieldNames() must return null and the count must match).
 */
public void testSetOutput() throws IOException {
  Job job = Job.getInstance(new Configuration());
  DBOutputFormat.setOutput(job, "hadoop_output", fieldNames);
  DBConfiguration dbConf = new DBConfiguration(job.getConfiguration());
  String actual = format.constructQuery(dbConf.getOutputTableName(),
      dbConf.getOutputFieldNames());
  assertEquals(expected, actual);

  // Count-only configuration: no names are stored, only how many.
  job = Job.getInstance(new Configuration());
  dbConf = new DBConfiguration(job.getConfiguration());
  DBOutputFormat.setOutput(job, "hadoop_output", nullFieldNames.length);
  assertNull(dbConf.getOutputFieldNames());
  assertEquals(nullFieldNames.length, dbConf.getOutputFieldCount());
  actual = format.constructQuery(dbConf.getOutputTableName(),
      new String[dbConf.getOutputFieldCount()]);
  assertEquals(nullExpected, actual);
}