
Typical Usage and Code Examples of the Java LargeObjectLoader Class


This article collects typical usage of the Java class com.cloudera.sqoop.lib.LargeObjectLoader. If you are wondering what LargeObjectLoader is for, how to use it, or what real code that uses it looks like, the curated class examples below should help.

The LargeObjectLoader class belongs to the com.cloudera.sqoop.lib package. The sections below show 17 code examples of the class, sorted by popularity by default.
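In Sqoop imports, LargeObjectLoader materializes BLOB/CLOB columns, keeping small values inline in the record and spilling large ones to files under a work directory. Most examples below follow the same mapper lifecycle: build the loader in setup(), hand it to each record's loadLargeObjects(), and close it in cleanup(). Here is a minimal sketch of that lifecycle; the class name and the Mapper type parameters are illustrative assumptions, not code from any of the projects below.

import java.io.IOException;
import java.sql.SQLException;

import com.cloudera.sqoop.lib.LargeObjectLoader;
import com.cloudera.sqoop.lib.SqoopRecord;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class LobAwareImportMapper
    extends Mapper<LongWritable, SqoopRecord, SqoopRecord, NullWritable> {

  private LargeObjectLoader lobLoader;

  @Override
  protected void setup(Context context)
      throws IOException, InterruptedException {
    // LOBs above the inline limit are spilled under this work directory.
    lobLoader = new LargeObjectLoader(context.getConfiguration(),
        FileOutputFormat.getWorkOutputPath(context));
  }

  @Override
  protected void map(LongWritable key, SqoopRecord val, Context context)
      throws IOException, InterruptedException {
    try {
      // Resolve BLOB/CLOB columns before the record is written out.
      val.loadLargeObjects(lobLoader);
    } catch (SQLException sqlE) {
      throw new IOException(sqlE);
    }
    context.write(val, NullWritable.get());
  }

  @Override
  protected void cleanup(Context context) throws IOException {
    if (null != lobLoader) {
      lobLoader.close();  // flush any open external LOB file
    }
  }
}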

Example 1: setup

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
@Override
protected void setup(Context context)
    throws IOException, InterruptedException {
  this.conf = context.getConfiguration();
  this.lobLoader = new LargeObjectLoader(this.conf,
      new Path(this.conf.get("sqoop.hbase.lob.extern.dir",
          "/tmp/sqoop-hbase-" + context.getTaskAttemptID())));

  // Get the implementation of PutTransformer to use.
  // By default, we call toString() on every non-null field.
  Class<? extends PutTransformer> xformerClass =
      (Class<? extends PutTransformer>)
      this.conf.getClass(TRANSFORMER_CLASS_KEY, ToStringPutTransformer.class);
  this.putTransformer = (PutTransformer)
      ReflectionUtils.newInstance(xformerClass, this.conf);
  if (null == putTransformer) {
    throw new RuntimeException("Could not instantiate PutTransformer.");
  }
  this.putTransformer.setColumnFamily(conf.get(COL_FAMILY_KEY, null));
  this.putTransformer.setRowKeyColumn(conf.get(ROW_KEY_COLUMN_KEY, null));
}
 

Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 20, Source: HBaseBulkImportMapper.java
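The mapper above spills large objects to a per-task-attempt directory resolved from the sqoop.hbase.lob.extern.dir key, falling back to a path under /tmp. Since the code reads the key from the job configuration, the default can be overridden before the import runs; a brief sketch, with an assumed HDFS path chosen purely for illustration:

Configuration conf = job.getConfiguration();
// Redirect spilled LOB files away from the /tmp fallback.
conf.set("sqoop.hbase.lob.extern.dir", "/user/sqoop/hbase-lobs");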

Example 2: setup

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
@Override
protected void setup(Context context)
    throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();
  schema = ParquetJob.getAvroSchema(conf);
  bigDecimalFormatString = conf.getBoolean(
      ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT,
      ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT_DEFAULT);
  lobLoader = new LargeObjectLoader(conf,
      new Path(conf.get("sqoop.kite.lob.extern.dir",
          "/tmp/sqoop-parquet-" + context.getTaskAttemptID())));
}
 

Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 11, Source: ParquetImportMapper.java

Example 3: setup

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
@Override
protected void setup(Context context)
    throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();
  schema = AvroJob.getMapOutputSchema(conf);
  lobLoader = new LargeObjectLoader(conf, FileOutputFormat.getWorkOutputPath(context));
  bigDecimalFormatString = conf.getBoolean(
      ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT,
      ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT_DEFAULT);
}
 

Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 11, Source: AvroImportMapper.java

Example 4: setup

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
@Override
protected void setup(Context context)
    throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();
  schema = AvroJob.getMapOutputSchema(conf);
  lobLoader = new LargeObjectLoader(conf,
      FileOutputFormat.getWorkOutputPath(context));
  bigDecimalFormatString = conf.getBoolean(
      ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT,
      ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT_DEFAULT);
}
 

Developer: unicredit, Project: zSqoop, Lines: 12, Source: AvroImportMapper.java

Example 5: setup

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
@Override
protected void setup(Context context)
    throws IOException, InterruptedException {
  schema = AvroJob.getMapOutputSchema(context.getConfiguration());
  lobLoader = new LargeObjectLoader(context.getConfiguration(),
      FileOutputFormat.getWorkOutputPath(context));
}
 

Developer: infinidb, Project: sqoop, Lines: 8, Source: AvroImportMapper.java

Example 6: setup

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
@Override
protected void setup(Context context)
    throws IOException, InterruptedException {
  this.lobLoader = new LargeObjectLoader(context.getConfiguration(),
      FileOutputFormat.getWorkOutputPath(context));
}
 

Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 6, Source: SequenceFileImportMapper.java

Example 7: SqoopHCatImportHelper

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
public SqoopHCatImportHelper(Configuration conf) throws IOException,
  InterruptedException {

  String inputJobInfoStr = conf.get(HCatConstants.HCAT_KEY_JOB_INFO);
  jobInfo = (InputJobInfo) HCatUtil.deserialize(inputJobInfoStr);
  dataColsSchema = jobInfo.getTableInfo().getDataColumns();
  partitionSchema = jobInfo.getTableInfo().getPartitionColumns();
  StringBuilder storerInfoStr = new StringBuilder(1024);
  StorerInfo storerInfo = jobInfo.getTableInfo().getStorerInfo();
  storerInfoStr.append("HCatalog Storer Info : ").append("\n\tHandler = ")
    .append(storerInfo.getStorageHandlerClass())
    .append("\n\tInput format class = ").append(storerInfo.getIfClass())
    .append("\n\tOutput format class = ").append(storerInfo.getOfClass())
    .append("\n\tSerde class = ").append(storerInfo.getSerdeClass());
  Properties storerProperties = storerInfo.getProperties();
  if (!storerProperties.isEmpty()) {
    storerInfoStr.append("\nStorer properties ");
    for (Map.Entry<Object, Object> entry : storerProperties.entrySet()) {
      String key = (String) entry.getKey();
      Object val = entry.getValue();
      storerInfoStr.append("\n\t").append(key).append('=').append(val);
    }
  }
  storerInfoStr.append("\n");
  LOG.info(storerInfoStr);

  hCatFullTableSchema = new HCatSchema(dataColsSchema.getFields());
  for (HCatFieldSchema hfs : partitionSchema.getFields()) {
    hCatFullTableSchema.append(hfs);
  }
  fieldCount = hCatFullTableSchema.size();
  lobLoader = new LargeObjectLoader(conf, new Path(jobInfo.getTableInfo()
    .getTableLocation()));
  bigDecimalFormatString = conf.getBoolean(
    ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT,
    ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT_DEFAULT);
  debugHCatImportMapper = conf.getBoolean(
    SqoopHCatUtilities.DEBUG_HCAT_IMPORT_MAPPER_PROP, false);
  IntWritable[] delimChars = DefaultStringifier.loadArray(conf,
    SqoopHCatUtilities.HIVE_DELIMITERS_TO_REPLACE_PROP, IntWritable.class);
  hiveDelimiters = new DelimiterSet((char) delimChars[0].get(),
    (char) delimChars[1].get(), (char) delimChars[2].get(),
    (char) delimChars[3].get(), delimChars[4].get() == 1);
  hiveDelimsReplacement = conf
    .get(SqoopHCatUtilities.HIVE_DELIMITERS_REPLACEMENT_PROP);
  if (hiveDelimsReplacement == null) {
    hiveDelimsReplacement = "";
  }
  doHiveDelimsReplacement = Boolean.valueOf(conf
    .get(SqoopHCatUtilities.HIVE_DELIMITERS_REPLACEMENT_ENABLED_PROP));

  IntWritable[] fPos = DefaultStringifier.loadArray(conf,
    SqoopHCatUtilities.HCAT_FIELD_POSITIONS_PROP, IntWritable.class);
  hCatFieldPositions = new int[fPos.length];
  for (int i = 0; i < fPos.length; ++i) {
    hCatFieldPositions[i] = fPos[i].get();
  }

  LOG.debug("Hive delims replacement enabled : " + doHiveDelimsReplacement);
  LOG.debug("Hive Delimiters : " + hiveDelimiters.toString());
  LOG.debug("Hive delimiters replacement : " + hiveDelimsReplacement);
  staticPartitionKeys = conf
    .getStrings(SqoopHCatUtilities.HCAT_STATIC_PARTITION_KEY_PROP);
  String partKeysString = staticPartitionKeys == null ? ""
    : Arrays.toString(staticPartitionKeys);
  LOG.debug("Static partition key used : "  + partKeysString);
}
 

Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 68, Source: SqoopHCatImportHelper.java

Example 8: loadLargeObjects

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
public void loadLargeObjects(LargeObjectLoader loader) {
  // No-op: this test record class has no large-object columns to load.
}
 

Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 3, Source: TestMainframeDatasetFTPRecordReader.java

Example 9: loadLargeObjects

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
public void loadLargeObjects(LargeObjectLoader __loader)
    throws SQLException, IOException, InterruptedException {
  // No-op: the generated record has no BLOB/CLOB columns.
}
 

Developer: lhfei, Project: cloud-doc, Lines: 4, Source: VDN_AVLB_MINUTELY_FULLY_REPORT.java
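Examples 9 and 10 come from a generated Sqoop record class whose table evidently has no large-object columns, which is why the generated bodies are empty. For a table that does contain a BLOB column, the generated body delegates to the loader instead. A sketch of what such generated code typically looks like; the DATA_BLOB field name, the column index 2, and the __cur_result_set field are illustrative of Sqoop's code generation, not taken from this project:

public void loadLargeObjects(LargeObjectLoader __loader)
    throws SQLException, IOException, InterruptedException {
  // The loader keeps the value inline when it fits under the configured
  // limit; otherwise it writes an external LOB file and stores a reference.
  this.DATA_BLOB = __loader.readBlobRef(2, this.__cur_result_set);
}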

Example 10: loadLargeObjects0

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
public void loadLargeObjects0(LargeObjectLoader __loader)
    throws SQLException, IOException, InterruptedException {
  // No-op: generated companion to loadLargeObjects; also no LOB columns here.
}
 

Developer: lhfei, Project: cloud-doc, Lines: 4, Source: VDN_AVLB_MINUTELY_FULLY_REPORT.java

Example 11: initDefaults

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
private void initDefaults(Configuration baseConfiguration) {
	// first, set the true defaults if nothing else happens.
	// default action is to run the full pipeline.
	this.hadoopMapRedHome = System.getenv("HADOOP_MAPRED_HOME");

	this.hiveHome = getHiveHomeDefault();
	this.hCatHome = getHCatHomeDefault();

	this.inputDelimiters = new DelimiterSet(
			DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR,
			DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR, false);
	this.outputDelimiters = new DelimiterSet();

	// Set this to cwd, but -Dsqoop.src.dir can override.
	this.codeOutputDir = System.getProperty("sqoop.src.dir", ".");

	String myTmpDir = System.getProperty("test.build.data", "/tmp/");
	if (!myTmpDir.endsWith(File.separator)) {
		myTmpDir = myTmpDir + File.separator;
	}

	this.tmpDir = myTmpDir;
	String localUsername = System.getProperty("user.name", "unknown");
	this.jarOutputDir = getNonceJarDir(tmpDir + "sqoop-" + localUsername
			+ "/compile");
	this.jarDirIsAuto = true;
	this.layout = FileLayout.TextFile;

	this.areOutputDelimsManuallySet = false;
	this.areInputDelimsManuallySet = false;

	this.numMappers = DEFAULT_NUM_MAPPERS;
	this.useCompression = false;
	this.compressionCodec = null;
	this.directSplitSize = 0;

	this.maxInlineLobSize = LargeObjectLoader.DEFAULT_MAX_LOB_LENGTH;

	// Don't set a default value for fetchsize. This allows a JDBCManager to
	// provide a database-specific default, if no value is provided by the
	// user.
	this.fetchSize = null;

	if (null == baseConfiguration) {
		this.conf = new Configuration();
	} else {
		this.conf = baseConfiguration;
	}

	this.extraArgs = null;

	this.dbOutColumns = null;

	this.incrementalMode = IncrementalMode.None;

	this.updateMode = UpdateMode.UpdateOnly;

	// Creating instances for user specific mapping
	this.mapColumnHive = new Properties();
	this.mapColumnJava = new Properties();

	// We do not want to be verbose too much if not explicitly needed
	this.verbose = false;
	this.isValidationEnabled = false; // validation is disabled by default
	this.validatorClass = RowCountValidator.class;
	this.validationThresholdClass = AbsoluteValidationThreshold.class;
	this.validationFailureHandlerClass = AbortOnFailureHandler.class;
}
 

Developer: unicredit, Project: zSqoop, Lines: 69, Source: SqoopOptions.java

Example 12: configureInputFormat

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
@Override
protected void configureInputFormat(Job job, String tableName,
    String tableClassName, String splitByCol) throws IOException {
  ConnManager mgr = getContext().getConnManager();
  try {
    String username = options.getUsername();
    if (null == username || username.length() == 0) {
      DBConfiguration.configureDB(job.getConfiguration(),
          mgr.getDriverClass(), options.getConnectString(),
          options.getFetchSize(), options.getConnectionParams());
    } else {
      DBConfiguration.configureDB(job.getConfiguration(),
          mgr.getDriverClass(), options.getConnectString(),
          username, options.getPassword(), options.getFetchSize(),
          options.getConnectionParams());
    }

    if (null != tableName) {
      // Import a table.
      String [] colNames = options.getColumns();
      if (null == colNames) {
        colNames = mgr.getColumnNames(tableName);
      }

      String [] sqlColNames = null;
      if (null != colNames) {
        sqlColNames = new String[colNames.length];
        for (int i = 0; i < colNames.length; i++) {
          sqlColNames[i] = mgr.escapeColName(colNames[i]);
        }
      }

      // It's ok if the where clause is null in DBInputFormat.setInput.
      String whereClause = options.getWhereClause();

      // We can't set the class properly in here, because we may not have the
      // jar loaded in this JVM. So we start by calling setInput() with
      // DBWritable and then overriding the string manually.
      DataDrivenDBInputFormat.setInput(job, DBWritable.class,
          mgr.escapeTableName(tableName), whereClause,
          mgr.escapeColName(splitByCol), sqlColNames);

      // If user specified boundary query on the command line propagate it to
      // the job
      if (options.getBoundaryQuery() != null) {
        DataDrivenDBInputFormat.setBoundingQuery(job.getConfiguration(),
                options.getBoundaryQuery());
      }
    } else {
      // Import a free-form query.
      String inputQuery = options.getSqlQuery();
      String sanitizedQuery = inputQuery.replace(
          DataDrivenDBInputFormat.SUBSTITUTE_TOKEN, " (1 = 1) ");

      String inputBoundingQuery = options.getBoundaryQuery();
      if (inputBoundingQuery == null) {
        inputBoundingQuery = buildBoundaryQuery(splitByCol, sanitizedQuery);
      }
      DataDrivenDBInputFormat.setInput(job, DBWritable.class,
          inputQuery, inputBoundingQuery);
      new DBConfiguration(job.getConfiguration()).setInputOrderBy(
          splitByCol);
    }

    LOG.debug("Using table class: " + tableClassName);
    job.getConfiguration().set(ConfigurationHelper.getDbInputClassProperty(),
        tableClassName);

    job.getConfiguration().setLong(LargeObjectLoader.MAX_INLINE_LOB_LEN_KEY,
        options.getInlineLobLimit());

    LOG.debug("Using InputFormat: " + inputFormatClass);
    job.setInputFormatClass(inputFormatClass);
  } finally {
    try {
      mgr.close();
    } catch (SQLException sqlE) {
      LOG.warn("Error closing connection: " + sqlE);
    }
  }
}
 

Developer: unicredit, Project: zSqoop, Lines: 82, Source: DataDrivenImportJob.java
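Note how the job wires the user's inline LOB limit into the configuration through LargeObjectLoader.MAX_INLINE_LOB_LEN_KEY: values at or below the limit stay inline in the imported record, larger ones are written to external files. A minimal sketch of setting the limit directly, with an arbitrary 16 MB threshold:

Configuration conf = job.getConfiguration();
// LOB values larger than this many bytes go to external files.
conf.setLong(LargeObjectLoader.MAX_INLINE_LOB_LEN_KEY, 16 * 1024 * 1024);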

Example 13: setup

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
@Override
protected void setup(Context context)
    throws IOException, InterruptedException {
  this.lobLoader = new LargeObjectLoader(context.getConfiguration(),
      FileOutputFormat.getWorkOutputPath(context));
}
 

Developer: unicredit, Project: zSqoop, Lines: 7, Source: SequenceFileImportMapper.java

Example 14: setup

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
@Override
protected void setup(Context context)
  throws IOException, InterruptedException {
  Configuration conf = context.getConfiguration();
  String inputJobInfoStr = conf.get(HCatConstants.HCAT_KEY_JOB_INFO);
  jobInfo =
    (InputJobInfo) HCatUtil.deserialize(inputJobInfoStr);
  dataColsSchema = jobInfo.getTableInfo().getDataColumns();
  partitionSchema =
    jobInfo.getTableInfo().getPartitionColumns();
  StringBuilder storerInfoStr = new StringBuilder(1024);
  StorerInfo storerInfo = jobInfo.getTableInfo().getStorerInfo();
  storerInfoStr.append("HCatalog Storer Info : ")
    .append("\n\tHandler = ").append(storerInfo.getStorageHandlerClass())
    .append("\n\tInput format class = ").append(storerInfo.getIfClass())
    .append("\n\tOutput format class = ").append(storerInfo.getOfClass())
    .append("\n\tSerde class = ").append(storerInfo.getSerdeClass());
  Properties storerProperties = storerInfo.getProperties();
  if (!storerProperties.isEmpty()) {
    storerInfoStr.append("\nStorer properties ");
    for (Map.Entry<Object, Object> entry : storerProperties.entrySet()) {
      String key = (String) entry.getKey();
      Object val = entry.getValue();
      storerInfoStr.append("\n\t").append(key).append('=').append(val);
    }
  }
  storerInfoStr.append("\n");
  LOG.info(storerInfoStr);

  hCatFullTableSchema = new HCatSchema(dataColsSchema.getFields());
  for (HCatFieldSchema hfs : partitionSchema.getFields()) {
    hCatFullTableSchema.append(hfs);
  }
  fieldCount = hCatFullTableSchema.size();
  lobLoader = new LargeObjectLoader(conf,
    new Path(jobInfo.getTableInfo().getTableLocation()));
  bigDecimalFormatString = conf.getBoolean(
    ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT,
    ImportJobBase.PROPERTY_BIGDECIMAL_FORMAT_DEFAULT);
  debugHCatImportMapper = conf.getBoolean(
    SqoopHCatUtilities.DEBUG_HCAT_IMPORT_MAPPER_PROP, false);
  IntWritable[] delimChars = DefaultStringifier.loadArray(conf,
      SqoopHCatUtilities.HIVE_DELIMITERS_TO_REPLACE_PROP, IntWritable.class);
  hiveDelimiters = new DelimiterSet(
    (char) delimChars[0].get(), (char) delimChars[1].get(),
    (char) delimChars[2].get(), (char) delimChars[3].get(),
    delimChars[4].get() == 1);
  hiveDelimsReplacement =
    conf.get(SqoopHCatUtilities.HIVE_DELIMITERS_REPLACEMENT_PROP);
  if (hiveDelimsReplacement == null) {
    hiveDelimsReplacement = "";
  }
  doHiveDelimsReplacement = Boolean.valueOf(conf.get(
    SqoopHCatUtilities.HIVE_DELIMITERS_REPLACEMENT_ENABLED_PROP));

  IntWritable[] fPos = DefaultStringifier.loadArray(conf,
      SqoopHCatUtilities.HCAT_FIELD_POSITIONS_PROP, IntWritable.class);
  hCatFieldPositions = new int[fPos.length];
  for (int i = 0; i < fPos.length; ++i) {
    hCatFieldPositions[i] = fPos[i].get();
  }

  LOG.debug("Hive delims replacement enabled : " + doHiveDelimsReplacement);
  LOG.debug("Hive Delimiters : " + hiveDelimiters.toString());
  LOG.debug("Hive delimiters replacement : " + hiveDelimsReplacement);
  staticPartitionKey =
    conf.get(SqoopHCatUtilities.HCAT_STATIC_PARTITION_KEY_PROP);
  LOG.debug("Static partition key used : " + staticPartitionKey);


}
 

Developer: unicredit, Project: zSqoop, Lines: 72, Source: SqoopHCatImportMapper.java

Example 15: initDefaults

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
private void initDefaults(Configuration baseConfiguration) {
  // first, set the true defaults if nothing else happens.
  // default action is to run the full pipeline.
  this.hadoopHome = System.getenv("HADOOP_HOME");

  // Set this with $HIVE_HOME, but -Dhive.home can override.
  this.hiveHome = System.getenv("HIVE_HOME");
  this.hiveHome = System.getProperty("hive.home", this.hiveHome);

  this.inputDelimiters = new DelimiterSet(
      DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR,
      DelimiterSet.NULL_CHAR, DelimiterSet.NULL_CHAR, false);
  this.outputDelimiters = new DelimiterSet();

  // Set this to cwd, but -Dsqoop.src.dir can override.
  this.codeOutputDir = System.getProperty("sqoop.src.dir", ".");

  String myTmpDir = System.getProperty("test.build.data", "/tmp/");
  if (!myTmpDir.endsWith(File.separator)) {
    myTmpDir = myTmpDir + File.separator;
  }

  this.tmpDir = myTmpDir;
  String localUsername = System.getProperty("user.name", "unknown");
  this.jarOutputDir = getNonceJarDir(tmpDir + "sqoop-" + localUsername
      + "/compile");
  this.jarDirIsAuto = true;
  this.layout = FileLayout.TextFile;

  this.areDelimsManuallySet = false;

  this.numMappers = DEFAULT_NUM_MAPPERS;
  this.useCompression = false;
  this.compressionCodec = null;
  this.directSplitSize = 0;

  this.maxInlineLobSize = LargeObjectLoader.DEFAULT_MAX_LOB_LENGTH;

  // Don't set a default value for fetchsize. This allows a JDBCManager to
  // provide a database-specific default, if no value is provided by the
  // user.
  this.fetchSize = null;

  if (null == baseConfiguration) {
    this.conf = new Configuration();
  } else {
    this.conf = baseConfiguration;
  }

  this.extraArgs = null;

  this.dbOutColumns = null;

  this.incrementalMode = IncrementalMode.None;

  this.updateMode = UpdateMode.UpdateOnly;

  // Creating instances for user specific mapping
  this.mapColumnHive = new Properties();
  this.mapColumnJava = new Properties();

  // We do not want to be verbose too much if not explicitly needed
  this.verbose = false;
}
 

Developer: infinidb, Project: sqoop, Lines: 65, Source: SqoopOptions.java

Example 16: configureInputFormat

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
@Override
protected void configureInputFormat(Job job, String tableName,
    String tableClassName, String splitByCol) throws IOException {
  ConnManager mgr = getContext().getConnManager();
  try {
    String username = options.getUsername();
    if (null == username || username.length() == 0) {
      DBConfiguration.configureDB(job.getConfiguration(),
          mgr.getDriverClass(), options.getConnectString(),
          options.getFetchSize());
    } else {
      DBConfiguration.configureDB(job.getConfiguration(),
          mgr.getDriverClass(), options.getConnectString(),
          username, options.getPassword(), options.getFetchSize());
    }

    if (null != tableName) {
      // Import a table.
      String [] colNames = options.getColumns();
      if (null == colNames) {
        colNames = mgr.getColumnNames(tableName);
      }

      String [] sqlColNames = null;
      if (null != colNames) {
        sqlColNames = new String[colNames.length];
        for (int i = 0; i < colNames.length; i++) {
          sqlColNames[i] = mgr.escapeColName(colNames[i]);
        }
      }

      // It's ok if the where clause is null in DBInputFormat.setInput.
      String whereClause = options.getWhereClause();

      // We can't set the class properly in here, because we may not have the
      // jar loaded in this JVM. So we start by calling setInput() with
      // DBWritable and then overriding the string manually.
      DataDrivenDBInputFormat.setInput(job, DBWritable.class,
          mgr.escapeTableName(tableName), whereClause,
          mgr.escapeColName(splitByCol), sqlColNames);

      // If user specified boundary query on the command line propagate it to
      // the job
      if (options.getBoundaryQuery() != null) {
        DataDrivenDBInputFormat.setBoundingQuery(job.getConfiguration(),
                options.getBoundaryQuery());
      }
    } else {
      // Import a free-form query.
      String inputQuery = options.getSqlQuery();
      String sanitizedQuery = inputQuery.replace(
          DataDrivenDBInputFormat.SUBSTITUTE_TOKEN, " (1 = 1) ");

      String inputBoundingQuery = options.getBoundaryQuery();
      if (inputBoundingQuery == null) {
        inputBoundingQuery = buildBoundaryQuery(splitByCol, sanitizedQuery);
      }
      DataDrivenDBInputFormat.setInput(job, DBWritable.class,
          inputQuery, inputBoundingQuery);
      new DBConfiguration(job.getConfiguration()).setInputOrderBy(
          splitByCol);
    }

    LOG.debug("Using table class: " + tableClassName);
    job.getConfiguration().set(ConfigurationHelper.getDbInputClassProperty(),
        tableClassName);

    job.getConfiguration().setLong(LargeObjectLoader.MAX_INLINE_LOB_LEN_KEY,
        options.getInlineLobLimit());

    LOG.debug("Using InputFormat: " + inputFormatClass);
    job.setInputFormatClass(inputFormatClass);
  } finally {
    try {
      mgr.close();
    } catch (SQLException sqlE) {
      LOG.warn("Error closing connection: " + sqlE);
    }
  }
}
 

Developer: infinidb, Project: sqoop, Lines: 81, Source: DataDrivenImportJob.java

Example 17: configureInputFormat

import com.cloudera.sqoop.lib.LargeObjectLoader; // import the required package/class
@Override
protected void configureInputFormat(Job job, String tableName,
                                    String tableClassName, String splitByCol) throws IOException {
    ConnManager mgr = getContext().getConnManager();
    try {
        String username = options.getUsername();
        if (null == username || username.length() == 0) {
            DBConfiguration.configureDB(job.getConfiguration(),
                    mgr.getDriverClass(), options.getConnectString(),
                    options.getFetchSize(), options.getConnectionParams());
        } else {
            DBConfiguration.configureDB(job.getConfiguration(),
                    mgr.getDriverClass(), options.getConnectString(),
                    username, options.getPassword(), options.getFetchSize(),
                    options.getConnectionParams());
        }

        if (null != tableName) {
            // Import a table.
            String[] colNames = options.getColumns();
            if (null == colNames) {
                colNames = mgr.getColumnNames(tableName);
            }

            String[] sqlColNames = null;
            if (null != colNames) {
                sqlColNames = new String[colNames.length];
                for (int i = 0; i < colNames.length; i++) {
                    sqlColNames[i] = mgr.escapeColName(colNames[i]);
                }
            }

            // It's ok if the where clause is null in DBInputFormat.setInput.
            String whereClause = options.getWhereClause();

            // We can't set the class properly in here, because we may not have the
            // jar loaded in this JVM. So we start by calling setInput() with
            // DBWritable and then overriding the string manually.
            HSDBInputFormat.setInput(job, DBWritable.class,
                    mgr.escapeTableName(tableName), whereClause,
                    mgr.escapeColName(splitByCol), sqlColNames);

            // If user specified boundary query on the command line propagate it to
            // the job
            if (options.getBoundaryQuery() != null) {
                HSDBInputFormat.setBoundingQuery(job.getConfiguration(),
                        options.getBoundaryQuery());
            }
        } else {
            // Import a free-form query.
            String inputQuery = options.getSqlQuery();
            String sanitizedQuery = inputQuery.replace(
                    HSDBInputFormat.SUBSTITUTE_TOKEN, " (1 = 1) ");

            String inputBoundingQuery = options.getBoundaryQuery();
            if (inputBoundingQuery == null) {
                inputBoundingQuery = buildBoundaryQuery(splitByCol, sanitizedQuery);
            }
            HSDBInputFormat.setInput(job, DBWritable.class,
                    inputQuery, inputBoundingQuery);
            new DBConfiguration(job.getConfiguration()).setInputOrderBy(
                    splitByCol);
        }

        LOG.debug("Using table class: " + tableClassName);
        job.getConfiguration().set(ConfigurationHelper.getDbInputClassProperty(),
                tableClassName);

        job.getConfiguration().setLong(LargeObjectLoader.MAX_INLINE_LOB_LEN_KEY,
                options.getInlineLobLimit());

        LOG.debug("Using InputFormat: " + inputFormatClass);
        job.setInputFormatClass(inputFormatClass);
    } finally {
        try {
            mgr.close();
        } catch (SQLException sqlE) {
            LOG.warn("Error closing connection: " + sqlE);
        }
    }
}
 

Developer: waveaccounting, Project: honeyspoon, Lines: 82, Source: HSImportJob.java

