public class CqlBulkOutputFormat
extends org.apache.hadoop.mapreduce.OutputFormat<java.lang.Object,java.util.List<java.nio.ByteBuffer>>
implements org.apache.hadoop.mapred.OutputFormat<java.lang.Object,java.util.List<java.nio.ByteBuffer>>

CqlBulkOutputFormat acts as a Hadoop-specific
 OutputFormat that allows reduce tasks to store keys (and corresponding
 bound variable values) as CQL rows (and respective columns) in a given
 table.
 
 As is the case with the CqlOutputFormat,
 you need to set the prepared statement in your
 Hadoop job Configuration. The CqlConfigHelper class, through its
 setOutputPreparedStatement method, is provided to make this
 simple.
 Furthermore, you need to set the keyspace and column family. The ConfigHelper class, through its
 ConfigHelper.setOutputColumnFamily(org.apache.hadoop.conf.Configuration, java.lang.String) method, is provided to make this
 simple.
 
| Modifier and Type | Class and Description | 
|---|---|
| static class  | CqlBulkOutputFormat.NullOutputCommitter | 
| Constructor and Description | 
|---|
| CqlBulkOutputFormat() | 
| Modifier and Type | Method and Description | 
|---|---|
| void | checkOutputSpecs(org.apache.hadoop.fs.FileSystem filesystem,
                org.apache.hadoop.mapred.JobConf job) — Deprecated. | 
| void | checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext context) | 
| static boolean | getDeleteSourceOnSuccess(org.apache.hadoop.conf.Configuration conf) | 
| static java.util.Collection<java.lang.String> | getIgnoreHosts(org.apache.hadoop.conf.Configuration conf) — Get the hosts to ignore as a collection of strings. | 
| org.apache.hadoop.mapreduce.OutputCommitter | getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext context) | 
| CqlBulkRecordWriter | getRecordWriter(org.apache.hadoop.fs.FileSystem filesystem,
               org.apache.hadoop.mapred.JobConf job,
               java.lang.String name,
               org.apache.hadoop.util.Progressable progress) — Deprecated. | 
| CqlBulkRecordWriter | getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext context) — Get the RecordWriter for the given task. | 
| static java.lang.String | getTableForAlias(org.apache.hadoop.conf.Configuration conf,
                java.lang.String alias) | 
| static java.lang.String | getTableInsertStatement(org.apache.hadoop.conf.Configuration conf,
                       java.lang.String columnFamily) | 
| static java.lang.String | getTableSchema(org.apache.hadoop.conf.Configuration conf,
              java.lang.String columnFamily) | 
| static void | setDeleteSourceOnSuccess(org.apache.hadoop.conf.Configuration conf,
                        boolean deleteSrc) | 
| static void | setIgnoreHosts(org.apache.hadoop.conf.Configuration conf,
              java.lang.String... ignoreNodes)Set the hosts to ignore. | 
| static void | setIgnoreHosts(org.apache.hadoop.conf.Configuration conf,
              java.lang.String ignoreNodesCsv)Set the hosts to ignore as comma delimited values. | 
| static void | setTableAlias(org.apache.hadoop.conf.Configuration conf,
             java.lang.String alias,
             java.lang.String columnFamily) | 
| static void | setTableInsertStatement(org.apache.hadoop.conf.Configuration conf,
                       java.lang.String columnFamily,
                       java.lang.String insertStatement) | 
| static void | setTableSchema(org.apache.hadoop.conf.Configuration conf,
              java.lang.String columnFamily,
              java.lang.String schema) | 
@Deprecated
public CqlBulkRecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem filesystem, org.apache.hadoop.mapred.JobConf job, java.lang.String name, org.apache.hadoop.util.Progressable progress) throws java.io.IOException

Specified by: getRecordWriter in interface org.apache.hadoop.mapred.OutputFormat<java.lang.Object,java.util.List<java.nio.ByteBuffer>>
Throws: java.io.IOException

public CqlBulkRecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext context) throws java.io.IOException, java.lang.InterruptedException
Get the RecordWriter for the given task.
Specified by: getRecordWriter in class org.apache.hadoop.mapreduce.OutputFormat<java.lang.Object,java.util.List<java.nio.ByteBuffer>>
Parameters: context - the information about the current task.
Returns: a RecordWriter to write the output for the job.
Throws: java.io.IOException, java.lang.InterruptedException

public void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext context)

Specified by: checkOutputSpecs in class org.apache.hadoop.mapreduce.OutputFormat<java.lang.Object,java.util.List<java.nio.ByteBuffer>>

@Deprecated
public void checkOutputSpecs(org.apache.hadoop.fs.FileSystem filesystem,
                                         org.apache.hadoop.mapred.JobConf job)
                                  throws java.io.IOException
Specified by: checkOutputSpecs in interface org.apache.hadoop.mapred.OutputFormat<java.lang.Object,java.util.List<java.nio.ByteBuffer>>
Throws: java.io.IOException

public org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext context)
                                                               throws java.io.IOException,
                                                                      java.lang.InterruptedException

Specified by: getOutputCommitter in class org.apache.hadoop.mapreduce.OutputFormat<java.lang.Object,java.util.List<java.nio.ByteBuffer>>
Throws: java.io.IOException, java.lang.InterruptedException

public static void setTableSchema(org.apache.hadoop.conf.Configuration conf,
                                  java.lang.String columnFamily,
                                  java.lang.String schema)
public static void setTableInsertStatement(org.apache.hadoop.conf.Configuration conf,
                                           java.lang.String columnFamily,
                                           java.lang.String insertStatement)
public static java.lang.String getTableSchema(org.apache.hadoop.conf.Configuration conf,
                                              java.lang.String columnFamily)
public static java.lang.String getTableInsertStatement(org.apache.hadoop.conf.Configuration conf,
                                                       java.lang.String columnFamily)
public static void setDeleteSourceOnSuccess(org.apache.hadoop.conf.Configuration conf,
                                            boolean deleteSrc)
public static boolean getDeleteSourceOnSuccess(org.apache.hadoop.conf.Configuration conf)
public static void setTableAlias(org.apache.hadoop.conf.Configuration conf,
                                 java.lang.String alias,
                                 java.lang.String columnFamily)
public static java.lang.String getTableForAlias(org.apache.hadoop.conf.Configuration conf,
                                                java.lang.String alias)
public static void setIgnoreHosts(org.apache.hadoop.conf.Configuration conf,
                                  java.lang.String ignoreNodesCsv)
Parameters: conf - job configuration
            ignoreNodesCsv - a comma delimited list of nodes to ignore

public static void setIgnoreHosts(org.apache.hadoop.conf.Configuration conf,
                                  java.lang.String... ignoreNodes)
Parameters: conf - job configuration
            ignoreNodes - the nodes to ignore

public static java.util.Collection<java.lang.String> getIgnoreHosts(org.apache.hadoop.conf.Configuration conf)
Parameters: conf - job configuration

Copyright © 2018 The Apache Software Foundation