@@ -16,23 +16,29 @@
  */
 package org.apache.spark.flume.sink
 
-import org.apache.flume.sink.AbstractSink
-import java.util.concurrent.locks.ReentrantLock
-import org.apache.flume.Sink.Status
-import org.apache.spark.flume.{SparkSinkEvent, EventBatch, SparkFlumeProtocol}
-import scala.util.control.Breaks
+
+import java.net.InetSocketAddress
 import java.nio.ByteBuffer
-import org.apache.flume.{FlumeException, Context}
-import org.slf4j.LoggerFactory
-import java.util.concurrent.atomic.AtomicLong
-import org.apache.commons.lang.RandomStringUtils
-import java.util.concurrent._
 import java.util
-import org.apache.flume.conf.{ConfigurationException, Configurable}
+import java.util.concurrent._
+import java.util.concurrent.atomic.AtomicLong
+import java.util.concurrent.locks.ReentrantLock
+
+import scala.util.control.Breaks
+
 import com.google.common.util.concurrent.ThreadFactoryBuilder
 import org.apache.avro.ipc.NettyServer
 import org.apache.avro.ipc.specific.SpecificResponder
-import java.net.InetSocketAddress
+import org.apache.commons.lang.RandomStringUtils
+import org.apache.flume.Sink.Status
+import org.apache.flume.conf.{ConfigurationException, Configurable}
+import org.apache.flume.sink.AbstractSink
+import org.apache.flume.{FlumeException, Context}
+
+import org.apache.spark.flume.{SparkSinkEvent, EventBatch, SparkFlumeProtocol}
+import org.slf4j.LoggerFactory
+
+
 
 class SparkSink extends AbstractSink with Configurable {
   private val LOG = LoggerFactory.getLogger(this.getClass)