To use the IoTDBSink, you need to construct an instance of it by specifying IoTDBSinkOptions and IoTSerializationSchema instances. The IoTDBSink sends events one at a time by default, but you can switch to batch mode by invoking withBatchSize(int).

    Example

    • A simulated source, SensorSource, generates one data point per second.
    • Flink uses IoTDBSink to consume the generated data points and write the data into IoTDB.

    Note that to use IoTDBSink, automatic schema creation must be enabled in IoTDB.

    Usage

    • Launch the IoTDB server.
    • Run org.apache.iotdb.flink.FlinkIoTDBSink.java to launch the Flink job on a local mini cluster.

    IoTDBSource

    Example

    This example shows a case where data are read from IoTDB.

    1. import org.apache.iotdb.flink.options.IoTDBSourceOptions;
    2. import org.apache.iotdb.rpc.IoTDBConnectionException;
    3. import org.apache.iotdb.rpc.StatementExecutionException;
    4. import org.apache.iotdb.rpc.TSStatusCode;
    5. import org.apache.iotdb.session.Session;
    6. import org.apache.iotdb.tsfile.file.metadata.enums.CompressionType;
    7. import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
    8. import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;
    9. import org.apache.iotdb.tsfile.read.common.RowRecord;
    10. import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    11. import java.util.ArrayList;
    12. import java.util.List;
    13. public class FlinkIoTDBSource {
    14. static final String ROOT_SG1_D1_S1 = "root.sg1.d1.s1";
    15. public static void main(String[] args) throws Exception {
    16. prepareData();
    17. // run the flink job on local mini cluster
    18. StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    19. IoTDBSourceOptions ioTDBSourceOptions =
    20. new IoTDBSourceOptions("127.0.0.1", 6667, "root", "root",
    21. "select s1 from " + ROOT_SG1_D1 + " align by device");
    22. env.addSource(
    23. new IoTDBSource<RowRecord>(ioTDBSourceOptions) {
    24. @Override
    25. public RowRecord convert(RowRecord rowRecord) {
    26. return rowRecord;
    27. }
    28. })
    29. .name("sensor-source")
    30. .print()
    31. .setParallelism(2);
    32. env.execute();
    33. }
    34. /**
    35. * Write some data to IoTDB
    36. */
    37. private static void prepareData() throws IoTDBConnectionException, StatementExecutionException {
    38. try {
    39. session.setStorageGroup("root.sg1");
    40. if (!session.checkTimeseriesExists(ROOT_SG1_D1_S1)) {
    41. session.createTimeseries(
    42. ROOT_SG1_D1_S1, TSDataType.INT64, TSEncoding.RLE, CompressionType.SNAPPY);
    43. List<String> measurements = new ArrayList<>();
    44. List<TSDataType> types = new ArrayList<>();
    45. measurements.add("s1");
    46. measurements.add("s2");
    47. measurements.add("s3");
    48. types.add(TSDataType.INT64);
    49. types.add(TSDataType.INT64);
    50. types.add(TSDataType.INT64);
    51. for (long time = 0; time < 100; time++) {
    52. List<Object> values = new ArrayList<>();
    53. values.add(1L);
    54. values.add(2L);
    55. values.add(3L);
    56. session.insertRecord(ROOT_SG1_D1, time, measurements, types, values);
    57. }
    58. }
    59. } catch (StatementExecutionException e) {
    60. if (e.getStatusCode() != TSStatusCode.PATH_ALREADY_EXIST_ERROR.getStatusCode()) {
    61. throw e;
    62. }
    63. }
    64. }
    65. }

    Usage