1. Implementation: write the dimension table from Kafka into the DIM layer

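Before the job runs, the DIM layer table must already exist in HBase: the sink below writes to the table yanqi_area with the single column family f1. A minimal preparation sketch using the HBase 2.x Admin API (the ZooKeeper quorum address is an assumption; adjust it to your cluster):

package myutils

import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client.{ColumnFamilyDescriptorBuilder, ConnectionFactory, TableDescriptorBuilder}

object CreateDimTable {
  def main(args: Array[String]): Unit = {
    val conf = HBaseConfiguration.create()
    conf.set("hbase.zookeeper.quorum", "hadoop01,hadoop02,hadoop03") // assumption: adjust to your cluster
    conf.set("hbase.zookeeper.property.clientPort", "2181")

    val conn = ConnectionFactory.createConnection(conf)
    val admin = conn.getAdmin
    val table = TableName.valueOf("yanqi_area")

    // create the dimension table with the column family "f1" used by SinkHBase
    if (!admin.tableExists(table)) {
      admin.createTable(
        TableDescriptorBuilder.newBuilder(table)
          .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1"))
          .build())
    }

    admin.close()
    conn.close()
  }
}
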
### --- Implementation: SinkHBase: a custom sink that writes the table data consumed from Kafka into HBase

package ods

import java.util

import com.alibaba.fastjson.JSON
import modes.{AreaInfo, DataInfo, TableObject}
import myutils.ConnHBase
import org.apache.flink.configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.hadoop.hbase.TableName
import org.apache.hadoop.hbase.client.{Connection, Delete, Put, Table}

class SinkHBase extends RichSinkFunction[util.ArrayList[TableObject]] {

  var connection: Connection = _
  var hbTable: Table = _

  /**
   * Instantiate the HBase
   * connection
   * hbTable
   * @param parameters
   */
  override def open(parameters: configuration.Configuration): Unit = {
    connection = new ConnHBase().connToHbase
    hbTable = connection.getTable(TableName.valueOf("yanqi_area"))
  }

  override def close(): Unit = {
    if (hbTable != null) {
      hbTable.close()
    }
    if (connection != null) {
      connection.close()
    }
  }

  /**
   * Called once for every record that arrives at the sink.
   * @param value
   * @param context
   */
  override def invoke(value: util.ArrayList[TableObject], context: SinkFunction.Context[_]): Unit = {
    value.forEach(x => {
      println(x.toString)
      val database: String = x.database
      val tableName: String = x.tableName
      val typeInfo: String = x.typeInfo

      if (database.equalsIgnoreCase("dwshow") && tableName.equalsIgnoreCase("yanqi_trade_orders")) {
        if (typeInfo.equalsIgnoreCase("insert")) {
          val info: DataInfo = JSON.parseObject(x.dataInfo, classOf[DataInfo])
          insertTradeOrders(hbTable, info)
        } else if (typeInfo.equalsIgnoreCase("update")) {
          // updates and deletes of trade orders are not handled in this section
        } else if (typeInfo.equalsIgnoreCase("delete")) {

        }
      }

      if (database.equalsIgnoreCase("dwshow") && tableName.equalsIgnoreCase("yanqi_area")) {
        if (typeInfo.equalsIgnoreCase("insert")) {
          val info: AreaInfo = JSON.parseObject(x.dataInfo, classOf[AreaInfo])
          insertArea(hbTable, info)
        } else if (typeInfo.equalsIgnoreCase("update")) {
          // a canal update carries the full row image, so it can be written as a put as well
          val info: AreaInfo = JSON.parseObject(x.dataInfo, classOf[AreaInfo])
          insertArea(hbTable, info)
        } else if (typeInfo.equalsIgnoreCase("delete")) {
          val info: AreaInfo = JSON.parseObject(x.dataInfo, classOf[AreaInfo])
          deleteArea(hbTable, info)
        }
      }
    })
  }

  // yanqi_area is the province/city/district dimension table; delete a row by its id
  def deleteArea(hbTable: Table, areaInfo: AreaInfo): Unit = {
    val delete = new Delete(areaInfo.id.getBytes)
    hbTable.delete(delete)
  }

  def insertArea(hbTable: Table, areaInfo: AreaInfo): Unit = {
    println(areaInfo.toString)
    val put = new Put(areaInfo.id.getBytes())
    put.addColumn("f1".getBytes(), "name".getBytes(), areaInfo.name.getBytes())
    put.addColumn("f1".getBytes(), "pid".getBytes(), areaInfo.pid.getBytes())
    put.addColumn("f1".getBytes(), "sname".getBytes(), areaInfo.sname.getBytes())
    put.addColumn("f1".getBytes(), "level".getBytes(), areaInfo.level.getBytes())
    put.addColumn("f1".getBytes(), "citycode".getBytes(), areaInfo.citycode.getBytes())
    put.addColumn("f1".getBytes(), "yzcode".getBytes(), areaInfo.yzcode.getBytes())
    put.addColumn("f1".getBytes(), "mername".getBytes(), areaInfo.mername.getBytes())
    put.addColumn("f1".getBytes(), "lng".getBytes(), areaInfo.Lng.getBytes())
    put.addColumn("f1".getBytes(), "lat".getBytes(), areaInfo.Lat.getBytes())
    put.addColumn("f1".getBytes(), "pinyin".getBytes(), areaInfo.pinyin.getBytes())

    hbTable.put(put)
  }
  def insertTradeOrders(hbTable: Table, dataInfo: DataInfo): Unit = {
    val put = new Put(dataInfo.orderId.getBytes)
    put.addColumn("f1".getBytes, "modifiedTime".getBytes, dataInfo.modifiedTime.getBytes())
    put.addColumn("f1".getBytes, "orderNo".getBytes, dataInfo.orderNo.getBytes())
    put.addColumn("f1".getBytes, "isPay".getBytes, dataInfo.isPay.getBytes())
    put.addColumn("f1".getBytes, "orderId".getBytes, dataInfo.orderId.getBytes())
    put.addColumn("f1".getBytes, "tradeSrc".getBytes, dataInfo.tradeSrc.getBytes())
    put.addColumn("f1".getBytes, "payTime".getBytes, dataInfo.payTime.getBytes())
    put.addColumn("f1".getBytes, "productMoney".getBytes, dataInfo.productMoney.getBytes())
    put.addColumn("f1".getBytes, "totalMoney".getBytes, dataInfo.totalMoney.getBytes())
    put.addColumn("f1".getBytes, "dataFlag".getBytes, dataInfo.dataFlag.getBytes())
    put.addColumn("f1".getBytes, "userId".getBytes, dataInfo.userId.getBytes())
    put.addColumn("f1".getBytes, "areaId".getBytes, dataInfo.areaId.getBytes())
    put.addColumn("f1".getBytes, "createTime".getBytes, dataInfo.createTime.getBytes())
    put.addColumn("f1".getBytes, "payMethod".getBytes, dataInfo.payMethod.getBytes())
    put.addColumn("f1".getBytes, "isRefund".getBytes, dataInfo.isRefund.getBytes())
    put.addColumn("f1".getBytes, "tradeType".getBytes, dataInfo.tradeType.getBytes())
    put.addColumn("f1".getBytes, "status".getBytes, dataInfo.status.getBytes())

    hbTable.put(put)
  }
}
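
The SinkHBase class above depends on a few helper types from the modes and myutils packages that are not listed in this section. A minimal sketch of what they might look like, inferred from how they are used above: the field names are taken from the code, but the string types and the case-class form are assumptions, and depending on the fastjson version the model classes may need to be plain classes with @BeanProperty var fields instead.

package modes

// one canal row image plus its metadata, as built in KafkaToHBase below
case class TableObject(database: String, tableName: String, typeInfo: String, dataInfo: String)

// row image of the yanqi_area dimension table
case class AreaInfo(id: String, name: String, pid: String, sname: String, level: String,
                    citycode: String, yzcode: String, mername: String,
                    Lng: String, Lat: String, pinyin: String)

// row image of the yanqi_trade_orders table
case class DataInfo(modifiedTime: String, orderNo: String, isPay: String, orderId: String,
                    tradeSrc: String, payTime: String, productMoney: String, totalMoney: String,
                    dataFlag: String, userId: String, areaId: String, createTime: String,
                    payMethod: String, isRefund: String, tradeType: String, status: String)

Similarly, ConnHBase only has to hand back an HBase Connection; a sketch, with the ZooKeeper addresses as assumptions:

package myutils

import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.{Connection, ConnectionFactory}

class ConnHBase {
  // build an HBase client connection; quorum and port are assumptions, adjust to your cluster
  def connToHbase: Connection = {
    val conf = HBaseConfiguration.create()
    conf.set("hbase.zookeeper.quorum", "hadoop01,hadoop02,hadoop03")
    conf.set("hbase.zookeeper.property.clientPort", "2181")
    ConnectionFactory.createConnection(conf)
  }
}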
### --- Implementation: KafkaToHBase: use Kafka as the source, with Flink as the consumer reading the table data from Kafka and sinking it into HBase

package ods

import java.util

import com.alibaba.fastjson.{JSON, JSONObject}
import modes.TableObject
import myutils.SourceKafka
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.api.scala._

/**
 * 1. Consume data from the "canal" topic in Kafka ----- FlinkKafkaConsumer
 * 2. Convert the consumed JSON messages ----- fastjson
 *    type, database, table, data (jsonArray)
 * 3. Save the converted data into HBase
 */
object KafkaToHBase {

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    val kafkaConsumer = new SourceKafka().getKafkaSource("canal")
    kafkaConsumer.setStartFromLatest()

    val sourceStream = env.addSource(kafkaConsumer)
    // sourceStream.print()

    // every canal message carries: type, database, table, data (a JSON array of row images),
    // e.g. {"database":"dwshow","table":"yanqi_area","type":"INSERT","data":[{...}]}
    val mapped: DataStream[util.ArrayList[TableObject]] = sourceStream.map(x => {
      val jsonObj: JSONObject = JSON.parseObject(x)
      val database: AnyRef = jsonObj.get("database")
      val table: AnyRef = jsonObj.get("table")
      val typeInfo: AnyRef = jsonObj.get("type")

      // wrap every row image together with its database, table and operation type
      val objects = new util.ArrayList[TableObject]()
      jsonObj.getJSONArray("data").forEach(x => {
        // print(database.toString + ".." + table.toString + "..." + typeInfo.toString + ".." + x.toString)
        objects.add(TableObject(database.toString, table.toString, typeInfo.toString, x.toString))
      })
      objects
    })

    /**
     * Sink the data into HBase:
     * 1. take the stream of parsed records
     * 2. addSink() --- the custom sink SinkHBase
     */
    mapped.addSink(new SinkHBase)

    env.execute()
  }
}
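
The SourceKafka helper used above is likewise not shown in this section. A minimal sketch, assuming it simply wraps a FlinkKafkaConsumer[String] with a SimpleStringSchema; the bootstrap servers and group id are assumptions:

package myutils

import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer

class SourceKafka {
  // build a string consumer for the given topic; addresses and group id are placeholders
  def getKafkaSource(topicName: String): FlinkKafkaConsumer[String] = {
    val props = new Properties()
    props.setProperty("bootstrap.servers", "hadoop01:9092,hadoop02:9092,hadoop03:9092")
    props.setProperty("group.id", "kafka-to-hbase")
    props.setProperty("auto.offset.reset", "latest")
    new FlinkKafkaConsumer[String](topicName, new SimpleStringSchema(), props)
  }
}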
