Scala data format conversion

Working with JSON

Getting a nested value from JSON, modifying a field, and adding a new key-value pair.

The idea is that these operations can be performed on an ObjectNode.
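Before the streaming code, here is a minimal, self-contained sketch of these three operations with Jackson in Scala; the JSON content and field names in it are invented for illustration:

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.databind.node.ObjectNode

object JsonNodeDemo {
  def main(args: Array[String]): Unit = {
    val mapper = new ObjectMapper()

    // Parse a JSON string into a JsonNode tree.
    val json = """{"device":{"id":"d-001","status":"ok"},"count":3}"""
    val root = mapper.readTree(json)

    // Read a nested value; path() returns a MissingNode instead of null for absent fields.
    val deviceId = root.path("device").path("id").asText()
    println(deviceId) // d-001

    // To modify or add fields, cast the node to ObjectNode and call put().
    val deviceNode = root.path("device").asInstanceOf[ObjectNode]
    deviceNode.put("status", "alarm") // modify an existing field
    deviceNode.put("newKey", 12)      // add a new key-value pair

    // Serialize the modified tree back to a JSON string.
    println(mapper.writeValueAsString(root))
  }
}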

Code snippet for the data source:

override def source(): DStream[_] =
  new KafkaDataSource(kconfig, SparkEnv.getStreamingContext())
    .createKafkaDstream(kconfig.getProducerTopic)
    .filter(_ != null)
    .mapPartitions(messages => {
      // One ObjectMapper per partition: it is not serializable and is costly to build per record.
      val objectMapper: ObjectMapper = new ObjectMapper()
      messages.map(message => {
        try {
          // Parse each Kafka record value into a JsonNode tree.
          objectMapper.readTree(message.value().toString)
        } catch {
          // Records that are not valid JSON are mapped to null and dropped below.
          case ex: JsonProcessingException => null
        }
      }).filter(_ != null)
    })
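The parse-or-drop pattern inside mapPartitions can be exercised without Kafka or Spark. A small sketch of the same try/readTree/filter logic over plain strings (the sample inputs are invented):

import com.fasterxml.jackson.core.JsonProcessingException
import com.fasterxml.jackson.databind.{JsonNode, ObjectMapper}

object ParseOrDropDemo {
  def main(args: Array[String]): Unit = {
    val objectMapper = new ObjectMapper()
    val raw = Seq("""{"id":1}""", "not json", """{"id":2}""")

    // Same shape as the mapPartitions body: parse, map failures to null, drop the nulls.
    val parsed: Seq[JsonNode] = raw.map { s =>
      try objectMapper.readTree(s)
      catch { case _: JsonProcessingException => null }
    }.filter(_ != null)

    parsed.foreach(n => println(n.path("id").asInt())) // 1, 2
  }
}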

The logic in the compute step:

override def compute(stream: DStream[_]): Unit = {
  val context = SparkContext.getOrCreate()
  val kafkaProducer = new AlarmProducerKafka[String, String](kconfig)
  producer = Some(context.broadcast(kafkaProducer))

  stream.foreachRDD(rdd => {
    val bigDataObjectMapper: ObjectMapper = new ObjectMapper()
    bigDataObjectMapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)

    rdd.foreachPartition(partitionOfRecords => {
      partitionOfRecords.foreach(r => {
        /**
         * r cannot be manipulated directly here: serialize it to a String with
         * writeValueAsString, then parse that String back into a JsonNode.
         *
         * To add fields to the original JSON, cast the target node to ObjectNode
         * and call put to add the new element.
         */
        val c = bigDataObjectMapper.writeValueAsString(r)
        val messages: JsonNode = bigDataObjectMapper.readTree(c)

        val captureLibResultNode: ObjectNode =
          messages.path("captureLibResult").get(0).asInstanceOf[ObjectNode]
        captureLibResultNode.put("newKey", 12)
      })
    })
  })
}
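The post stops after the put call, but the same nested-array access can be checked in isolation outside Spark. A minimal sketch, where the sample record is invented and only the captureLibResult field name and the newKey addition come from the code above; the resulting String is what would then be handed to the broadcast producer:

import com.fasterxml.jackson.databind.{JsonNode, ObjectMapper}
import com.fasterxml.jackson.databind.node.ObjectNode

object CaptureLibResultDemo {
  def main(args: Array[String]): Unit = {
    val mapper = new ObjectMapper()

    // Invented sample record: captureLibResult is assumed to be an array of objects,
    // matching the shape the compute logic above expects.
    val json = """{"captureLibResult":[{"libId":"lib-1","score":0.87}]}"""
    val messages: JsonNode = mapper.readTree(json)

    // path() walks to the array, get(0) takes its first element,
    // and the cast to ObjectNode makes that element mutable.
    val captureLibResultNode: ObjectNode =
      messages.path("captureLibResult").get(0).asInstanceOf[ObjectNode]
    captureLibResultNode.put("newKey", 12)

    // Serialize the whole tree back to a String, e.g. before sending it to Kafka.
    println(mapper.writeValueAsString(messages))
    // {"captureLibResult":[{"libId":"lib-1","score":0.87,"newKey":12}]}
  }
}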

Reposted from blog.csdn.net/dddddssssa/article/details/107353341