读取文件中的数据并写入 MySQL 数据表

版权声明:本文为博主原创文章,未经博主允许不得转载。 https://blog.csdn.net/qq_28844767/article/details/80491833


package mapreduce.format.dbOutputFormat;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
// NOTE(review): the original imported DBConfiguration/DBOutputFormat from the
// legacy org.apache.hadoop.mapred.lib.db package, whose configureDB/setOutput
// methods take a JobConf — incompatible with the new-API Job used below. The
// new-API equivalents (same simple names, so both cannot be imported) are used
// instead; this matches Student, which implements mapreduce.lib.db.DBWritable.
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBOutputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

/**
 * 描述 : 读取文件的数据往mysql数据表中写出 
 *
 */

public class MyDBOutputFormatMR {
	
	private static final String driverClass = "com.mysql.jdbc.Driver";
	private static final String dbUrl = "jdbc:mysql://hadoop01:3306/jdbc_test?characterEncoding=utf-8";
	private static final String username = "root";
	private static final String password = "root";
	
	public static void main(String[] args) throws Exception {
		
		Configuration conf = new Configuration();
		
//		conf.set(DBConfiguration.DRIVER_CLASS_PROPERTY, driverClass);
//		conf.set(DBConfiguration.URL_PROPERTY, dbUrl);
//		conf.set(DBConfiguration.USERNAME_PROPERTY, username);
//		conf.set(DBConfiguration.PASSWORD_PROPERTY, password);
		
		DBConfiguration.configureDB(conf, driverClass, dbUrl,username,password);
		
		Job job = Job.getInstance(conf);
		
		job.setJarByClass(MyDBOutputFormatMR.class);
		job.setMapperClass(MyDBOutputFormatMRMapper.class);
		job.setMapOutputKeyClass(Student.class);
		job.setMapOutputValueClass(NullWritable.class);
		
		job.setReducerClass(MyDBOutputFormatMRReducer.class);
		job.setOutputKeyClass(Student.class);
		job.setOutputValueClass(NullWritable.class);
		
		job.setNumReduceTasks(0);
		
		job.setInputFormatClass(TextInputFormat.class);
		
		/**
		 * 指定数据输出目的地: 那张表
		 */
		job.setOutputFormatClass(DBOutputFormat.class);
		
//		此处给出的字段名称约束 一定要和Student类中描述读进或者写出的字段名称个数一致
		DBOutputFormat.setOutput(job, "student", new String[]{"id", "name", "sex", "age", "department"});
		
		Path inputPath = new Path("D:\\bigdata\\a.txt");
		FileInputFormat.setInputPaths(job, inputPath);
		
		System.exit(job.waitForCompletion(true) ? 0 : 1);
		
	}
	
	static class MyDBOutputFormatMRMapper extends Mapper<LongWritable, Text, Student, NullWritable>{

		@Override
		protected void map(LongWritable key, Text value,Context context)
				throws IOException, InterruptedException {

			String[] split = value.toString().split("\t");
			Student student = new Student();
			student.setId(Integer.parseInt(split[0]));
			student.setName(split[1]);
			student.setSex(split[2]);
			student.setAge(Integer.parseInt(split[3]));
			student.setDepartment(split[4]);
			
			context.write(student, NullWritable.get());

		}
			
	}
	
	static class MyDBOutputFormatMRReducer extends Reducer<Student, NullWritable, Student, NullWritable>{

		@Override
		protected void reduce(Student key, Iterable<NullWritable> values,Context context)
				throws IOException, InterruptedException {

			for(NullWritable nvl : values){
				
				context.write(key, NullWritable.get());
				
			}

		}
			
	}
	
}

下面的 Student 类用于把数据库中的一条记录封装成对应的 Java 对象，

它同时实现了 WritableComparable&lt;Student&gt; 和 DBWritable 两个接口：

package mapreduce.format.dbOutputFormat;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;

/**
 * Bean that maps one row of the {@code student} table to a Java object.
 *
 * <p>Implements Hadoop (de)serialization via {@code write}/{@code readFields}
 * on {@code DataOutput}/{@code DataInput}, and JDBC binding via the
 * {@code DBWritable} overloads on {@code PreparedStatement}/{@code ResultSet}.
 * The JDBC field order (id, name, sex, age, department) must match the column
 * list passed to {@code DBOutputFormat.setOutput}.
 */
public class Student implements WritableComparable<Student>, DBWritable {
	private int id;
	private String name;
	private String sex;
	private int age;
	private String department;

	public Student(int id, String name, String sex, int age, String department) {
		super();
		this.id = id;
		this.name = name;
		this.sex = sex;
		this.age = age;
		this.department = department;
	}

	/** No-arg constructor required by Hadoop's reflective instantiation. */
	public Student() {
		super();
	}

	public int getId() {
		return id;
	}
	public void setId(int id) {
		this.id = id;
	}
	public String getName() {
		return name;
	}
	public void setName(String name) {
		this.name = name;
	}
	public String getSex() {
		return sex;
	}
	public void setSex(String sex) {
		this.sex = sex;
	}
	public int getAge() {
		return age;
	}
	public void setAge(int age) {
		this.age = age;
	}
	public String getDepartment() {
		return department;
	}
	public void setDepartment(String department) {
		this.department = department;
	}

	@Override
	public String toString() {
		return id + "\t" + name + "\t" + sex + "\t" + age + "\t" + department;
	}

	/** Hadoop deserialization; field order must mirror {@link #write(DataOutput)}. */
	@Override
	public void readFields(DataInput in) throws IOException {
		this.id = in.readInt();
		this.name = in.readUTF();
		this.sex = in.readUTF();
		this.age = in.readInt();
		this.department = in.readUTF();
	}

	/** Hadoop serialization; field order must mirror {@link #readFields(DataInput)}. */
	@Override
	public void write(DataOutput out) throws IOException {
		out.writeInt(id);
		out.writeUTF(name);
		out.writeUTF(sex);
		out.writeInt(age);
		out.writeUTF(department);
	}

	/**
	 * Sort order — and therefore the shuffle grouping rule — is by id only.
	 * BUGFIX: the original returned {@code this.id - o.id}, which overflows
	 * when the ids differ by more than Integer.MAX_VALUE.
	 */
	@Override
	public int compareTo(Student o) {
		return Integer.compare(this.id, o.id);
	}

	// equals/hashCode keyed on id, consistent with compareTo (Comparable
	// contract: compareTo == 0 should imply equals).
	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (!(obj instanceof Student)) {
			return false;
		}
		return this.id == ((Student) obj).id;
	}

	@Override
	public int hashCode() {
		return Integer.hashCode(id);
	}

	/** Binds this object's fields to the INSERT statement's ? placeholders, 1-based. */
	@Override
	public void write(PreparedStatement statement) throws SQLException {
		statement.setInt(1, id);
		statement.setString(2, name);
		statement.setString(3, sex);
		statement.setInt(4, age);
		statement.setString(5, department);
	}

	/** Populates this object from a JDBC result row, 1-based column indexes. */
	@Override
	public void readFields(ResultSet resultSet) throws SQLException {
		this.id = resultSet.getInt(1);
		this.name = resultSet.getString(2);
		this.sex = resultSet.getString(3);
		this.age = resultSet.getInt(4);
		this.department = resultSet.getString(5);
	}

}


猜你喜欢

转载自blog.csdn.net/qq_28844767/article/details/80491833
今日推荐