If the expected result doesn't appear, feel free to message me for help.
I'm the "Educoder (头歌) challenge king" and have worked through more than 7,000 Educoder exercises; if you need other exercises, you can message me about those too.

DBHelper class

A small JDBC helper that registers the MySQL driver once and caches a single shared Connection.

package com;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
public class DBHelper {
    /********** begin **********/
    private static final String driver = "com.mysql.jdbc.Driver";
    private static final String url = "jdbc:mysql://localhost:3306/mydb?useUnicode=true&characterEncoding=UTF-8";
    private static final String username = "root";
    private static final String password = "123123";
    private static Connection conn = null;
    static {
        try {
            Class.forName(driver);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
    public static Connection getConnection() {
        // Lazily open the connection on first use and cache it afterwards.
        if (conn == null) {
            try {
                conn = DriverManager.getConnection(url, username, password);
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
        return conn;
    }
    public static void main(String[] args) {
        Connection connection = DBHelper.getConnection();
    }
    /********** end **********/
}
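
For a quick standalone check, something like the sketch below can be run against the same database (the province table and its two columns are assumptions taken from the mapper that follows; note that on MySQL Connector/J 8.x the driver class name would be com.mysql.cj.jdbc.Driver instead):

package com;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;

public class DBHelperDemo {
    public static void main(String[] args) throws Exception {
        Connection conn = DBHelper.getConnection();
        // Print every city_code -> city_name pair from the assumed province table.
        try (Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery("select * from province")) {
            while (rs.next()) {
                System.out.println(rs.getString(1) + " -> " + rs.getString(2));
            }
        }
    }
}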

JsonMap class

A map-only Mapper that parses each JSON line, averages the salary range, skips incomplete records, and emits one HBase Put per job posting.

package com;
import com.alibaba.fastjson.JSONObject;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.Map;
public class JsonMap extends Mapper<LongWritable, Text, NullWritable, Put> {
    /********** begin **********/
    // Cache of city_code -> city_name pairs loaded from MySQL in setup().
    Map<String, String> pro = new HashMap<>();
    Put put;
    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
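        // Runs once per mapper, before any map() calls: load the
        // city_code -> city_name mapping from the MySQL province table.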
        Connection connection = DBHelper.getConnection();
        try {
            Statement statement = connection.createStatement();
            String sql = "select * from province";
            ResultSet resultSetA = statement.executeQuery(sql);
            while (resultSetA.next()) {
                String city_code = resultSetA.getString(1);
                String city_name = resultSetA.getString(2);
                pro.put(city_code, city_name);
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
    @Override
    public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String line = value.toString();
        // Parse the JSON record.
        JSONObject jsonObject = JSONObject.parseObject(line);
        String[] data = new String[14];
        data[0] = jsonObject.getString("id");
        data[1] = jsonObject.getString("company_name");
        data[2] = jsonObject.getString("eduLevel_name");
        data[3] = jsonObject.getString("emplType");
        data[4] = jsonObject.getString("jobName");
        String salary = jsonObject.getString("salary");
        // Salaries arrive as ranges like "10K-15K"; store the midpoint, or "0" if the format differs.
        if (salary != null && salary.contains("K-")) {
            Double a = Double.valueOf(salary.substring(0, salary.indexOf("K")));
            Double b = Double.valueOf(salary.substring(salary.indexOf("-") + 1, salary.lastIndexOf("K")));
            data[5] = (a + b) / 2 + "";
        } else {
            data[5] = "0";
        }
        data[6] = jsonObject.getString("createDate");
        data[7] = jsonObject.getString("endDate");
        String code = jsonObject.getString("city_code");
        // Stores the raw city code; swap in pro.get(code) to write the looked-up province name instead.
        //data[8] = pro.get(code);
        data[8] = code;
        data[9] = jsonObject.getString("companySize");
        data[10] = jsonObject.getString("welfare");
        data[11] = jsonObject.getString("responsibility");
        data[12] = jsonObject.getString("place");
        data[13] = jsonObject.getString("workingExp");
        // Skip the record if any field is missing or empty.
        for (String i : data) {
            if (i == null || i.equals("")) {
                return;
            }
        }
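        // Row key is the job id; every other field goes into the 'info' column family.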
        String columnFamily = "info";
        put = new Put(data[0].getBytes());
        put.addColumn(columnFamily.getBytes(), "company_name".getBytes(), data[1].getBytes());
        put.addColumn(columnFamily.getBytes(), "eduLevel_name".getBytes(), data[2].getBytes());
        put.addColumn(columnFamily.getBytes(), "emplType".getBytes(), data[3].getBytes());
        put.addColumn(columnFamily.getBytes(), "jobName".getBytes(), data[4].getBytes());
        put.addColumn(columnFamily.getBytes(), "salary".getBytes(), data[5].getBytes());
        put.addColumn(columnFamily.getBytes(), "createDate".getBytes(), data[6].getBytes());
        put.addColumn(columnFamily.getBytes(), "endDate".getBytes(), data[7].getBytes());
        put.addColumn(columnFamily.getBytes(), "city_name".getBytes(), data[8].getBytes());
        put.addColumn(columnFamily.getBytes(), "companySize".getBytes(), data[9].getBytes());
        put.addColumn(columnFamily.getBytes(), "welfare".getBytes(), data[10].getBytes());
        put.addColumn(columnFamily.getBytes(), "responsibility".getBytes(), data[11].getBytes());
        put.addColumn(columnFamily.getBytes(), "place".getBytes(), data[12].getBytes());
        put.addColumn(columnFamily.getBytes(), "workingExp".getBytes(), data[13].getBytes());
        context.write(NullWritable.get(), put);
    }
    /********** end **********/
}
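
The salary field comes in as a range string such as "10K-15K", and the mapper stores the midpoint of that range. A minimal sanity check of the parsing logic (the sample value is hypothetical):

package com;
public class SalaryParseDemo {
    public static void main(String[] args) {
        String salary = "10K-15K"; // hypothetical sample value
        if (salary != null && salary.contains("K-")) {
            // "10K-15K" -> a = 10.0, b = 15.0 -> midpoint 12.5
            Double a = Double.valueOf(salary.substring(0, salary.indexOf("K")));
            Double b = Double.valueOf(salary.substring(salary.indexOf("-") + 1, salary.lastIndexOf("K")));
            System.out.println((a + b) / 2 + ""); // prints 12.5
        }
    }
}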

JsonTest class

The driver: it (re)creates the job table in HBase, then configures and submits the map-only job that loads the JSON data.

package com;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
public class JsonTest {
    public static void main(String[] args) throws Exception {
        Configuration config = HBaseConfiguration.create();
        // Point the HBase client at the ZooKeeper quorum.
        config.set("hbase.zookeeper.quorum", "127.0.0.1");
        Connection connection = ConnectionFactory.createConnection(config);
        Admin admin = connection.getAdmin();
        TableName tableName = TableName.valueOf("job");
        boolean isExists = admin.tableExists(tableName);
        if (isExists) {
            // Drop the table from any previous run so the job starts clean.
            admin.disableTable(tableName);
            admin.deleteTable(tableName);
        }
        TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
        ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("info")).build(); // build the column family descriptor
        tableDescriptor.setColumnFamily(family); // attach the column family
        admin.createTable(tableDescriptor.build()); // create the table
        /********** begin **********/
        Job job = Job.getInstance(config);
        job.setJarByClass(JsonTest.class);
        job.setMapperClass(JsonMap.class);
        job.setMapOutputKeyClass(NullWritable.class);
        // Map-only job, so set the number of reducers to 0.
        job.setNumReduceTasks(0);
        // Input path is hard-coded to the JSON file on HDFS instead of being read from args.
        FileInputFormat.setInputPaths(job, new Path("/root/data/data.json"));
        // Direct the mapper's Puts into the 'job' HBase table (no reducer class is needed).
        TableMapReduceUtil.initTableReducerJob("job", null, job);
        TableMapReduceUtil.addDependencyJars(job);
        job.waitForCompletion(true);
        /********** end **********/
    }
}
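
After the job finishes, the rows can be spot-checked programmatically as well. A minimal scan sketch using the standard HBase client API (table and column names taken from the code above):

package com;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class JobTableScan {
    public static void main(String[] args) throws Exception {
        Configuration config = HBaseConfiguration.create();
        config.set("hbase.zookeeper.quorum", "127.0.0.1");
        try (Connection connection = ConnectionFactory.createConnection(config);
             Table table = connection.getTable(TableName.valueOf("job"));
             ResultScanner scanner = table.getScanner(new Scan())) {
            // Print the row key and one sample column for every stored job posting.
            for (Result r : scanner) {
                String rowKey = Bytes.toString(r.getRow());
                String jobName = Bytes.toString(r.getValue(Bytes.toBytes("info"), Bytes.toBytes("jobName")));
                System.out.println(rowKey + "\t" + jobName);
            }
        }
    }
}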

Finally, enter the following at the command line:

start-hbase.sh

then press Enter and run the evaluation.
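
To eyeball the table before submitting, the HBase shell works as well (assuming the hbase binary is on the PATH):

hbase shell
scan 'job', {LIMIT => 5}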
If any step is unclear, feel free to message me.
