Presto Installation, Deployment, and Testing

Source: https://blog.csdn.net/u014384314/article/details/87644058

1. Installation

References:

https://prestodb.github.io/docs/current/installation/deployment.html

https://blog.csdn.net/eason_oracle/article/details/53160491

1.1 Download: open the link above, download presto-server-0.216.tar.gz (any download tool such as Thunder works), and upload it to the /presto directory on every node.

mkdir /presto

[root@master01 presto]# pwd
/presto
[root@master01 presto]# ls
 presto-server-0.216.tar.gz
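If the nodes have direct internet access, the tarball can also be fetched and distributed from the command line instead of using a download tool; a minimal sketch (the Maven Central URL is an assumption based on the standard com.facebook.presto artifact layout, and passwordless root SSH between nodes is assumed):

[root@master01 presto]# wget https://repo1.maven.org/maven2/com/facebook/presto/presto-server/0.216/presto-server-0.216.tar.gz
[root@master01 presto]# scp presto-server-0.216.tar.gz root@slave01:/presto/
[root@master01 presto]# scp presto-server-0.216.tar.gz root@slave02:/presto/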


1.2 Extract: tar -xzvf presto-server-0.216.tar.gz

1.3 Create the data directory: mkdir /presto/data

[root@master01 presto]# ls
data  presto-server-0.216  presto-server-0.216.tar.gz


Create the etc configuration directory on every node:

[root@slave01 presto-server-0.216]# cd /presto/presto-server-0.216

[root@slave01 presto-server-0.216]# mkdir etc

[root@slave01 presto-server-0.216]# cd etc

On the coordinator node (master01):

vi config.properties

coordinator=true
node-scheduler.include-coordinator=false
http-server.http.port=8080
query.max-memory=50GB
query.max-memory-per-node=1GB
query.max-total-memory-per-node=2GB
discovery-server.enabled=true
discovery.uri=http://master01:8080


[root@master01 etc]# vi jvm.config
-server
-Xmx16G
-XX:+UseG1GC
-XX:G1HeapRegionSize=32M
-XX:+UseGCOverheadLimit
-XX:+ExplicitGCInvokesConcurrent
-XX:+HeapDumpOnOutOfMemoryError
-XX:+ExitOnOutOfMemoryError


[root@master01 etc]# vi log.properties

com.facebook.presto=INFO

[root@master01 etc]# vi node.properties

node.environment=production
node.id=master01
node.data-dir=/presto/data

[root@master01 etc]# pwd
/presto/presto-server-0.216/etc
[root@master01 etc]# mkdir catalog
[root@master01 etc]# cd catalog

[root@master01 catalog]# ls
gp1.properties  gp.properties  hive.properties  postgresql.properties
[root@master01 catalog]#

[root@master01 catalog]# vi hive.properties

connector.name=hive-hadoop2
hive.metastore.uri=thrift://master01:9083
#hive.config.resources=/etc/hadoop/conf/core-site.xml,/etc/hadoop/conf/hdfs-site.xml
#hive.allow-drop-table=true

[root@master01 catalog]# vi postgresql.properties

connector.name=postgresql
connection-url=jdbc:postgresql://192.168.108.126:5432/test
connection-user=postgres
connection-password=passwd

[root@master01 catalog]# vi gp.properties

connector.name=postgresql
connection-url=jdbc:postgresql://192.168.108.185:5432/tutengfei
connection-user=gpadmin
connection-password=gpadmin

[root@master01 catalog]# vi gp1.properties

connector.name=postgresql
connection-url=jdbc:postgresql://192.168.108.185:5432/postgres
connection-user=gpadmin
connection-password=gpadmin

On the worker nodes (slave01, slave02):

[root@slave01 etc]# vi config.properties

coordinator=false
http-server.http.port=8080
query.max-memory=50GB
query.max-memory-per-node=1GB
query.max-total-memory-per-node=2GB
discovery.uri=http://master01:8080


[root@slave01 etc]# vi jvm.config

-server
-Xmx16G
-XX:+UseG1GC
-XX:G1HeapRegionSize=32M
-XX:+UseGCOverheadLimit
-XX:+ExplicitGCInvokesConcurrent
-XX:+HeapDumpOnOutOfMemoryError
-XX:+ExitOnOutOfMemoryError

[root@slave01 etc]# vi log.properties

com.facebook.presto=INFO


[root@slave01 etc]# vi node.properties

node.environment=production
node.id=slave01
node.data-dir=/presto/data

[root@slave01 etc]# mkdir catalog

[root@slave01 etc]# cd catalog

[root@slave01 catalog]# ls
gp1.properties  gp.properties  hive.properties  postgresql.properties 

These catalog (data source) configuration files are identical to the ones on master01, so they are not repeated here.
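Rather than recreating them by hand, the catalog files can simply be copied from master01 to each worker; a quick sketch, assuming root SSH access between the nodes:

[root@master01 catalog]# scp /presto/presto-server-0.216/etc/catalog/*.properties root@slave01:/presto/presto-server-0.216/etc/catalog/
[root@master01 catalog]# scp /presto/presto-server-0.216/etc/catalog/*.properties root@slave02:/presto/presto-server-0.216/etc/catalog/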

Start Presto on every node:

[root@master01 presto-server-0.216]# pwd
/presto/presto-server-0.216
[root@master01 presto-server-0.216]# bin/launcher restart
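The launcher script also supports start, stop, status, and run (foreground) subcommands. A quick health check after starting; the log path follows from node.data-dir=/presto/data configured above:

[root@master01 presto-server-0.216]# bin/launcher status
[root@master01 presto-server-0.216]# tail -f /presto/data/var/log/server.log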


 

Stop Presto on every node:

[root@master01 presto-server-0.216]# bin/launcher stop

Java JDBC cross-datasource test:
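The test program needs the Presto JDBC driver on the classpath; the Maven coordinates should be com.facebook.presto:presto-jdbc:0.216 to match the server version. The driver class (com.facebook.presto.jdbc.PrestoDriver) registers itself automatically under JDBC 4, so no explicit Class.forName call is required.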

/*
 * www.unisinsight.com Inc.
 * Copyright (c) 2018 All Rights Reserved
 */
package com.test.presto;

import org.joda.time.DateTime;

import java.sql.*;
import java.util.ArrayList;
import java.util.List;

/**
 * description
 *
 * @author yuwei [[email protected]]
 * @date 2019/02/18 16:57
 * @since 1.0
 */
public class PrestoTest {

    public static void main(String[] args) {
        String url = "jdbc:presto://master01:8080";
        try {
            Connection connection = DriverManager.getConnection(url, "test", "");
            List<String> resList = new ArrayList<String>();
            Statement stmt = connection.createStatement();
            // select s.* from hive.default.student1 as s
            // join a Hive table, a PostgreSQL table, and a Greenplum (gp) table in a single query
            ResultSet rset = stmt.executeQuery("select s.id,p.name from gp.public.person as p join hive.default.student1 as s " +
                    "on s.id=p.id join postgresql.public.test1 as t on t.id=s.id");
            while (rset.next()) {
                // read the result columns by name
                resList.add(rset.getString("name"));
                System.out.println("id:" + rset.getInt("id") + " name:" + rset.getString("name"));
            }
            System.out.println(resList.size());
            stmt.close();
            connection.close();

        } catch (SQLException e) {
            e.printStackTrace();
        }

        gp(url);
    }

    /*
     * Join tables from two different databases under the same data source (Greenplum),
     * exposed here as the two catalogs gp and gp1.
     */
    public static void gp(String url) {
        //String url = "jdbc:presto://master01:8080";
        System.out.println(DateTime.now().toString("yyyy-MM-dd HH:mm:ss"));
        try {
            Connection connection = DriverManager.getConnection(url, "test", "");
            List<String> resList = new ArrayList<String>();
            Statement stmt = connection.createStatement();
            // hive.default.student1: hive is the catalog (hive.properties), default is the schema, student1 is the table
            // gp.public.people_info as p: gp is the catalog (gp.properties), public is the schema, people_info is the table; the database is already fixed in gp.properties
            // gp1.public.maimai_copy2 as s: gp1 is the catalog (gp1.properties), public is the schema, maimai_copy2 is the table; the database is already fixed in gp1.properties
            ResultSet rset = stmt.executeQuery("select s.id,p.name from gp.public.people_info as p join gp1.public.maimai_copy2 as s " +
                    "on s.id=p.id ");
            while (rset.next()) {
                // collect the name column
                resList.add(rset.getString("name"));
                // System.out.println("id:" + rset.getInt("id") + "name:" + rset.getString("name"));
            }
            System.out.println(resList.size());
            System.out.println(DateTime.now().toString("yyyy-MM-dd HH:mm:ss"));
            stmt.close();
            connection.close();

        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}

Calling gp() to join an 80-million-row table with a 20-million-row table produces the following output:

2019-02-18 20:55:07
11305126
2019-02-18 20:56:47

Process finished with exit code 0

Monitoring page: the Presto web UI is served by the coordinator at http://master01:8080 (the discovery.uri configured above).
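A quick way to confirm from the shell that all workers have registered is the coordinator's REST API; a sketch using the node-listing endpoint, assuming the port configured above:

[root@master01 presto]# curl http://master01:8080/v1/node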

Command line (CLI):
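The presto executable used below is the separate CLI client: the single-file presto-cli-0.216-executable.jar is downloaded, renamed to presto, and made executable (a sketch; the Maven Central URL is an assumption based on the standard artifact layout):

[root@master01 presto]# wget https://repo1.maven.org/maven2/com/facebook/presto/presto-cli/0.216/presto-cli-0.216-executable.jar
[root@master01 presto]# mv presto-cli-0.216-executable.jar presto
[root@master01 presto]# chmod +x presto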

[root@master01 presto]# pwd
/presto

[root@master01 presto]# ./presto --server master01:8080 --catalog hive --schema default
presto:default> show tables;
  Table
----------
 student
 student1
(2 rows)

Query 20190312_075322_00012_2f2gs, FINISHED, 3 nodes
Splits: 36 total, 36 done (100.00%)
0:01 [2 rows, 49B] [3 rows/s, 86B/s]

presto:default> select*from student;
 id |  name   | age
----+---------+-----
  1 | zhngsan |  21
  2 | lisi    |  24
  3 | wangwu  |  54
(3 rows)

Query 20190312_075338_00013_2f2gs, FINISHED, 2 nodes
Splits: 17 total, 17 done (100.00%)
0:03 [3 rows, 49B] [0 rows/s, 16B/s]

presto:default>

# Cross-catalog query
[root@master01 presto]# ./presto --server master01:8080

presto> select *from hive.default.student s join postgresql.public.person p on  s.id=p.id;
 id |  name   | age | id |    name    | age
----+---------+-----+----+------------+-----
  2 | lisi    |  24 |  2 | LISI       |  65
  3 | wangwu  |  54 |  3 | lisi435342 | 651
  1 | zhngsan |  21 |  1 | zhangSan   |  21
(3 rows)

Query 20190312_075950_00020_2f2gs, FINISHED, 2 nodes
Splits: 114 total, 114 done (100.00%)
0:01 [6 rows, 49B] [5 rows/s, 42B/s]

presto>


 
