cluster_ssh.py — source file

python
Reads: 22 · Bookmarks: 0 · Likes: 0 · Comments: 0

Project: aztk · Author: Azure · Project source code · Source file
def execute(args: typing.NamedTuple):
    """SSH into (or print the SSH command for) a Spark cluster's head node.

    Merges the parsed CLI arguments into an ``SshConfig``, logs the resolved
    cluster id, forwarded UI ports, and username, then builds the ssh command
    via ``utils.ssh_in_master``. When ``connect`` is falsy the command is only
    printed instead of executed.

    Raises:
        aztk.error.AztkError: when the target cluster (Batch pool) does not exist.
    """
    client = load_spark_client()

    # Fold the CLI arguments into the SSH configuration object.
    config = SshConfig()
    config.merge(**dict(
        cluster_id=args.cluster_id,
        username=args.username,
        job_ui_port=args.jobui,
        job_history_ui_port=args.jobhistoryui,
        web_ui_port=args.webui,
        jupyter_port=args.jupyter,
        name_node_ui_port=args.namenodeui,
        rstudio_server_port=args.rstudioserver,
        host=args.host,
        connect=args.connect,
    ))

    # Show the user which localhost ports each web UI will be forwarded to.
    prefix = 'http://localhost:'
    log.info("-------------------------------------------")
    log.info("spark cluster id:    %s", config.cluster_id)
    log.info("open webui:          %s%s", prefix, config.web_ui_port)
    log.info("open jobui:          %s%s", prefix, config.job_ui_port)
    log.info("open jobhistoryui:   %s%s", prefix, config.job_history_ui_port)
    log.info("open jupyter:        %s%s", prefix, config.jupyter_port)
    log.info("open namenodeui:     %s%s", prefix, config.name_node_ui_port)
    log.info("open rstudio server: %s%s", prefix, config.rstudio_server_port)
    log.info("ssh username:        %s", config.username)
    log.info("connect:             %s", config.connect)
    log.info("-------------------------------------------")

    try:
        # Build (and, when config.connect is set, run) the ssh command.
        command = utils.ssh_in_master(
            client=client,
            cluster_id=config.cluster_id,
            webui=config.web_ui_port,
            jobui=config.job_ui_port,
            jobhistoryui=config.job_history_ui_port,
            namenodeui=config.name_node_ui_port,
            jupyter=config.jupyter_port,
            rstudioserver=config.rstudio_server_port,
            username=config.username,
            host=config.host,
            connect=config.connect)

        if not config.connect:
            # Not connecting automatically — show the command for manual use.
            log.info("")
            log.info("Use the following command to connect to your spark head node:")
            log.info("\t%s", command)

    except batch_error.BatchErrorException as e:
        # Translate a missing Batch pool into a friendlier AZTK error;
        # re-raise everything else untouched.
        if e.error.code != "PoolNotFound":
            raise
        raise aztk.error.AztkError("The cluster you are trying to connect to does not exist.")
Comment list
Table of contents


Questions


Interview experiences


Articles

WeChat
Official account

Scan the QR code to follow the official account