原创-大数据平台权限设计分享-hive on spark权限问题

2019-05-15  本文已影响0人  无色的叶

在配置hive on spark模式时,使用export HADOOP_USER_NAME=XXX会出现proxy用户无权限问题,一般解决方案是修改集群配置文件,添加如下形式的代理用户(proxyuser)属性(下面以 HttpFS 的 httpfs.proxyuser.* 为例):

<property>
        <name>httpfs.proxyuser.hadoop.hosts</name>
        <value>*</value>
    </property>
    <property>
        <name>httpfs.proxyuser.hadoop.groups</name>
        <value>*</value>
    </property>

但是该种方式存在一个问题:虚拟用户是动态添加并分配给不同需求方授权使用的,不能每次新增用户都去修改集群配置并重启集群。笔者的做法是:不论以什么用户执行,都强制代理组为 hadoop,这样既无需频繁修改配置,也无需重启集群。

实现思路

修改hdfs-site.xml配置

<property>
        <name>hadoop.security.impersonation.provider.class</name>
        <value>org.apache.hadoop.security.authorize.my.MyDefaultImpersonationProvider</value>
    </property>

重写 MyDefaultImpersonationProvider 类的 authorize 方法,强制代理组都是 hadoop;操作 hive 库表数据的权限则交给权限校验服务去完成。

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.security.authorize.my;

import com.google.common.annotations.VisibleForTesting;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
import org.apache.hadoop.util.MachineList;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;

@InterfaceStability.Unstable
@InterfaceAudience.Public
public class MyDefaultImpersonationProvider extends DefaultImpersonationProvider {

    private static final Log LOG = LogFactory.getLog(MyDefaultImpersonationProvider.class);

    private static final String CONF_HOSTS = ".hosts";
    private static final String CONF_USERS = ".users";
    private static final String CONF_GROUPS = ".groups";

    /**
     * Group that every real (proxying) user is force-allowed to impersonate.
     * With AccessControlList(realUser, FORCED_PROXY_GROUP), a proxy request is
     * accepted when the proxied user is the real user itself or a member of
     * this group, so no per-user hadoop.proxyuser.* cluster config is needed.
     */
    private static final String FORCED_PROXY_GROUP = "hadoop";

    // ACL and allowed-host list per proxy user. authorize() is called
    // concurrently (e.g. from NameNode RPC handler threads) and mutates these
    // maps lazily, so they must be thread-safe: a plain HashMap mutated by
    // concurrent put() can be corrupted. ConcurrentHashMap + putIfAbsent keeps
    // the check-then-act race benign (at worst one extra throwaway entry).
    private final Map<String, AccessControlList> proxyUserAcl =
            new ConcurrentHashMap<String, AccessControlList>();
    private final Map<String, MachineList> proxyHosts =
            new ConcurrentHashMap<String, MachineList>();

    // Normalized configuration prefix, always ending with '.'
    // (e.g. "hadoop.proxyuser.").
    private String configPrefix;

    /**
     * Loads the statically configured per-proxyuser ACLs and host lists from
     * this provider's Configuration. Entries discovered here are merged with
     * the forced entries that {@link #authorize} adds lazily at request time.
     *
     * @param configurationPrefix prefix of the proxyuser keys, with or
     *                            without a trailing dot
     */
    @Override
    public void init(String configurationPrefix) {
        configPrefix = configurationPrefix + (configurationPrefix.endsWith(".") ? "" : ".");

        // Build regexes matching:
        //   $configPrefix<ANY>.users
        //   $configPrefix<ANY>.groups
        //   $configPrefix<ANY>.hosts
        String prefixRegEx = configPrefix.replace(".", "\\.");
        String usersGroupsRegEx = prefixRegEx + "[^.]*("
                + Pattern.quote(CONF_USERS) + "|" + Pattern.quote(CONF_GROUPS) + ")";
        String hostsRegEx = prefixRegEx + "[^.]*" + Pattern.quote(CONF_HOSTS);

        // Collect the users/groups allowed for each configured proxy user.
        Map<String, String> allMatchKeys = super.getConf().getValByRegex(usersGroupsRegEx);
        for (Entry<String, String> entry : allMatchKeys.entrySet()) {
            String aclKey = getAclKey(entry.getKey());
            if (!proxyUserAcl.containsKey(aclKey)) {
                proxyUserAcl.put(aclKey, new AccessControlList(
                        allMatchKeys.get(aclKey + CONF_USERS),
                        allMatchKeys.get(aclKey + CONF_GROUPS)));
            }
        }

        // Collect the hosts each proxy user may connect from.
        allMatchKeys = super.getConf().getValByRegex(hostsRegEx);
        for (Entry<String, String> entry : allMatchKeys.entrySet()) {
            proxyHosts.put(entry.getKey(), new MachineList(entry.getValue()));
        }
    }

    /**
     * Authorizes an impersonated call. Unlike the stock
     * DefaultImpersonationProvider, a missing proxyuser configuration entry is
     * not fatal: the real user is force-registered with an ACL covering the
     * {@value #FORCED_PROXY_GROUP} group and a wildcard host list, so the
     * cluster config never needs to be edited (or the cluster restarted) when
     * new virtual users are added. Fine-grained Hive table permissions are
     * expected to be enforced by a separate authorization service.
     *
     * @param user          the (possibly proxied) caller UGI
     * @param remoteAddress IP address the call came from
     * @throws AuthorizationException if the impersonation is rejected
     */
    @Override
    public void authorize(UserGroupInformation user, String remoteAddress)
            throws AuthorizationException {
        UserGroupInformation realUser = user.getRealUser();
        if (realUser == null) {
            // Not a proxied call; nothing to authorize.
            return;
        }

        String superUser = realUser.getShortUserName();
        String aclKey = configPrefix + superUser;

        // Lazily force-register the ACL for this real user. putIfAbsent is
        // atomic, unlike the original containsKey()/put() pair.
        AccessControlList acl = proxyUserAcl.get(aclKey);
        if (acl == null) {
            LOG.info("Registering forced proxy ACL for user " + superUser
                    + " with group " + FORCED_PROXY_GROUP);
            AccessControlList created = new AccessControlList(superUser, FORCED_PROXY_GROUP);
            AccessControlList prior = proxyUserAcl.putIfAbsent(aclKey, created);
            acl = (prior != null) ? prior : created;
        }
        if (!acl.isUserAllowed(user)) {
            throw new AuthorizationException("User: " + realUser.getUserName()
                    + " is not allowed to impersonate " + user.getUserName());
        }

        // Likewise force-register a wildcard host list for this real user,
        // then verify the caller's address against it.
        String hostKey = getProxySuperuserIpConfKey(superUser);
        MachineList allowedHosts = proxyHosts.get(hostKey);
        if (allowedHosts == null) {
            MachineList created = new MachineList("*");
            MachineList prior = proxyHosts.putIfAbsent(hostKey, created);
            allowedHosts = (prior != null) ? prior : created;
        }
        if (!allowedHosts.includes(remoteAddress)) {
            throw new AuthorizationException("Unauthorized connection for super-user: "
                    + realUser.getUserName() + " from IP " + remoteAddress);
        }
    }

    /**
     * Strips the trailing ".users"/".groups" segment from a configuration key,
     * yielding the per-proxyuser ACL key (configPrefix + username).
     */
    private String getAclKey(String key) {
        int endIndex = key.lastIndexOf(".");
        return (endIndex != -1) ? key.substring(0, endIndex) : key;
    }

    /**
     * Returns configuration key for effective users allowed for a superuser.
     *
     * @param userName name of the superuser
     * @return configuration key for superuser users
     */
    public String getProxySuperuserUserConfKey(String userName) {
        return configPrefix + userName + CONF_USERS;
    }

    /**
     * Returns configuration key for effective groups allowed for a superuser.
     *
     * @param userName name of the superuser
     * @return configuration key for superuser groups
     */
    public String getProxySuperuserGroupConfKey(String userName) {
        return configPrefix + userName + CONF_GROUPS;
    }

    /**
     * Returns configuration key for superuser ip addresses.
     *
     * @param userName name of the superuser
     * @return configuration key for superuser ip-addresses
     */
    public String getProxySuperuserIpConfKey(String userName) {
        return configPrefix + userName + CONF_HOSTS;
    }

    /** Snapshot of the groups portion of each proxyuser ACL, for tests. */
    @VisibleForTesting
    public Map<String, Collection<String>> getProxyGroups() {
        Map<String, Collection<String>> proxyGroups =
                new HashMap<String, Collection<String>>();
        for (Entry<String, AccessControlList> entry : proxyUserAcl.entrySet()) {
            proxyGroups.put(entry.getKey() + CONF_GROUPS, entry.getValue().getGroups());
        }
        return proxyGroups;
    }

    /** Snapshot of the allowed-host lists per proxyuser, for tests. */
    @VisibleForTesting
    public Map<String, Collection<String>> getProxyHosts() {
        Map<String, Collection<String>> tmpProxyHosts =
                new HashMap<String, Collection<String>>();
        for (Entry<String, MachineList> proxyHostEntry : proxyHosts.entrySet()) {
            tmpProxyHosts.put(proxyHostEntry.getKey(),
                    proxyHostEntry.getValue().getCollection());
        }
        return tmpProxyHosts;
    }

}

上一篇下一篇

猜你喜欢

热点阅读