AMBARI-21581 - Replace Hard Coded conf-select Structures (jonathanhurley)
author Jonathan Hurley <jhurley@hortonworks.com>
Tue, 1 Aug 2017 14:02:37 +0000 (10:02 -0400)
committer Jonathan Hurley <jhurley@hortonworks.com>
Wed, 2 Aug 2017 12:45:31 +0000 (08:45 -0400)
171 files changed:
ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/atlas_client.py
ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/metadata_server.py
ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_client.py
ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/phoenix_queryserver.py
ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/upgrade.py
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py
ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py
ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/nfsgateway.py
ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py
ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_client.py
ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_metastore.py
ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server.py
ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server_interactive.py
ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/webhcat_server.py
ambari-server/src/main/resources/common-services/KAFKA/0.10.0.3.0/package/scripts/kafka_broker.py
ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
ambari-server/src/main/resources/common-services/KNOX/0.5.0.3.0/package/scripts/knox_gateway.py
ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/package/scripts/oozie_client.py
ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/package/scripts/oozie_server.py
ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
ambari-server/src/main/resources/common-services/PIG/0.16.1.3.0/package/scripts/pig_client.py
ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_tagsync.py
ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_usersync.py
ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py [deleted file]
ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/package/scripts/ranger_admin.py
ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/package/scripts/ranger_tagsync.py
ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/package/scripts/ranger_usersync.py
ambari-server/src/main/resources/common-services/RANGER/1.0.0.3.0/package/scripts/upgrade.py
ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py
ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py [deleted file]
ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/package/scripts/kms_server.py
ambari-server/src/main/resources/common-services/RANGER_KMS/1.0.0.3.0/package/scripts/upgrade.py
ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
ambari-server/src/main/resources/common-services/SLIDER/0.91.0.3.0/package/scripts/slider_client.py
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/livy_server.py
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_client.py
ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/spark_thrift_server.py
ambari-server/src/main/resources/common-services/SPARK/2.2.0/package/scripts/job_history_server.py
ambari-server/src/main/resources/common-services/SPARK/2.2.0/package/scripts/livy_server.py
ambari-server/src/main/resources/common-services/SPARK/2.2.0/package/scripts/spark_client.py
ambari-server/src/main/resources/common-services/SPARK/2.2.0/package/scripts/spark_thrift_server.py
ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/job_history_server.py
ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/livy2_server.py
ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_client.py
ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_thrift_server.py
ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop_client.py
ambari-server/src/main/resources/common-services/SQOOP/1.4.4.3.0/package/scripts/sqoop_client.py
ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/drpc_server.py
ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus.py
ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/nimbus_prod.py
ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/pacemaker.py
ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisor.py
ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/supervisor_prod.py
ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/ui_server.py
ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/package/scripts/drpc_server.py
ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/package/scripts/nimbus.py
ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/package/scripts/nimbus_prod.py
ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/package/scripts/pacemaker.py
ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/package/scripts/supervisor.py
ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/package/scripts/supervisor_prod.py
ambari-server/src/main/resources/common-services/STORM/1.0.1.3.0/package/scripts/ui_server.py
ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py
ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/tez_client.py
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py
ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/application_timeline_server.py
ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/historyserver.py
ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/mapreduce2_client.py
ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/nodemanager.py
ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/resourcemanager.py
ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn_client.py
ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/master.py
ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_client.py
ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper_server.py
ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_packages.json [moved from ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_select_packages.json with 81% similarity]
ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_packages.json [moved from ambari-server/src/main/resources/stacks/HDP/3.0/properties/stack_select_packages.json with 80% similarity]
ambari-server/src/test/python/custom_actions/test_ru_set_all.py
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_client.py
ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
ambari-server/src/test/python/stacks/2.1/STORM/test_storm_drpc_server.py
ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus.py
ambari-server/src/test/python/stacks/2.1/STORM/test_storm_nimbus_prod.py
ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor.py
ambari-server/src/test/python/stacks/2.1/STORM/test_storm_supervisor_prod.py
ambari-server/src/test/python/stacks/2.1/STORM/test_storm_ui_server.py
ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
ambari-server/src/test/python/stacks/2.2/ACCUMULO/test_accumulo_client.py
ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py
ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py
ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_admin.py
ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_usersync.py
ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py
ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py
ambari-server/src/test/python/stacks/2.2/common/test_conf_select.py
ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
ambari-server/src/test/python/stacks/utils/RMFTestCase.py

index 4f11633..ffcaad5 100644 (file)
@@ -21,9 +21,9 @@ limitations under the License.
 __all__ = ["select", "create", "get_hadoop_conf_dir", "get_hadoop_dir", "get_package_dirs"]
 
 # Python Imports
-import copy
 import os
 import subprocess
+import ambari_simplejson as json
 
 # Local Imports
 import stack_select
@@ -41,191 +41,6 @@ from resource_management.core.shell import as_sudo
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions import StackFeature
 
-STACK_ROOT_PATTERN = "{{ stack_root }}"
-
-_PACKAGE_DIRS = {
-  "atlas": [
-    {
-      "conf_dir": "/etc/atlas/conf",
-      "current_dir": "{0}/current/atlas-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "accumulo": [
-    {
-      "conf_dir": "/etc/accumulo/conf",
-      "current_dir": "{0}/current/accumulo-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "falcon": [
-    {
-      "conf_dir": "/etc/falcon/conf",
-      "current_dir": "{0}/current/falcon-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "hadoop": [
-    {
-      "conf_dir": "/etc/hadoop/conf",
-      "current_dir": "{0}/current/hadoop-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "hbase": [
-    {
-      "conf_dir": "/etc/hbase/conf",
-      "current_dir": "{0}/current/hbase-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "hive": [
-    {
-      "conf_dir": "/etc/hive/conf",
-      "current_dir": "{0}/current/hive-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "hive2": [
-    {
-      "conf_dir": "/etc/hive2/conf",
-      "current_dir": "{0}/current/hive-server2-hive2/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "kafka": [
-    {
-      "conf_dir": "/etc/kafka/conf",
-      "current_dir": "{0}/current/kafka-broker/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "knox": [
-    {
-      "conf_dir": "/etc/knox/conf",
-      "current_dir": "{0}/current/knox-server/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "mahout": [
-    {
-      "conf_dir": "/etc/mahout/conf",
-      "current_dir": "{0}/current/mahout-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "nifi": [
-    {
-      "conf_dir": "/etc/nifi/conf",
-      "current_dir": "{0}/current/nifi/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "oozie": [
-    {
-      "conf_dir": "/etc/oozie/conf",
-      "current_dir": "{0}/current/oozie-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "phoenix": [
-    {
-      "conf_dir": "/etc/phoenix/conf",
-      "current_dir": "{0}/current/phoenix-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "ranger-admin": [
-    {
-      "conf_dir": "/etc/ranger/admin/conf",
-      "current_dir": "{0}/current/ranger-admin/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "ranger-tagsync": [
-    {
-      "conf_dir": "/etc/ranger/tagsync/conf",
-      "current_dir": "{0}/current/ranger-tagsync/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "ranger-kms": [
-    {
-      "conf_dir": "/etc/ranger/kms/conf",
-      "current_dir": "{0}/current/ranger-kms/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "ranger-usersync": [
-    {
-      "conf_dir": "/etc/ranger/usersync/conf",
-      "current_dir": "{0}/current/ranger-usersync/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "slider": [
-    {
-      "conf_dir": "/etc/slider/conf",
-      "current_dir": "{0}/current/slider-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "spark": [
-    {
-      "conf_dir": "/etc/spark/conf",
-      "current_dir": "{0}/current/spark-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "zeppelin": [
-    {
-      "conf_dir": "/etc/zeppelin/conf",
-      "current_dir": "{0}/current/zeppelin-server/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "spark2": [
-    {
-      "conf_dir": "/etc/spark2/conf",
-      "current_dir": "{0}/current/spark2-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "sqoop": [
-    {
-      "conf_dir": "/etc/sqoop/conf",
-      "current_dir": "{0}/current/sqoop-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "storm": [
-    {
-      "conf_dir": "/etc/storm/conf",
-      "current_dir": "{0}/current/storm-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "tez": [
-    {
-      "conf_dir": "/etc/tez/conf",
-      "current_dir": "{0}/current/tez-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "zookeeper": [
-    {
-      "conf_dir": "/etc/zookeeper/conf",
-      "current_dir": "{0}/current/zookeeper-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "pig": [
-    {
-      "conf_dir": "/etc/pig/conf",
-      "current_dir": "{0}/current/pig-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "flume": [
-    {
-      "conf_dir": "/etc/flume/conf",
-      "current_dir": "{0}/current/flume-server/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "storm-slider-client": [
-    {
-      "conf_dir": "/etc/storm-slider-client/conf",
-      "current_dir": "{0}/current/storm-slider-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "hive-hcatalog": [
-    {
-      "conf_dir": "/etc/hive-webhcat/conf",
-      "prefix": "/etc/hive-webhcat",
-      "current_dir": "{0}/current/hive-webhcat/etc/webhcat".format(STACK_ROOT_PATTERN)
-    },
-    {
-      "conf_dir": "/etc/hive-hcatalog/conf",
-      "prefix": "/etc/hive-hcatalog",
-      "current_dir": "{0}/current/hive-webhcat/etc/hcatalog".format(STACK_ROOT_PATTERN)
-    }
-  ]
-}
-
 DIRECTORY_TYPE_BACKUP = "backup"
 DIRECTORY_TYPE_CURRENT = "current"
 
@@ -241,13 +56,35 @@ def get_package_dirs():
   Get package dir mappings
   :return:
   """
+  stack_name = default("/hostLevelParams/stack_name", None)
+  if stack_name is None:
+    raise Fail("The stack name is not present in the command. Packages for conf-select tool cannot be loaded.")
+
+  stack_packages_config = default("/configurations/cluster-env/stack_packages", None)
+  if stack_packages_config is None:
+    raise Fail("The stack packages are not defined on the command. Unable to load packages for the conf-select tool")
+
+  data = json.loads(stack_packages_config)
+
+  if stack_name not in data:
+    raise Fail(
+      "Cannot find conf-select packages for the {0} stack".format(stack_name))
+
+  conf_select_key = "conf-select"
+  data = data[stack_name]
+  if conf_select_key not in data:
+    raise Fail(
+      "There are no conf-select packages defined for this command for the {0} stack".format(stack_name))
+
+  package_dirs = data[conf_select_key]
+
   stack_root = Script.get_stack_root()
-  package_dirs = copy.deepcopy(_PACKAGE_DIRS)
   for package_name, directories in package_dirs.iteritems():
     for dir in directories:
       current_dir = dir['current_dir']
-      current_dir = current_dir.replace(STACK_ROOT_PATTERN, stack_root)
+      current_dir = current_dir.format(stack_root)
       dir['current_dir'] = current_dir
+
   return package_dirs
 
 def create(stack_name, package, version, dry_run = False):
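
The net effect of the conf_select.py change above: get_package_dirs() no longer deep-copies a hard-coded _PACKAGE_DIRS dictionary, but parses the stack_packages property of cluster-env, indexes it by stack name and the "conf-select" key, and substitutes the stack root into each templated current_dir via str.format. A minimal, self-contained sketch of that flow — the sample mapping is trimmed to one package; the full set now lives in stack_packages.json:

import json

# Trimmed stand-in for /configurations/cluster-env/stack_packages; the real
# value carries every entry formerly hard-coded in _PACKAGE_DIRS.
STACK_PACKAGES_SAMPLE = json.dumps({
    "HDP": {
        "conf-select": {
            "hadoop": [
                {"conf_dir": "/etc/hadoop/conf",
                 "current_dir": "{0}/current/hadoop-client/conf"}
            ]
        }
    }
})

def get_package_dirs(stack_name="HDP", stack_root="/usr/hdp"):
    # Parse the JSON, pick out the stack, then its conf-select mapping.
    data = json.loads(STACK_PACKAGES_SAMPLE)
    package_dirs = data[stack_name]["conf-select"]
    # Substitute the stack root into each templated current_dir.
    for package_name, directories in package_dirs.items():
        for directory in directories:
            directory["current_dir"] = directory["current_dir"].format(stack_root)
    return package_dirs

print(get_package_dirs())
# {'hadoop': [{'conf_dir': '/etc/hadoop/conf',
#              'current_dir': '/usr/hdp/current/hadoop-client/conf'}]}
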
index 723871b..53c8e9f 100644 (file)
@@ -121,8 +121,6 @@ def get_packages(scope, service_name = None, component_name = None):
   """
   from resource_management.libraries.functions.default import default
 
-  import time
-
   if scope not in _PACKAGE_SCOPES:
     raise Fail("The specified scope of {0} is not valid".format(scope))
 
@@ -140,11 +138,11 @@ def get_packages(scope, service_name = None, component_name = None):
   if stack_name is None:
     raise Fail("The stack name is not present in the command. Packages for stack-select tool cannot be loaded.")
 
-  stack_select_packages_config = default("/configurations/cluster-env/stack_select_packages", None)
-  if stack_select_packages_config is None:
+  stack_packages_config = default("/configurations/cluster-env/stack_packages", None)
+  if stack_packages_config is None:
     raise Fail("The stack packages are not defined on the command. Unable to load packages for the stack-select tool")
 
-  data = json.loads(stack_select_packages_config)
+  data = json.loads(stack_packages_config)
 
   if stack_name not in data:
     raise Fail(
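
stack_select.get_packages() now reads the same renamed property, so a single cluster-env value feeds both tools — which is why stack_select_packages.json moved to stack_packages.json in the file list above. A sketch of the assumed combined shape: the "conf-select" branch matches the conf_select.py hunk, while the "stack-select" branch is hypothetical, since this hunk shows only the property rename, not the keys read beneath it.

stack_packages = {
    "HDP": {
        "conf-select": {
            "hive": [
                {"conf_dir": "/etc/hive/conf",
                 "current_dir": "{0}/current/hive-client/conf"}
            ]
        },
        "stack-select": {
            # hypothetical: per-scope package names consumed by
            # stack_select.get_packages(scope, service_name, component_name)
        }
    }
}
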
index 34102b6..91a84ea 100644 (file)
@@ -2948,7 +2948,7 @@ public class BlueprintConfigurationProcessor {
     Set<String> properties = Sets.newHashSet(ConfigHelper.CLUSTER_ENV_STACK_NAME_PROPERTY,
         ConfigHelper.CLUSTER_ENV_STACK_ROOT_PROPERTY, ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY,
         ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY,
-        ConfigHelper.CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY);
+        ConfigHelper.CLUSTER_ENV_STACK_PACKAGES_PROPERTY);
 
     try {
       Map<String, Map<String, String>> defaultStackProperties = configHelper.getDefaultStackProperties(stackId);
index 5393f81..5ac4c8f 100644 (file)
@@ -92,7 +92,7 @@ public class ConfigHelper {
   public static final String CLUSTER_ENV_STACK_FEATURES_PROPERTY = "stack_features";
   public static final String CLUSTER_ENV_STACK_TOOLS_PROPERTY = "stack_tools";
   public static final String CLUSTER_ENV_STACK_ROOT_PROPERTY = "stack_root";
-  public static final String CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY = "stack_select_packages";
+  public static final String CLUSTER_ENV_STACK_PACKAGES_PROPERTY = "stack_packages";
 
   public static final String HTTP_ONLY = "HTTP_ONLY";
   public static final String HTTPS_ONLY = "HTTPS_ONLY";
index 216d39d..052edba 100644 (file)
@@ -77,7 +77,7 @@ public class FinalUpgradeCatalog extends AbstractUpgradeCatalog {
    * <ul>
    * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_FEATURES_PROPERTY} from stack</li>
    * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_TOOLS_PROPERTY} from stack</li>
-   * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY} from stack</li>
+   * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_PACKAGES_PROPERTY} from stack</li>
    * </ul>
    *
    * Note: Config properties stack_features and stack_tools should always be updated to latest values as defined
@@ -108,7 +108,7 @@ public class FinalUpgradeCatalog extends AbstractUpgradeCatalog {
         for(PropertyInfo property : properties) {
           if(property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY) ||
               property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY) ||
-              property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY)) {
+              property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_PACKAGES_PROPERTY)) {
             propertyMap.put(property.getName(), property.getValue());
           }
         }
index 856446c..ae5181f 100644 (file)
@@ -20,7 +20,6 @@ limitations under the License.
 
 from resource_management.core.logger import Logger
 from resource_management.core.exceptions import ClientComponentHasNoStatus
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -56,7 +55,6 @@ class AccumuloClient(Script):
       return
 
     Logger.info("Executing Accumulo Client Upgrade pre-restart")
-    conf_select.select(params.stack_name, "accumulo", params.version)
     stack_select.select_packages(params.version)
 
 if __name__ == "__main__":
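
Every service-script hunk from here on applies the same mechanical edit as the Accumulo client above: drop the conf_select import and the hard-coded conf_select.select(...) call, keeping only stack_select.select_packages(...). A condensed, runnable sketch of the before/after pattern, with stubs standing in for the real resource_management modules:

class _SelectStub(object):
    # Stand-in for resource_management.libraries.functions.{conf,stack}_select
    @staticmethod
    def select(stack_name, package, version):
        print("conf-select: {0} {1} {2}".format(stack_name, package, version))

    @staticmethod
    def select_packages(version):
        print("stack-select packages: {0}".format(version))

conf_select = stack_select = _SelectStub()

def pre_upgrade_restart_before(stack_name, version):
    conf_select.select(stack_name, "accumulo", version)  # removed by this patch
    stack_select.select_packages(version)

def pre_upgrade_restart_after(version):
    stack_select.select_packages(version)  # the only call that remains

pre_upgrade_restart_after("2.6.0.0-1234")  # illustrative version string
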
index d884bcd..6aafb05 100644 (file)
@@ -21,7 +21,6 @@ from resource_management.core.exceptions import Fail
 from resource_management.core.logger import Logger
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import check_process_status
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.security_commons import build_expectations
 from resource_management.libraries.functions.security_commons import cached_kinit_executor
@@ -87,7 +86,6 @@ class AccumuloScript(Script):
     stack_component = stack_select.get_package_name()
 
     Logger.info("Executing Accumulo Upgrade pre-restart for {0}".format(stack_component))
-    conf_select.select(params.stack_name, "accumulo", params.version)
     stack_select.select_packages(params.version)
 
   def get_log_folder(self):
index 4a8210d..e234164 100644 (file)
@@ -20,7 +20,7 @@ limitations under the License.
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
@@ -35,7 +35,6 @@ class AtlasClient(Script):
     env.set_params(params)
 
     if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version_for_stack_feature_checks):
-      conf_select.select(params.stack_name, "atlas", params.version)
       stack_select.select_packages(params.version)
 
   def install(self, env):
index 948fe8c..038b723 100644 (file)
@@ -22,7 +22,7 @@ import os
 # Local Imports
 from metadata import metadata
 from resource_management import Fail
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.core.resources.system import Execute, File
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.version import format_stack_version
@@ -60,7 +60,6 @@ class MetadataServer(Script):
     env.set_params(params)
 
     if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version):
-      conf_select.select(params.stack_name, "atlas", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index 3f9a5bc..ea1d547 100644 (file)
@@ -20,7 +20,7 @@ limitations under the License.
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
@@ -35,7 +35,6 @@ class AtlasClient(Script):
     env.set_params(params)
 
     if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version):
-      conf_select.select(params.stack_name, "atlas", params.version)
       stack_select.select_packages(params.version)
 
   def install(self, env):
index daaa871..cc19858 100644 (file)
@@ -22,7 +22,7 @@ import os
 # Local Imports
 from metadata import metadata
 from resource_management import Fail
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.core.resources.system import Execute, File
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.version import format_stack_version
@@ -61,7 +61,6 @@ class MetadataServer(Script):
     env.set_params(params)
 
     if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version):
-      conf_select.select(params.stack_name, "atlas", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index 20623f7..8053dcb 100644 (file)
@@ -22,7 +22,6 @@ from resource_management import Script
 from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -52,8 +51,6 @@ class DruidBase(Script):
 
     if params.stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version):
       stack_select.select_packages(params.stack_version)
-    if params.stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version):
-      conf_select.select(params.stack_name, "druid", params.stack_version)
 
   def start(self, env, upgrade_type=None):
     import params
index 36dab51..a5dd4fb 100644 (file)
@@ -26,7 +26,6 @@ from resource_management.core.resources.system import Execute
 from resource_management.core.source import InlineTemplate
 from resource_management.core.source import Template
 from resource_management.libraries.functions import StackFeature
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.format import format
@@ -96,8 +95,6 @@ class Superset(Script):
 
     if params.stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version):
       stack_select.select_packages(params.version)
-    if params.stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version):
-      conf_select.select(params.stack_name, "superset", params.stack_version)
 
   def start(self, env, upgrade_type=None):
     import params
index f75f34f..540027d 100644 (file)
@@ -18,7 +18,7 @@ limitations under the License.
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from falcon import falcon
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -53,7 +53,6 @@ class FalconClientLinux(FalconClient):
       return
 
     Logger.info("Executing Falcon Client Stack Upgrade pre-restart")
-    conf_select.select(params.stack_name, "falcon", params.version)
     stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
index d547a1a..055d6cb 100644 (file)
@@ -21,7 +21,6 @@ import falcon_server_upgrade
 
 from resource_management.core.logger import Logger
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.functions.security_commons import build_expectations
@@ -81,7 +80,6 @@ class FalconServerLinux(FalconServer):
       return
 
     Logger.info("Executing Falcon Server Stack Upgrade pre-restart")
-    conf_select.select(params.stack_name, "falcon", params.version)
     stack_select.select_packages(params.version)
 
     falcon_server_upgrade.pre_start_restore()
index a21ecf5..4340618 100644 (file)
@@ -21,7 +21,7 @@ from flume import flume
 from flume import get_desired_state
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.flume_agent_helper import find_expected_agent_names, get_flume_status, get_flume_pid_files
 from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.core.logger import Logger
@@ -86,7 +86,6 @@ class FlumeHandlerLinux(FlumeHandler):
       return
 
     Logger.info("Executing Flume Stack Upgrade pre-restart")
-    conf_select.select(params.stack_name, "flume", params.version)
     stack_select.select_packages(params.version)
 
   def get_log_folder(self):
index 3027bff..650931f 100644 (file)
@@ -20,7 +20,7 @@ limitations under the License.
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from hbase import hbase
@@ -56,8 +56,6 @@ class HbaseClientDefault(HbaseClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): 
-      conf_select.select(params.stack_name, "hbase", params.version)
-
       # phoenix may not always be deployed
       try:
         stack_select.select_packages(params.version)
@@ -65,12 +63,5 @@ class HbaseClientDefault(HbaseClient):
         print "Ignoring error due to missing phoenix-client"
         print str(e)
 
-
-      # set all of the hadoop clients since hbase client is upgraded as part
-      # of the final "CLIENTS" group and we need to ensure that hadoop-client
-      # is also set
-      conf_select.select(params.stack_name, "hadoop", params.version)
-
-
 if __name__ == "__main__":
   HbaseClient().execute()
index e6dff39..dfc35fb 100644 (file)
@@ -17,7 +17,6 @@ limitations under the License.
 
 """
 
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -57,9 +56,7 @@ class PhoenixQueryServer(Script):
     import params
     env.set_params(params)
 
-    if params.stack_version_formatted and check_stack_feature(StackFeature.PHOENIX, params.stack_version_formatted):     
-      # phoenix uses hbase configs
-      conf_select.select(params.stack_name, "hbase", params.version)
+    if params.stack_version_formatted and check_stack_feature(StackFeature.PHOENIX, params.stack_version_formatted):
       stack_select.select_packages(params.version)
 
 
index a502c1d..b5e2262 100644 (file)
@@ -25,7 +25,7 @@ from resource_management.core import shell
 from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.core.exceptions import Fail
 from resource_management.core.logger import Logger
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.decorator import retry
@@ -37,7 +37,6 @@ def prestart(env):
   import params
 
   if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-    conf_select.select(params.stack_name, "hbase", params.version)
     stack_select.select_packages(params.version)
 
 def post_regionserver(env):
index 3027bff..b301c75 100644 (file)
@@ -20,7 +20,7 @@ limitations under the License.
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from hbase import hbase
@@ -56,8 +56,6 @@ class HbaseClientDefault(HbaseClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): 
-      conf_select.select(params.stack_name, "hbase", params.version)
-
       # phoenix may not always be deployed
       try:
         stack_select.select_packages(params.version)
@@ -66,11 +64,6 @@ class HbaseClientDefault(HbaseClient):
         print str(e)
 
 
-      # set all of the hadoop clients since hbase client is upgraded as part
-      # of the final "CLIENTS" group and we need to ensure that hadoop-client
-      # is also set
-      conf_select.select(params.stack_name, "hadoop", params.version)
-
 
 if __name__ == "__main__":
   HbaseClient().execute()
index 872a5c1..cdd1495 100644 (file)
@@ -17,7 +17,6 @@ limitations under the License.
 
 """
 
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -59,7 +58,6 @@ class PhoenixQueryServer(Script):
 
     if params.stack_version_formatted and check_stack_feature(StackFeature.PHOENIX, params.stack_version_formatted):     
       # phoenix uses hbase configs
-      conf_select.select(params.stack_name, "hbase", params.version)
       stack_select.select_packages(params.version)
 
 
index a502c1d..b5e2262 100644 (file)
@@ -25,7 +25,7 @@ from resource_management.core import shell
 from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.core.exceptions import Fail
 from resource_management.core.logger import Logger
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.decorator import retry
@@ -37,7 +37,6 @@ def prestart(env):
   import params
 
   if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-    conf_select.select(params.stack_name, "hbase", params.version)
     stack_select.select_packages(params.version)
 
 def post_regionserver(env):
index 257ccf9..0aa0bc0 100644 (file)
@@ -23,7 +23,7 @@ from ambari_commons.constants import UPGRADE_TYPE_ROLLING
 from hdfs_datanode import datanode
 from resource_management import Script, Fail, shell, Logger
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions import format
@@ -43,8 +43,7 @@ class DataNode(Script):
     """
     Get the name or path to the hdfs binary depending on the component name.
     """
-    component_name = stack_select.get_package_name()
-    return get_hdfs_binary(component_name)
+    return get_hdfs_binary("hadoop-hdfs-datanode")
 
 
   def install(self, env):
@@ -130,7 +129,6 @@ class DataNodeDefault(DataNode):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
index 5633cba..0896f30 100644 (file)
@@ -18,7 +18,7 @@ limitations under the License.
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -60,7 +60,6 @@ class HdfsClientDefault(HdfsClient):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
index bb2895e..75b2eeb 100644 (file)
@@ -19,7 +19,7 @@ limitations under the License.
 from ambari_commons.constants import UPGRADE_TYPE_NON_ROLLING
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -49,7 +49,6 @@ class JournalNodeDefault(JournalNode):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index 47b8021..50bf1e0 100644 (file)
@@ -29,7 +29,6 @@ from ambari_commons import constants
 from resource_management.libraries.script.script import Script
 from resource_management.core.resources.system import Execute, File
 from resource_management.core import shell
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.functions.format import format
@@ -71,8 +70,7 @@ class NameNode(Script):
     """
     Get the name or path to the hdfs binary depending on the component name.
     """
-    component_name = stack_select.get_package_name()
-    return get_hdfs_binary(component_name)
+    return get_hdfs_binary("hadoop-hdfs-namenode")
 
   def install(self, env):
     import params
@@ -196,11 +194,6 @@ class NameNodeDefault(NameNode):
     import params
     env.set_params(params)
 
-    # When downgrading an Express Upgrade, the first thing we do is to revert the symlinks.
-    # Therefore, we cannot call this code in that scenario.
-    if upgrade_type != constants.UPGRADE_TYPE_NON_ROLLING or params.upgrade_direction != Direction.DOWNGRADE:
-      conf_select.select(params.stack_name, "hadoop", params.version)
-
     stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
index 66968b7..f16e260 100644 (file)
@@ -24,7 +24,6 @@ from resource_management.libraries.functions.security_commons import build_expec
   FILE_TYPE_XML
 from hdfs_nfsgateway import nfsgateway
 from hdfs import hdfs
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -43,7 +42,6 @@ class NFSGateway(Script):
     env.set_params(params)
 
     if params.stack_version_formatted and check_stack_feature(StackFeature.NFS, params.stack_version_formatted):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index 0494df0..4977e1c 100644 (file)
@@ -18,7 +18,7 @@ limitations under the License.
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -69,7 +69,6 @@ class SNameNodeDefault(SNameNode):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
       
   def get_log_folder(self):
index 628b01a..955ff60 100644 (file)
@@ -27,7 +27,7 @@ from resource_management.core.exceptions import Fail
 from resource_management.core.resources.system import Directory
 from resource_management.core.resources.service import Service
 from resource_management.core import shell
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -133,7 +133,6 @@ class ZkfcSlaveDefault(ZkfcSlave):
     import params
     env.set_params(params)
     if check_stack_feature(StackFeature.ZKFC_VERSION_ADVERTISED, params.version_for_stack_feature_checks):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 def initialize_ha_zookeeper(params):
index 9f72aa0..d8fb361 100644 (file)
@@ -19,7 +19,7 @@ limitations under the License.
 import datanode_upgrade
 from hdfs_datanode import datanode
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -36,8 +36,7 @@ class DataNode(Script):
     """
     Get the name or path to the hdfs binary depending on the component name.
     """
-    component_name = self.get_component_name()
-    return get_hdfs_binary(component_name)
+    return get_hdfs_binary("hadoop-hdfs-datanode")
 
 
   def install(self, env):
@@ -84,7 +83,6 @@ class DataNodeDefault(DataNode):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
index 5633cba..0896f30 100644 (file)
@@ -18,7 +18,7 @@ limitations under the License.
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -60,7 +60,6 @@ class HdfsClientDefault(HdfsClient):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
index bb2895e..75b2eeb 100644 (file)
@@ -19,7 +19,7 @@ limitations under the License.
 from ambari_commons.constants import UPGRADE_TYPE_NON_ROLLING
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -49,7 +49,6 @@ class JournalNodeDefault(JournalNode):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index a904de8..7a0e784 100644 (file)
@@ -29,7 +29,6 @@ from ambari_commons import constants
 from resource_management.libraries.script.script import Script
 from resource_management.core.resources.system import Execute, File
 from resource_management.core import shell
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.functions.format import format
@@ -71,8 +70,7 @@ class NameNode(Script):
     """
     Get the name or path to the hdfs binary depending on the component name.
     """
-    component_name = self.get_component_name()
-    return get_hdfs_binary(component_name)
+    return get_hdfs_binary("hadoop-hdfs-namenode")
 
   def install(self, env):
     import params
@@ -195,8 +193,6 @@ class NameNodeDefault(NameNode):
     # When downgrading an Express Upgrade, the first thing we do is to revert the symlinks.
     # Therefore, we cannot call this code in that scenario.
     if upgrade_type != constants.UPGRADE_TYPE_NON_ROLLING or params.upgrade_direction != Direction.DOWNGRADE:
-      conf_select.select(params.stack_name, "hadoop", params.version)
-
       stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
index ba38526..a3f9d35 100644 (file)
@@ -24,7 +24,6 @@ from resource_management.libraries.functions.security_commons import build_expec
   FILE_TYPE_XML
 from hdfs_nfsgateway import nfsgateway
 from hdfs import hdfs
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -44,7 +43,6 @@ class NFSGateway(Script):
     env.set_params(params)
 
     if params.stack_version_formatted and check_stack_feature(StackFeature.NFS, params.stack_version_formatted):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index 5a4cc5a..f5ff3e1 100644 (file)
@@ -18,7 +18,7 @@ limitations under the License.
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -69,7 +69,6 @@ class SNameNodeDefault(SNameNode):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def get_log_folder(self):
index 6ea9b52..3ff6a3d 100644 (file)
@@ -27,7 +27,7 @@ from resource_management.core.exceptions import Fail
 from resource_management.core.resources.system import Directory
 from resource_management.core.resources.service import Service
 from resource_management.core import shell
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.security_commons import build_expectations
@@ -140,7 +140,6 @@ class ZkfcSlaveDefault(ZkfcSlave):
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ZKFC_VERSION_ADVERTISED, params.version) \
         and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 def initialize_ha_zookeeper(params):
index e6c9aab..2cdfc31 100644 (file)
@@ -19,7 +19,7 @@ limitations under the License.
 """
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.core.logger import Logger
@@ -56,8 +56,6 @@ class HiveClientDefault(HiveClient):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hive", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 
index 43f0c86..9b5cf43 100644 (file)
@@ -22,7 +22,6 @@ import os
 from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Execute, Directory
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.functions.format import format
@@ -105,7 +104,6 @@ class HiveMetastoreDefault(HiveMetastore):
     is_upgrade = params.upgrade_direction == Direction.UPGRADE
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hive", params.version)
       stack_select.select_packages(params.version)
 
     if is_upgrade and params.stack_version_formatted_major and \
index 6c76af8..080d62b 100644 (file)
@@ -20,7 +20,6 @@ limitations under the License.
 
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -113,7 +112,6 @@ class HiveServerDefault(HiveServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hive", params.version)
       stack_select.select_packages(params.version)
 
       # Copy mapreduce.tar.gz and tez.tar.gz to HDFS
index df2a295..e8e9666 100644 (file)
@@ -36,7 +36,6 @@ from resource_management.core.resources.system import Execute, Directory
 # Imports needed for Rolling/Express Upgrade
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 
@@ -83,7 +82,6 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
 
       if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
         stack_select.select_packages(params.version)
-        conf_select.select(params.stack_name, "hive2", params.version)
 
         # Copy hive.tar.gz and tez.tar.gz used by Hive Interactive to HDFS
         resource_created = copy_to_hdfs(
index 9bd5c6e..efec613 100644 (file)
@@ -19,7 +19,7 @@ Ambari Agent
 
 """
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -76,9 +76,6 @@ class WebHCatServerDefault(WebHCatServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): 
-      # webhcat has no conf, but uses hadoop home, so verify that regular hadoop conf is set
-      conf_select.select(params.stack_name, "hive-hcatalog", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def get_log_folder(self):
index e6c9aab..2cdfc31 100644 (file)
@@ -19,7 +19,7 @@ limitations under the License.
 """
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.core.logger import Logger
@@ -56,8 +56,6 @@ class HiveClientDefault(HiveClient):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hive", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 
index 43f0c86..9b5cf43 100644 (file)
@@ -22,7 +22,6 @@ import os
 from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Execute, Directory
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.functions.format import format
@@ -105,7 +104,6 @@ class HiveMetastoreDefault(HiveMetastore):
     is_upgrade = params.upgrade_direction == Direction.UPGRADE
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hive", params.version)
       stack_select.select_packages(params.version)
 
     if is_upgrade and params.stack_version_formatted_major and \
index 6c76af8..080d62b 100644 (file)
@@ -20,7 +20,6 @@ limitations under the License.
 
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -113,7 +112,6 @@ class HiveServerDefault(HiveServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hive", params.version)
       stack_select.select_packages(params.version)
 
       # Copy mapreduce.tar.gz and tez.tar.gz to HDFS
index 3b6fd36..1d2899a 100644 (file)
@@ -36,7 +36,6 @@ from resource_management.core.resources.system import Execute, Directory
 # Imports needed for Rolling/Express Upgrade
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 
@@ -84,7 +83,6 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
 
       if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
         stack_select.select_packages(params.version)
-        conf_select.select(params.stack_name, "hive2", params.version)
 
         # Copy hive.tar.gz and tez.tar.gz used by Hive Interactive to HDFS
         resource_created = copy_to_hdfs(
index 9bd5c6e..e3a4f36 100644 (file)
@@ -19,7 +19,7 @@ Ambari Agent
 
 """
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -77,8 +77,6 @@ class WebHCatServerDefault(WebHCatServer):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): 
       # webhcat has no conf, but uses hadoop home, so verify that regular hadoop conf is set
-      conf_select.select(params.stack_name, "hive-hcatalog", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def get_log_folder(self):
index 23fcaed..7ddd1ab 100644 (file)
@@ -19,7 +19,6 @@ limitations under the License.
 from resource_management import Script
 from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Execute, File, Directory
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import Direction
 from resource_management.libraries.functions.version import format_stack_version
@@ -51,9 +50,6 @@ class KafkaBroker(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       stack_select.select_packages(params.version)
 
-    if params.version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.version):
-      conf_select.select(params.stack_name, "kafka", params.version)
-
     # This is extremely important since it should only be called if crossing the HDP 2.3.4.0 boundary. 
     if params.current_version and params.version and params.upgrade_direction:
       src_version = dst_version = None
index 23fcaed..0910156 100644 (file)
@@ -19,7 +19,6 @@ limitations under the License.
 from resource_management import Script
 from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Execute, File, Directory
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import Direction
 from resource_management.libraries.functions.version import format_stack_version
@@ -51,10 +50,7 @@ class KafkaBroker(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       stack_select.select_packages(params.version)
 
-    if params.version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.version):
-      conf_select.select(params.stack_name, "kafka", params.version)
-
-    # This is extremely important since it should only be called if crossing the HDP 2.3.4.0 boundary. 
+    # This is extremely important since it should only be called if crossing the HDP 2.3.4.0 boundary.
     if params.current_version and params.version and params.upgrade_direction:
       src_version = dst_version = None
       if params.upgrade_direction == Direction.UPGRADE:
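
Kafka previously needed two gates because binary and conf selection were keyed to different stack features; with conf handling folded into select_packages, the CONFIG_VERSIONING branch disappears entirely. Before and after, reconstructed from the hunk above:

    # Before: two feature gates, two selector tools.
    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
      stack_select.select_packages(params.version)
    if params.version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.version):
      conf_select.select(params.stack_name, "kafka", params.version)

    # After: a single gate; the conf symlink is assumed to be covered by select_packages.
    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
      stack_select.select_packages(params.version)

The same collapse shows up in the Oozie server diffs below, where the CONFIG_VERSIONING-gated conf_select.select(..., "oozie", ...) block is removed alongside the change to the ROLLING_UPGRADE one.
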
index b086bef..456d6b4 100644 (file)
@@ -22,7 +22,7 @@ import os
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.functions.security_commons import build_expectations
 from resource_management.libraries.functions.security_commons import cached_kinit_executor
@@ -116,8 +116,6 @@ class KnoxGatewayDefault(KnoxGateway):
       absolute_backup_dir = upgrade.backup_data()
       Logger.info("Knox data was successfully backed up to {0}".format(absolute_backup_dir))
 
-    # <conf-selector-tool> will change the symlink to the conf folder.
-    conf_select.select(params.stack_name, "knox", params.version)
     stack_select.select_packages(params.version)
 
     # seed the new Knox data directory with the keystores of yesteryear
index b086bef..456d6b4 100644 (file)
@@ -22,7 +22,7 @@ import os
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.functions.security_commons import build_expectations
 from resource_management.libraries.functions.security_commons import cached_kinit_executor
@@ -116,8 +116,6 @@ class KnoxGatewayDefault(KnoxGateway):
       absolute_backup_dir = upgrade.backup_data()
       Logger.info("Knox data was successfully backed up to {0}".format(absolute_backup_dir))
 
-    # <conf-selector-tool> will change the symlink to the conf folder.
-    conf_select.select(params.stack_name, "knox", params.version)
     stack_select.select_packages(params.version)
 
     # seed the new Knox data directory with the keystores of yesteryear
index b598d17..33ec513 100644 (file)
@@ -21,7 +21,6 @@ Ambari Agent
 from resource_management.core.logger import Logger
 from resource_management.core.exceptions import ClientComponentHasNoStatus
 from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.script import Script
 from mahout import mahout
 from resource_management.libraries.functions.default import default
@@ -34,7 +33,6 @@ class MahoutClient(Script):
     import params
     env.set_params(params)
 
-    conf_select.select(params.stack_name, "mahout", params.version)
     stack_select.select_packages(params.version)
 
   def install(self, env):
index f06d90a..6fbd66a 100644 (file)
@@ -20,7 +20,7 @@ limitations under the License.
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.core.logger import Logger
@@ -57,7 +57,6 @@ class OozieClient(Script):
       return
 
     Logger.info("Executing Oozie Client Stack Upgrade pre-restart")
-    conf_select.select(params.stack_name, "oozie", params.version)
     stack_select.select_packages(params.version)
 
   # We substitute some configs (oozie.authentication.kerberos.principal) before generation (see oozie.py and params.py).
index e823941..81497bc 100644 (file)
@@ -20,7 +20,6 @@ limitations under the License.
 
 from resource_management.core import Logger
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -67,9 +66,6 @@ class OozieServer(Script):
         # Sets the symlink : eg: <stack-root>/current/oozie-server -> <stack-root>/a.b.c.d-<version>/oozie
         stack_select.select_packages(params.version)
 
-      if params.version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.version):
-        conf_select.select(params.stack_name, "oozie", params.version)
-
     env.set_params(params)
     oozie(is_server=True)
 
@@ -122,7 +118,6 @@ class OozieServerDefault(OozieServer):
     Logger.info("Executing Oozie Server Stack Upgrade pre-restart")
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "oozie", params.version)
       stack_select.select_packages(params.version)
 
     OozieUpgrade.prepare_libext_directory()
index f06d90a..6fbd66a 100644 (file)
@@ -20,7 +20,7 @@ limitations under the License.
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.core.logger import Logger
@@ -57,7 +57,6 @@ class OozieClient(Script):
       return
 
     Logger.info("Executing Oozie Client Stack Upgrade pre-restart")
-    conf_select.select(params.stack_name, "oozie", params.version)
     stack_select.select_packages(params.version)
 
   # We substitute some configs (oozie.authentication.kerberos.principal) before generation (see oozie.py and params.py).
index 8af08b6..6d7766f 100644 (file)
@@ -20,7 +20,6 @@ limitations under the License.
 
 from resource_management.core import Logger
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -68,9 +67,6 @@ class OozieServer(Script):
         # Sets the symlink : eg: <stack-root>/current/oozie-server -> <stack-root>/a.b.c.d-<version>/oozie
         stack_select.select_packages(params.version)
 
-      if params.version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.version):
-        conf_select.select(params.stack_name, "oozie", params.version)
-
     env.set_params(params)
     oozie(is_server=True)
 
@@ -123,7 +119,6 @@ class OozieServerDefault(OozieServer):
     Logger.info("Executing Oozie Server Stack Upgrade pre-restart")
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "oozie", params.version)
       stack_select.select_packages(params.version)
 
     OozieUpgrade.prepare_libext_directory()
index 9a33c88..7ded264 100644 (file)
@@ -22,7 +22,7 @@ Ambari Agent
 import sys
 import os
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from pig import pig
@@ -47,8 +47,6 @@ class PigClientLinux(PigClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): 
-      conf_select.select(params.stack_name, "pig", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def install(self, env):
index 9a33c88..7ded264 100644 (file)
@@ -22,7 +22,7 @@ Ambari Agent
 import sys
 import os
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from pig import pig
@@ -47,8 +47,6 @@ class PigClientLinux(PigClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): 
-      conf_select.select(params.stack_name, "pig", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def install(self, env):
index e27a03e..f779c18 100644 (file)
@@ -20,7 +20,6 @@ limitations under the License.
 from resource_management.core.exceptions import Fail
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.script import Script
 from resource_management.core.resources.system import Execute, File
@@ -33,7 +32,6 @@ from setup_ranger_xml import setup_ranger_audit_solr, setup_ranger_admin_passwd_
 from resource_management.libraries.functions import solr_cloud_util
 from ambari_commons.constants import UPGRADE_TYPE_NON_ROLLING, UPGRADE_TYPE_ROLLING
 from resource_management.libraries.functions.constants import Direction
-import upgrade
 import os, errno
 
 class RangerAdmin(Script):
@@ -75,7 +73,7 @@ class RangerAdmin(Script):
     import params
     env.set_params(params)
 
-    upgrade.prestart(env, "ranger-admin")
+    stack_select.select_packages(params.version)
 
     self.set_ru_rangeradmin_in_progress(params.upgrade_marker_file)
 
@@ -204,11 +202,7 @@ class RangerAdmin(Script):
     if upgrade_stack is None:
       raise Fail('Unable to determine the stack and stack version')
 
-    stack_name = upgrade_stack[0]
-    stack_version = upgrade_stack[1]
-
     stack_select.select_packages(params.version)
-    conf_select.select(stack_name, "ranger-admin", stack_version)
 
   def get_log_folder(self):
     import params
index c9b5c4f..3aae8ff 100644 (file)
@@ -18,7 +18,6 @@ limitations under the License.
 
 """
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.core.resources.system import Execute, File
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -29,7 +28,6 @@ from resource_management.core import shell
 from ranger_service import ranger_service
 from setup_ranger_xml import ranger, ranger_credential_helper
 from resource_management.core.exceptions import Fail
-import upgrade
 
 class RangerTagsync(Script):
 
@@ -85,7 +83,6 @@ class RangerTagsync(Script):
 
     if params.stack_supports_ranger_tagsync:
       Logger.info("Executing Ranger Tagsync Stack Upgrade pre-restart")
-      conf_select.select(params.stack_name, "ranger-tagsync", params.version)
       stack_select.select_packages(params.version)
 
   def get_log_folder(self):
@@ -109,11 +106,8 @@ class RangerTagsync(Script):
     if upgrade_stack is None:
       raise Fail('Unable to determine the stack and stack version')
 
-    stack_name = upgrade_stack[0]
-    stack_version = upgrade_stack[1]
-
     stack_select.select_packages(params.version)
-    conf_select.select(stack_name, "ranger-tagsync", stack_version)
+
     if params.stack_supports_ranger_tagsync_ssl_xml_support:
       Logger.info("Upgrading Tagsync, stack support Atlas user for Tagsync, creating keystore for same.")
       self.create_atlas_user_keystore(env)
index 8654bc2..cc0075b 100644 (file)
@@ -22,12 +22,12 @@ from resource_management.libraries.script import Script
 from resource_management.core.resources.system import Execute, File
 from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions import stack_select
 from resource_management.core.logger import Logger
 from resource_management.core import shell
 from ranger_service import ranger_service
 from ambari_commons.constants import UPGRADE_TYPE_NON_ROLLING, UPGRADE_TYPE_ROLLING
 from resource_management.libraries.functions.constants import Direction
-import upgrade
 import os
 
 class RangerUsersync(Script):
@@ -107,7 +107,7 @@ class RangerUsersync(Script):
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "ranger-usersync")
+    stack_select.select_packages(params.version)
 
   def get_log_folder(self):
     import params
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py
deleted file mode 100644 (file)
index ca1b2bf..0000000
+++ /dev/null
@@ -1,31 +0,0 @@
-
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions.format import format
-
-def prestart(env, stack_component):
-  import params
-
-  if params.version and params.stack_supports_rolling_upgrade:
-    conf_select.select(params.stack_name, stack_component, params.version)
-    stack_select.select_packages(params.version)
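
RANGER/0.4.0's upgrade.py existed only to pair the two selector calls behind a rolling-upgrade check, so once conf_select.select is gone the wrapper adds nothing over a direct call: RangerAdmin and RangerUsersync above now invoke stack_select.select_packages inline, and the module is deleted. Side by side:

    # Old helper (deleted above):
    def prestart(env, stack_component):
      import params
      if params.version and params.stack_supports_rolling_upgrade:
        conf_select.select(params.stack_name, stack_component, params.version)
        stack_select.select_packages(params.version)

    # New call sites:
    stack_select.select_packages(params.version)

Note that the inlined call sites also drop the stack_supports_rolling_upgrade guard, presumably because select_packages is expected to cope with an unset version on its own.
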
index b13d6ed..a9e2857 100644 (file)
@@ -20,7 +20,6 @@ limitations under the License.
 from resource_management.core.exceptions import Fail
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.script import Script
 from resource_management.core.resources.system import Execute, File
@@ -68,7 +67,7 @@ class RangerAdmin(Script):
     import params
     env.set_params(params)
 
-    upgrade.prestart(env, "ranger-admin")
+    upgrade.prestart(env)
 
     self.set_ru_rangeradmin_in_progress(params.upgrade_marker_file)
 
@@ -193,7 +192,6 @@ class RangerAdmin(Script):
     stack_version = upgrade_stack[1]
 
     stack_select.select_packages(params.version)
-    conf_select.select(stack_name, "ranger-admin", stack_version)
 
   def get_log_folder(self):
     import params
index 35f9fe6..85963cf 100644 (file)
@@ -18,7 +18,6 @@ limitations under the License.
 
 """
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.core.resources.system import Execute, File
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -85,7 +84,6 @@ class RangerTagsync(Script):
 
     if params.stack_supports_ranger_tagsync:
       Logger.info("Executing Ranger Tagsync Stack Upgrade pre-restart")
-      conf_select.select(params.stack_name, "ranger-tagsync", params.version)
       stack_select.select_packages(params.version)
 
 
@@ -110,11 +108,8 @@ class RangerTagsync(Script):
     if upgrade_stack is None:
       raise Fail('Unable to determine the stack and stack version')
 
-    stack_name = upgrade_stack[0]
-    stack_version = upgrade_stack[1]
-
     stack_select.select_packages(params.version)
-    conf_select.select(stack_name, "ranger-tagsync", stack_version)
+
     if params.stack_supports_ranger_tagsync_ssl_xml_support:
       Logger.info("Upgrading Tagsync, stack support Atlas user for Tagsync, creating keystore for same.")
       self.create_atlas_user_keystore(env)
index fa5a320..7d5ec29 100644 (file)
@@ -103,7 +103,7 @@ class RangerUsersync(Script):
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "ranger-usersync")
+    upgrade.prestart(env)
 
   def get_log_folder(self):
     import params
index ca1b2bf..597c868 100644 (file)
@@ -19,13 +19,11 @@ limitations under the License.
 
 """
 from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.format import format
 
-def prestart(env, stack_component):
+def prestart(env):
   import params
 
   if params.version and params.stack_supports_rolling_upgrade:
-    conf_select.select(params.stack_name, stack_component, params.version)
     stack_select.select_packages(params.version)
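
The RANGER/1.0.0.3.0 copy keeps the wrapper but drops the now-unused stack_component parameter, so callers shrink accordingly; a fragment of the caller shape, as in the ranger_admin.py and ranger_usersync.py hunks above:

    import upgrade

    # inside a Script subclass such as RangerUsersync:
    def pre_upgrade_restart(self, env, upgrade_type=None):
      import params
      env.set_params(params)
      upgrade.prestart(env)  # the component name is no longer needed
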
index 0bd11f3..fcf2478 100755 (executable)
@@ -29,7 +29,6 @@ from resource_management.core import shell
 from resource_management.libraries.functions.default import default
 from kms import kms, setup_kms_db, setup_java_patch, enable_kms_plugin, setup_kms_jce, update_password_configs
 from kms_service import kms_service
-import upgrade
 
 class KmsServer(Script):
 
@@ -94,7 +93,7 @@ class KmsServer(Script):
     import params
     env.set_params(params)
 
-    upgrade.prestart(env, "ranger-kms")
+    stack_select.select_packages(params.version)
     kms(upgrade_type=upgrade_type)
     setup_java_patch()
 
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py
deleted file mode 100644 (file)
index 73c32ff..0000000
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions.format import format
-
-def prestart(env, stack_component):
-  import params
-
-  if params.version and params.stack_supports_config_versioning:
-    conf_select.select(params.stack_name, stack_component, params.version)
-    stack_select.select_packages(params.version)
index 3b138fc..4c313c4 100755 (executable)
@@ -86,7 +86,7 @@ class KmsServer(Script):
     import params
     env.set_params(params)
 
-    upgrade.prestart(env, "ranger-kms")
+    upgrade.prestart(env)
     kms(upgrade_type=upgrade_type)
     setup_java_patch()
 
index 73c32ff..465d54e 100644 (file)
@@ -22,9 +22,8 @@ from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.format import format
 
-def prestart(env, stack_component):
+def prestart(env):
   import params
 
   if params.version and params.stack_supports_config_versioning:
-    conf_select.select(params.stack_name, stack_component, params.version)
     stack_select.select_packages(params.version)
index 9284c81..45c7785 100644 (file)
@@ -19,7 +19,7 @@ limitations under the License.
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from slider import slider
@@ -38,14 +38,8 @@ class SliderClientLinux(SliderClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "slider", params.version)
       stack_select.select_packages(params.version)
 
-      # also set all of the hadoop clients since slider client is upgraded as
-      # part of the final "CLIENTS" group and we need to ensure that
-      # hadoop-client is also set
-      conf_select.select(params.stack_name, "hadoop", params.version)
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
index 9284c81..45c7785 100644 (file)
@@ -19,7 +19,7 @@ limitations under the License.
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from slider import slider
@@ -38,14 +38,8 @@ class SliderClientLinux(SliderClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "slider", params.version)
       stack_select.select_packages(params.version)
 
-      # also set all of the hadoop clients since slider client is upgraded as
-      # part of the final "CLIENTS" group and we need to ensure that
-      # hadoop-client is also set
-      conf_select.select(params.stack_name, "hadoop", params.version)
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
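
Client scripts lose a second special case as well: Slider above, like the Pig, Hive, and WebHCat clients earlier, no longer re-links the Hadoop conf directory with an extra conf_select.select(..., "hadoop", ...). The removed comment documents why that call existed; the new scheme assumes a host-level select_packages(version) selects hadoop-client along with every other package mapped to the host's components. A toy sketch of that assumption (helper names and package list are hypothetical, not the real stack_select internals; only the select_packages(version) signature is taken from the diff):

    def packages_for_host():
      # In reality this would come from stack metadata; hard-coded for illustration.
      return ["slider-client", "hadoop-client"]

    def select_packages(version):
      for package in packages_for_host():
        # <stack-root>/current/<package> -> <stack-root>/<version>/<package>
        print("stack link: %s -> %s" % (package, version))
        # the conf dir that conf-select used to manage now follows automatically
        print("conf link:  %s -> %s" % (package, version))

    select_packages("2.6.0.0-1234")
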
index 9bdc4b9..009adbd 100644 (file)
@@ -22,7 +22,7 @@ import sys
 import os
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -73,7 +73,6 @@ class JobHistoryServer(Script):
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Spark Job History Server Stack Upgrade pre-restart")
-      conf_select.select(params.stack_name, "spark", params.version)
       stack_select.select_packages(params.version)
 
       # Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not have a dependency on Tez, so it does not
index ae26fac..726f94f 100644 (file)
@@ -31,7 +31,7 @@ from resource_management import shell
 from resource_management.libraries.functions.decorator import retry
 from resource_management.core.logger import Logger
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 
 from livy_service import livy_service
 from setup_livy import setup_livy
@@ -132,7 +132,6 @@ class LivyServer(Script):
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Livy Server Stack Upgrade pre-restart")
-      conf_select.select(params.stack_name, "spark", params.version)
       stack_select.select_packages(params.version)
 
   def get_log_folder(self):
index a2e26b8..8136a18 100644 (file)
@@ -23,7 +23,7 @@ import sys
 
 # Local imports
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
@@ -69,7 +69,6 @@ class SparkClient(Script):
       # Because this script was called from ru_execute_tasks.py which already enters an Environment with its own basedir,
       # must change it now so this function can find the Jinja Templates for the service.
       env.config.basedir = base_dir
-      conf_select.select(params.stack_name, conf_select_name, params.version)
       self.configure(env, config_dir=config_dir, upgrade_type=UPGRADE_TYPE_ROLLING)
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
@@ -78,7 +77,6 @@ class SparkClient(Script):
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Spark Client Stack Upgrade pre-restart")
-      conf_select.select(params.stack_name, "spark", params.version)
       stack_select.select_packages(params.version)
 
 if __name__ == "__main__":
index 7d595f2..f8e571b 100644 (file)
@@ -22,7 +22,7 @@ import sys
 import os
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -68,7 +68,6 @@ class SparkThriftServer(Script):
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Spark Thrift Server Stack Upgrade pre-restart")
-      conf_select.select(params.stack_name, "spark", params.version)
       stack_select.select_packages(params.version)
 
   def get_log_folder(self):
index 9935063..7cbb646 100644 (file)
@@ -22,7 +22,7 @@ import sys
 import os
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -74,7 +74,6 @@ class JobHistoryServer(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Spark Job History Server Stack Upgrade pre-restart")
       # TODO, change to "spark" after RPM switches the name
-      conf_select.select(params.stack_name, "spark2", params.version)
       stack_select.select_packages(params.version)
 
       # Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not have a dependency on Tez, so it does not
index e801639..1bbb9c3 100644 (file)
@@ -31,7 +31,7 @@ from resource_management import shell
 from resource_management.libraries.functions.decorator import retry
 from resource_management.core.logger import Logger
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 
 from livy_service import livy_service
 from setup_livy import setup_livy
@@ -132,7 +132,6 @@ class LivyServer(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Livy Server Stack Upgrade pre-restart")
       # TODO, change to "spark" and "livy" after RPM switches the name
-      conf_select.select(params.stack_name, "spark2", params.version)
       stack_select.select_packages(params.version)
 
   def get_log_folder(self):
index 2ff69e7..8c3a52c 100644 (file)
@@ -20,7 +20,7 @@ limitations under the License.
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
@@ -50,7 +50,6 @@ class SparkClient(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Spark Client Stack Upgrade pre-restart")
       # TODO, change to "spark" after RPM switches the name
-      conf_select.select(params.stack_name, "spark2", params.version)
       stack_select.select_packages(params.version)
 
 if __name__ == "__main__":
index 6a1bd87..aff1a34 100644 (file)
@@ -22,7 +22,7 @@ import sys
 import os
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -68,7 +68,6 @@ class SparkThriftServer(Script):
     env.set_params(params)
     Logger.info("Executing Spark Thrift Server Stack Upgrade pre-restart")
     # TODO, change to "spark" after RPM switches the name
-    conf_select.select(params.stack_name, "spark2", params.version)
     stack_select.select_packages(params.version)
       
   def get_log_folder(self):
index b6a792a..311b6b2 100755 (executable)
@@ -22,7 +22,7 @@ import sys
 import os
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -73,7 +73,6 @@ class JobHistoryServer(Script):
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Spark2 Job History Server Stack Upgrade pre-restart")
-      conf_select.select(params.stack_name, "spark2", params.version)
       stack_select.select_packages(params.version)
 
       # Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not have a dependency on Tez, so it does not
index c710757..50604d4 100644 (file)
@@ -31,7 +31,7 @@ from resource_management import shell
 from resource_management.libraries.functions.decorator import retry
 from resource_management.core.logger import Logger
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 
 from livy2_service import livy2_service
 from setup_livy2 import setup_livy
@@ -131,7 +131,6 @@ class LivyServer(Script):
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Livy2 Server Stack Upgrade pre-restart")
-      conf_select.select(params.stack_name, "spark2", params.version)
       stack_select.select_packages(params.version)
 
   def get_log_folder(self):
index 6b0cd89..0c7c4c3 100755 (executable)
@@ -20,7 +20,7 @@ limitations under the License.
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
@@ -49,7 +49,6 @@ class SparkClient(Script):
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Spark2 Client Stack Upgrade pre-restart")
-      conf_select.select(params.stack_name, "spark", params.version)
       stack_select.select_packages(params.version)
 
 if __name__ == "__main__":
index 7c7982b..a0900f9 100755 (executable)
@@ -22,7 +22,7 @@ import sys
 import os
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -67,7 +67,6 @@ class SparkThriftServer(Script):
 
     env.set_params(params)
     Logger.info("Executing Spark2 Thrift Server Stack Upgrade pre-restart")
-    conf_select.select(params.stack_name, "spark2", params.version)
     stack_select.select_packages(params.version)
       
   def get_log_folder(self):
index 8c7dd35..6c637b9 100644 (file)
@@ -22,7 +22,6 @@ from resource_management.core.exceptions import ClientComponentHasNoStatus
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions import StackFeature
@@ -50,8 +49,7 @@ class SqoopClientDefault(SqoopClient):
     import params
     env.set_params(params)
 
-    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): 
-      conf_select.select(params.stack_name, "sqoop", params.version)
+    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       stack_select.select_packages(params.version)
 
 
index 8c7dd35..6c637b9 100644 (file)
@@ -22,7 +22,6 @@ from resource_management.core.exceptions import ClientComponentHasNoStatus
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions import StackFeature
@@ -50,8 +49,7 @@ class SqoopClientDefault(SqoopClient):
     import params
     env.set_params(params)
 
-    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version): 
-      conf_select.select(params.stack_name, "sqoop", params.version)
+    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       stack_select.select_packages(params.version)
 
 
index 06f6de9..7d483b6 100644 (file)
@@ -21,7 +21,6 @@ limitations under the License.
 import sys
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Execute
@@ -51,7 +50,6 @@ class DrpcServer(Script):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "storm", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index bf38a2d..1d000bf 100644 (file)
@@ -22,7 +22,6 @@ import sys
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.script import Script
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -55,7 +54,6 @@ class NimbusDefault(Nimbus):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "storm", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index 18d8fea..26c23d4 100644 (file)
@@ -22,7 +22,6 @@ import sys
 from resource_management.libraries.script import Script
 from storm import storm
 from supervisord_service import supervisord_service, supervisord_check_status
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Execute
@@ -46,7 +45,6 @@ class Nimbus(Script):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "storm", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index f111f25..c348c20 100644 (file)
@@ -21,7 +21,6 @@ limitations under the License.
 import sys
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Execute
@@ -50,7 +49,6 @@ class PaceMaker(Script):
       env.set_params(params)
 
       if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-        conf_select.select(params.stack_name, "storm", params.version)
         stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index 6c88a9e..198d17a 100644 (file)
@@ -21,7 +21,6 @@ limitations under the License.
 import sys
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Execute
@@ -73,7 +72,6 @@ class SupervisorDefault(Supervisor):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "storm", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index 46626bc..c98346b 100644 (file)
@@ -23,7 +23,6 @@ from storm import storm
 from service import service
 from supervisord_service import supervisord_service, supervisord_check_status
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Execute
@@ -47,7 +46,6 @@ class Supervisor(Script):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "storm", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index 571784c..1f20f36 100644 (file)
@@ -24,7 +24,6 @@ from service import service
 from service_check import ServiceCheck
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Link
@@ -78,7 +77,6 @@ class UiServerDefault(UiServer):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "storm", params.version)
       stack_select.select_packages(params.version)
 
   def link_metrics_sink_jar(self):
index 06f6de9..7d483b6 100644 (file)
@@ -21,7 +21,6 @@ limitations under the License.
 import sys
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Execute
@@ -51,7 +50,6 @@ class DrpcServer(Script):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "storm", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index f641137..18823e8 100644 (file)
@@ -22,7 +22,6 @@ import sys
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.script import Script
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -55,7 +54,6 @@ class NimbusDefault(Nimbus):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "storm", params.version)
       stack_select.select_packages(params.version)
 
 
index 18d8fea..26c23d4 100644 (file)
@@ -22,7 +22,6 @@ import sys
 from resource_management.libraries.script import Script
 from storm import storm
 from supervisord_service import supervisord_service, supervisord_check_status
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Execute
@@ -46,7 +45,6 @@ class Nimbus(Script):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "storm", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index f111f25..c348c20 100644 (file)
@@ -21,7 +21,6 @@ limitations under the License.
 import sys
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Execute
@@ -50,7 +49,6 @@ class PaceMaker(Script):
       env.set_params(params)
 
       if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-        conf_select.select(params.stack_name, "storm", params.version)
         stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index 6c88a9e..198d17a 100644 (file)
@@ -21,7 +21,6 @@ limitations under the License.
 import sys
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Execute
@@ -73,7 +72,6 @@ class SupervisorDefault(Supervisor):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "storm", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index 46626bc..c98346b 100644 (file)
@@ -23,7 +23,6 @@ from storm import storm
 from service import service
 from supervisord_service import supervisord_service, supervisord_check_status
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Execute
@@ -47,7 +46,6 @@ class Supervisor(Script):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "storm", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index 571784c..1f20f36 100644 (file)
@@ -24,7 +24,6 @@ from service import service
 from service_check import ServiceCheck
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.core.resources.system import Link
@@ -78,7 +77,6 @@ class UiServerDefault(UiServer):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "storm", params.version)
       stack_select.select_packages(params.version)
 
   def link_metrics_sink_jar(self):
index 8a6a6d3..ed3f5fd 100644 (file)
@@ -28,7 +28,6 @@ from ambari_commons.os_utils import copy_file, extract_path_component
 
 from resource_management.core.exceptions import ClientComponentHasNoStatus
 from resource_management.core.source import InlineTemplate
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -72,12 +71,9 @@ class TezClientLinux(TezClient):
     config_dir = self.get_config_dir_during_stack_upgrade(env, base_dir, conf_select_name)
 
     if config_dir:
-      Logger.info("stack_upgrade_save_new_config(): Calling conf-select on %s using version %s" % (conf_select_name, str(params.version)))
-
       # Because this script was called from ru_execute_tasks.py which already enters an Environment with its own basedir,
       # must change it now so this function can find the Jinja Templates for the service.
       env.config.basedir = base_dir
-      conf_select.select(params.stack_name, conf_select_name, params.version)
       self.configure(env, config_dir=config_dir)
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
@@ -85,8 +81,6 @@ class TezClientLinux(TezClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "tez", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def install(self, env):
index 8a6a6d3..b42d14e 100644 (file)
@@ -28,7 +28,6 @@ from ambari_commons.os_utils import copy_file, extract_path_component
 
 from resource_management.core.exceptions import ClientComponentHasNoStatus
 from resource_management.core.source import InlineTemplate
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -77,7 +76,6 @@ class TezClientLinux(TezClient):
       # Because this script was called from ru_execute_tasks.py which already enters an Environment with its own basedir,
       # must change it now so this function can find the Jinja Templates for the service.
       env.config.basedir = base_dir
-      conf_select.select(params.stack_name, conf_select_name, params.version)
       self.configure(env, config_dir=config_dir)
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
@@ -85,8 +83,6 @@ class TezClientLinux(TezClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "tez", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def install(self, env):
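The Tez and MapReduce2 client hunks also simplify stack_upgrade_save_new_config(): the direct conf-select invocation and its log line go away, while the basedir fix and the configure() call remain. A sketch under those assumptions (base_dir and conf_select_name are computed elsewhere in the real scripts and treated as given here):

    def stack_upgrade_save_new_config(self, env):
      import params
      env.set_params(params)

      config_dir = self.get_config_dir_during_stack_upgrade(env, base_dir, conf_select_name)

      if config_dir:
        # ru_execute_tasks.py already entered an Environment with its own
        # basedir, so point it at this service before rendering Jinja templates.
        env.config.basedir = base_dir
        self.configure(env, config_dir=config_dir)
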
index a435b80..2aec6ba 100644 (file)
@@ -20,7 +20,7 @@ Ambari Agent
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions import check_process_status
@@ -72,7 +72,6 @@ class ApplicationTimelineServerDefault(ApplicationTimelineServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def status(self, env):
index 30045f8..a93bc17 100644 (file)
@@ -21,7 +21,7 @@ Ambari Agent
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -74,7 +74,6 @@ class HistoryServerDefault(HistoryServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
       # MC Hammer said, "Can't touch this"
       copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
index efcb2da..234e931 100644 (file)
@@ -24,7 +24,7 @@ import sys
 
 # Local imports
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
@@ -66,12 +66,9 @@ class MapReduce2Client(Script):
     config_dir = self.get_config_dir_during_stack_upgrade(env, base_dir, conf_select_name)
 
     if config_dir:
-      Logger.info("stack_upgrade_save_new_config(): Calling conf-select on %s using version %s" % (conf_select_name, str(params.version)))
-
       # Because this script was called from ru_execute_tasks.py which already enters an Environment with its own basedir,
       # must change it now so this function can find the Jinja Templates for the service.
       env.config.basedir = base_dir
-      conf_select.select(params.stack_name, conf_select_name, params.version)
       self.configure(env, config_dir=config_dir)
 
 
@@ -87,7 +84,6 @@ class MapReduce2ClientDefault(MapReduce2Client):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 
index ed83402..280fc2f 100644 (file)
@@ -22,7 +22,7 @@ Ambari Agent
 import nodemanager_upgrade
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -72,7 +72,6 @@ class NodemanagerDefault(Nodemanager):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
index ecaea4c..7885c94 100644 (file)
@@ -20,7 +20,6 @@ Ambari Agent
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -111,7 +110,6 @@ class ResourcemanagerDefault(Resourcemanager):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
index ef4f7ea..b6a89b4 100644 (file)
@@ -21,7 +21,7 @@ Ambari Agent
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
@@ -56,7 +56,6 @@ class YarnClientDefault(YarnClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 
index a435b80..2aec6ba 100644 (file)
@@ -20,7 +20,7 @@ Ambari Agent
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions import check_process_status
@@ -72,7 +72,6 @@ class ApplicationTimelineServerDefault(ApplicationTimelineServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def status(self, env):
index 3938c15..d89e5b5 100644 (file)
@@ -21,7 +21,7 @@ Ambari Agent
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -74,7 +74,6 @@ class HistoryServerDefault(HistoryServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
       # MC Hammer said, "Can't touch this"
       copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
index efcb2da..79e3158 100644 (file)
@@ -24,7 +24,7 @@ import sys
 
 # Local imports
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
@@ -71,7 +71,6 @@ class MapReduce2Client(Script):
       # Because this script was called from ru_execute_tasks.py which already enters an Environment with its own basedir,
       # must change it now so this function can find the Jinja Templates for the service.
       env.config.basedir = base_dir
-      conf_select.select(params.stack_name, conf_select_name, params.version)
       self.configure(env, config_dir=config_dir)
 
 
@@ -87,7 +86,6 @@ class MapReduce2ClientDefault(MapReduce2Client):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 
index ed83402..280fc2f 100644 (file)
@@ -22,7 +22,7 @@ Ambari Agent
 import nodemanager_upgrade
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -72,7 +72,6 @@ class NodemanagerDefault(Nodemanager):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
index 4ceff1c..55214f6 100644 (file)
@@ -20,7 +20,6 @@ Ambari Agent
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -111,7 +110,6 @@ class ResourcemanagerDefault(Resourcemanager):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def disable_security(self, env):
index ef4f7ea..b6a89b4 100644 (file)
@@ -21,7 +21,7 @@ Ambari Agent
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
@@ -56,7 +56,6 @@ class YarnClientDefault(YarnClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 
index f8f6e3d..7f506c5 100644 (file)
@@ -30,7 +30,6 @@ from resource_management.core.source import StaticFile
 from resource_management.libraries import XmlConfig
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.decorator import retry
@@ -251,7 +250,6 @@ class Master(Script):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, format_stack_version(params.version)):
-      conf_select.select(params.stack_name, "zeppelin", params.version)
       stack_select.select_packages(params.version)
 
   def set_interpreter_settings(self, config_data):
index c4fdfcc..ab798ec 100644 (file)
@@ -30,7 +30,6 @@ from resource_management.core.source import StaticFile
 from resource_management.libraries import XmlConfig
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.decorator import retry
@@ -253,7 +252,6 @@ class Master(Script):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, format_stack_version(params.version)):
-      conf_select.select(params.stack_name, "zeppelin", params.version)
       stack_select.select_packages(params.version)
 
   def set_interpreter_settings(self, config_data):
index e52522a..39daea4 100644 (file)
@@ -21,7 +21,7 @@ Ambari Agent
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.format import format
@@ -67,7 +67,6 @@ class ZookeeperClientLinux(ZookeeperClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, format_stack_version(params.version)):
-      conf_select.select(params.stack_name, "zookeeper", params.version)
       stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
index 8d6acd9..f0e4ab9 100644 (file)
@@ -24,7 +24,6 @@ from ambari_commons.constants import UPGRADE_TYPE_NON_ROLLING
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import get_unique_id_and_date
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.version import format_stack_version
@@ -74,7 +73,6 @@ class ZookeeperServerLinux(ZookeeperServer):
     env.set_params(params)
 
     if check_stack_feature(StackFeature.ROLLING_UPGRADE, format_stack_version(params.version)):
-      conf_select.select(params.stack_name, "zookeeper", params.version)
       stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
index eb3fb5d..ef5607e 100644 (file)
@@ -205,10 +205,6 @@ class InstallPackages(Script):
       Link("/usr/bin/conf-select", to = "/usr/bin/hdfconf-select")
 
     for package_name, directories in conf_select.get_package_dirs().iteritems():
-      conf_selector_name = stack_tools.get_stack_tool_name(stack_tools.CONF_SELECTOR_NAME)
-      Logger.info("The current cluster stack of {0} does not require backing up configurations; "
-                  "only {1} versioned config directories will be created.".format(stack_version, conf_selector_name))
-      # only link configs for all known packages
       conf_select.select(self.stack_name, package_name, stack_version, ignore_errors = True)
 
 
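InstallPackages loses the per-stack backup logging and now links configs for every known package unconditionally. A sketch of the surviving loop; judging from the stack_packages.json additions later in this diff, each entry returned by conf_select.get_package_dirs() presumably maps a package name to its conf_dir/current_dir pairs:

    # ignore_errors is set because not every known package is installed on
    # every host; missing ones should not fail the distribution step.
    for package_name, directories in conf_select.get_package_dirs().iteritems():
      conf_select.select(self.stack_name, package_name, stack_version, ignore_errors = True)
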
index 95f7323..ef9ea29 100644 (file)
@@ -79,7 +79,6 @@ class UpgradeSetAll(Script):
           link_config(dir_def['conf_dir'], dir_def['current_dir'])
 
 
-
 def is_host_skippable(stack_selector_path, formatted_version):
   """
   Gets whether this host should not have the stack select tool called.
index 1915e9f..45f3b42 100644 (file)
@@ -262,14 +262,14 @@ gpgcheck=0</value>
     </value-attributes>
     <on-ambari-upgrade add="true"/>
   </property>
-  <!-- Define stack_select_packages property in the base stack. DO NOT override this property for each stack version -->
+  <!-- Define stack_packages property in the base stack. DO NOT override this property for each stack version -->
   <property>
-    <name>stack_select_packages</name>
+    <name>stack_packages</name>
     <value/>
     <description>Associations between component and stack-select tools.</description>
     <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
     <value-attributes>
-      <property-file-name>stack_select_packages.json</property-file-name>
+      <property-file-name>stack_packages.json</property-file-name>
       <property-file-type>json</property-file-type>
       <read-only>true</read-only>
       <overridable>false</overridable>
index 8b61a93..daaffd8 100644 (file)
@@ -50,8 +50,8 @@ def setup_stack_symlinks(struct_out_file):
     return
 
   # get the packages which the stack-select tool should be used on
-  stack_select_packages = stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
-  if stack_select_packages is None:
+  stack_packages = stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
+  if stack_packages is None:
     return
 
   json_version = load_version(struct_out_file)
@@ -62,7 +62,7 @@ def setup_stack_symlinks(struct_out_file):
 
   # On parallel command execution this should be executed by a single process at a time.
   with FcntlBasedProcessLock(params.stack_select_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
-    for package in stack_select_packages:
+    for package in stack_packages:
       stack_select.select(package, json_version)
 
 
@@ -123,10 +123,6 @@ def link_configs(struct_out_file):
   """
   import params
 
-  if not Script.is_stack_greater_or_equal("2.3"):
-    Logger.info("Can only link configs for HDP-2.3 and higher.")
-    return
-
   json_version = load_version(struct_out_file)
 
   if not json_version:
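The after-INSTALL hook is the other main consumer: setup_stack_symlinks() is renamed around stack_packages, and link_configs() drops its HDP-2.3 version gate. A sketch of the selection flow using only the names visible in the hunk above:

    # Which packages the stack-select tool should act on now comes from the
    # stack_packages definition rather than a hard-coded list.
    stack_packages = stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
    if stack_packages is not None:
      json_version = load_version(struct_out_file)
      # Serialize across commands executing in parallel on the same host.
      with FcntlBasedProcessLock(params.stack_select_lock_file,
                                 enabled = params.is_parallel_execution_enabled,
                                 skip_fcntl_failures = True):
        for package in stack_packages:
          stack_select.select(package, json_version)
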
           ]
         }
       }
+    },
+    "conf-select": {
+      "accumulo": [
+        {
+          "conf_dir": "/etc/accumulo/conf",
+          "current_dir": "{0}/current/accumulo-client/conf"
+        }
+      ],
+      "atlas": [
+        {
+          "conf_dir": "/etc/atlas/conf",
+          "current_dir": "{0}/current/atlas-client/conf"
+        }
+      ],
+      "druid": [
+        {
+          "conf_dir": "/etc/druid/conf",
+          "current_dir": "{0}/current/druid-overlord/conf"
+        }
+      ],
+      "falcon": [
+        {
+          "conf_dir": "/etc/falcon/conf",
+          "current_dir": "{0}/current/falcon-client/conf"
+        }
+      ],
+      "flume": [
+        {
+          "conf_dir": "/etc/flume/conf",
+          "current_dir": "{0}/current/flume-server/conf"
+        }
+      ],
+      "hadoop": [
+        {
+          "conf_dir": "/etc/hadoop/conf",
+          "current_dir": "{0}/current/hadoop-client/conf"
+        }
+      ],
+      "hbase": [
+        {
+          "conf_dir": "/etc/hbase/conf",
+          "current_dir": "{0}/current/hbase-client/conf"
+        }
+      ],
+      "hive": [
+        {
+          "conf_dir": "/etc/hive/conf",
+          "current_dir": "{0}/current/hive-client/conf"
+        }
+      ],
+      "hive2": [
+        {
+          "conf_dir": "/etc/hive2/conf",
+          "current_dir": "{0}/current/hive-server2-hive2/conf"
+        }
+      ],
+      "hive-hcatalog": [
+        {
+          "conf_dir": "/etc/hive-webhcat/conf",
+          "prefix": "/etc/hive-webhcat",
+          "current_dir": "{0}/current/hive-webhcat/etc/webhcat"
+        },
+        {
+          "conf_dir": "/etc/hive-hcatalog/conf",
+          "prefix": "/etc/hive-hcatalog",
+          "current_dir": "{0}/current/hive-webhcat/etc/hcatalog"
+        }
+      ],
+      "kafka": [
+        {
+          "conf_dir": "/etc/kafka/conf",
+          "current_dir": "{0}/current/kafka-broker/conf"
+        }
+      ],
+      "knox": [
+        {
+          "conf_dir": "/etc/knox/conf",
+          "current_dir": "{0}/current/knox-server/conf"
+        }
+      ],
+      "mahout": [
+        {
+          "conf_dir": "/etc/mahout/conf",
+          "current_dir": "{0}/current/mahout-client/conf"
+        }
+      ],
+      "nifi": [
+        {
+          "conf_dir": "/etc/nifi/conf",
+          "current_dir": "{0}/current/nifi/conf"
+        }
+      ],
+      "oozie": [
+        {
+          "conf_dir": "/etc/oozie/conf",
+          "current_dir": "{0}/current/oozie-client/conf"
+        }
+      ],
+      "phoenix": [
+        {
+          "conf_dir": "/etc/phoenix/conf",
+          "current_dir": "{0}/current/phoenix-client/conf"
+        }
+      ],
+      "pig": [
+        {
+          "conf_dir": "/etc/pig/conf",
+          "current_dir": "{0}/current/pig-client/conf"
+        }
+      ],
+      "ranger-admin": [
+        {
+          "conf_dir": "/etc/ranger/admin/conf",
+          "current_dir": "{0}/current/ranger-admin/conf"
+        }
+      ],
+      "ranger-kms": [
+        {
+          "conf_dir": "/etc/ranger/kms/conf",
+          "current_dir": "{0}/current/ranger-kms/conf"
+        }
+      ],
+      "ranger-tagsync": [
+        {
+          "conf_dir": "/etc/ranger/tagsync/conf",
+          "current_dir": "{0}/current/ranger-tagsync/conf"
+        }
+      ],
+      "ranger-usersync": [
+        {
+          "conf_dir": "/etc/ranger/usersync/conf",
+          "current_dir": "{0}/current/ranger-usersync/conf"
+        }
+      ],
+      "slider": [
+        {
+          "conf_dir": "/etc/slider/conf",
+          "current_dir": "{0}/current/slider-client/conf"
+        }
+      ],
+      "spark": [
+        {
+          "conf_dir": "/etc/spark/conf",
+          "current_dir": "{0}/current/spark-client/conf"
+        }
+      ],
+      "spark2": [
+        {
+          "conf_dir": "/etc/spark2/conf",
+          "current_dir": "{0}/current/spark2-client/conf"
+        }
+      ],
+      "sqoop": [
+        {
+          "conf_dir": "/etc/sqoop/conf",
+          "current_dir": "{0}/current/sqoop-client/conf"
+        }
+      ],
+      "storm": [
+        {
+          "conf_dir": "/etc/storm/conf",
+          "current_dir": "{0}/current/storm-client/conf"
+        }
+      ],
+      "storm-slider-client": [
+        {
+          "conf_dir": "/etc/storm-slider-client/conf",
+          "current_dir": "{0}/current/storm-slider-client/conf"
+        }
+      ],
+      "superset": [
+        {
+          "conf_dir": "/etc/druid-superset/conf",
+          "current_dir": "{0}/current/druid-superset/conf"
+        }
+      ],
+      "tez": [
+        {
+          "conf_dir": "/etc/tez/conf",
+          "current_dir": "{0}/current/tez-client/conf"
+        }
+      ],
+      "zeppelin": [
+        {
+          "conf_dir": "/etc/zeppelin/conf",
+          "current_dir": "{0}/current/zeppelin-server/conf"
+        }
+      ],
+      "zookeeper": [
+        {
+          "conf_dir": "/etc/zookeeper/conf",
+          "current_dir": "{0}/current/zookeeper-client/conf"
+        }
+      ]
     }
   }
-}
\ No newline at end of file
+}
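This "conf-select" section is the data that replaces the structures previously hard-coded in conf_select.py; the same payload reaches agents inside cluster-env (see the stack_packages test wiring near the end of this diff). A hedged sketch of how one dir_def entry is presumably consumed, modeled on the link_config() call in the UpgradeSetAll hunk earlier in this diff; the "HDP" key path and the stack root value are assumptions for illustration only:

    stack_root = "/usr/hdp"  # assumption; {0} in current_dir appears to be the stack root
    for package, dir_defs in stack_packages_json["HDP"]["conf-select"].items():  # key path assumed
      for dir_def in dir_defs:
        # e.g. /etc/hadoop/conf -> /usr/hdp/current/hadoop-client/conf
        link_config(dir_def["conf_dir"], dir_def["current_dir"].format(stack_root))
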
           ]
         }
       }
+    },
+    "conf-select": {
+      "accumulo": [
+        {
+          "conf_dir": "/etc/accumulo/conf",
+          "current_dir": "{0}/current/accumulo-client/conf"
+        }
+      ],
+      "atlas": [
+        {
+          "conf_dir": "/etc/atlas/conf",
+          "current_dir": "{0}/current/atlas-client/conf"
+        }
+      ],
+      "druid": [
+        {
+          "conf_dir": "/etc/druid/conf",
+          "current_dir": "{0}/current/druid-overlord/conf"
+        }
+      ],
+      "falcon": [
+        {
+          "conf_dir": "/etc/falcon/conf",
+          "current_dir": "{0}/current/falcon-client/conf"
+        }
+      ],
+      "flume": [
+        {
+          "conf_dir": "/etc/flume/conf",
+          "current_dir": "{0}/current/flume-server/conf"
+        }
+      ],
+      "hadoop": [
+        {
+          "conf_dir": "/etc/hadoop/conf",
+          "current_dir": "{0}/current/hadoop-client/conf"
+        }
+      ],
+      "hbase": [
+        {
+          "conf_dir": "/etc/hbase/conf",
+          "current_dir": "{0}/current/hbase-client/conf"
+        }
+      ],
+      "hive": [
+        {
+          "conf_dir": "/etc/hive/conf",
+          "current_dir": "{0}/current/hive-client/conf"
+        }
+      ],
+      "hive2": [
+        {
+          "conf_dir": "/etc/hive2/conf",
+          "current_dir": "{0}/current/hive-server2-hive2/conf"
+        }
+      ],
+      "hive-hcatalog": [
+        {
+          "conf_dir": "/etc/hive-webhcat/conf",
+          "prefix": "/etc/hive-webhcat",
+          "current_dir": "{0}/current/hive-webhcat/etc/webhcat"
+        },
+        {
+          "conf_dir": "/etc/hive-hcatalog/conf",
+          "prefix": "/etc/hive-hcatalog",
+          "current_dir": "{0}/current/hive-webhcat/etc/hcatalog"
+        }
+      ],
+      "kafka": [
+        {
+          "conf_dir": "/etc/kafka/conf",
+          "current_dir": "{0}/current/kafka-broker/conf"
+        }
+      ],
+      "knox": [
+        {
+          "conf_dir": "/etc/knox/conf",
+          "current_dir": "{0}/current/knox-server/conf"
+        }
+      ],
+      "mahout": [
+        {
+          "conf_dir": "/etc/mahout/conf",
+          "current_dir": "{0}/current/mahout-client/conf"
+        }
+      ],
+      "nifi": [
+        {
+          "conf_dir": "/etc/nifi/conf",
+          "current_dir": "{0}/current/nifi/conf"
+        }
+      ],
+      "oozie": [
+        {
+          "conf_dir": "/etc/oozie/conf",
+          "current_dir": "{0}/current/oozie-client/conf"
+        }
+      ],
+      "phoenix": [
+        {
+          "conf_dir": "/etc/phoenix/conf",
+          "current_dir": "{0}/current/phoenix-client/conf"
+        }
+      ],
+      "pig": [
+        {
+          "conf_dir": "/etc/pig/conf",
+          "current_dir": "{0}/current/pig-client/conf"
+        }
+      ],
+      "ranger-admin": [
+        {
+          "conf_dir": "/etc/ranger/admin/conf",
+          "current_dir": "{0}/current/ranger-admin/conf"
+        }
+      ],
+      "ranger-kms": [
+        {
+          "conf_dir": "/etc/ranger/kms/conf",
+          "current_dir": "{0}/current/ranger-kms/conf"
+        }
+      ],
+      "ranger-tagsync": [
+        {
+          "conf_dir": "/etc/ranger/tagsync/conf",
+          "current_dir": "{0}/current/ranger-tagsync/conf"
+        }
+      ],
+      "ranger-usersync": [
+        {
+          "conf_dir": "/etc/ranger/usersync/conf",
+          "current_dir": "{0}/current/ranger-usersync/conf"
+        }
+      ],
+      "slider": [
+        {
+          "conf_dir": "/etc/slider/conf",
+          "current_dir": "{0}/current/slider-client/conf"
+        }
+      ],
+      "spark": [
+        {
+          "conf_dir": "/etc/spark/conf",
+          "current_dir": "{0}/current/spark-client/conf"
+        }
+      ],
+      "spark2": [
+        {
+          "conf_dir": "/etc/spark2/conf",
+          "current_dir": "{0}/current/spark2-client/conf"
+        }
+      ],
+      "sqoop": [
+        {
+          "conf_dir": "/etc/sqoop/conf",
+          "current_dir": "{0}/current/sqoop-client/conf"
+        }
+      ],
+      "storm": [
+        {
+          "conf_dir": "/etc/storm/conf",
+          "current_dir": "{0}/current/storm-client/conf"
+        }
+      ],
+      "storm-slider-client": [
+        {
+          "conf_dir": "/etc/storm-slider-client/conf",
+          "current_dir": "{0}/current/storm-slider-client/conf"
+        }
+      ],
+      "superset": [
+        {
+          "conf_dir": "/etc/druid-superset/conf",
+          "current_dir": "{0}/current/druid-superset/conf"
+        }
+      ],
+      "tez": [
+        {
+          "conf_dir": "/etc/tez/conf",
+          "current_dir": "{0}/current/tez-client/conf"
+        }
+      ],
+      "zeppelin": [
+        {
+          "conf_dir": "/etc/zeppelin/conf",
+          "current_dir": "{0}/current/zeppelin-server/conf"
+        }
+      ],
+      "zookeeper": [
+        {
+          "conf_dir": "/etc/zookeeper/conf",
+          "current_dir": "{0}/current/zookeeper-client/conf"
+        }
+      ]
     }
   }
-}
\ No newline at end of file
+}
index a488a96..de83f7e 100644 (file)
@@ -115,10 +115,12 @@ class TestRUSetAll(RMFTestCase):
     with open(json_file_path, "r") as json_file:
       json_payload = json.load(json_file)
 
+    json_payload['hostLevelParams']['stack_name'] = "HDP"
     json_payload['hostLevelParams']['stack_version'] = "2.3"
     json_payload['commandParams']['version'] = "2.3.0.0-1234"
     json_payload["configurations"]["cluster-env"]["stack_tools"] = self.get_stack_tools()
     json_payload["configurations"]["cluster-env"]["stack_features"] = self.get_stack_features()
+    json_payload["configurations"]["cluster-env"]["stack_packages"] = self.get_stack_packages()
 
     config_dict = ConfigDictionary(json_payload)
 
index 7c5c7f5..135b239 100644 (file)
@@ -244,25 +244,8 @@ class TestHBaseClient(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hbase-client', version), sudo=True)
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'phoenix-client', version), sudo=True)
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
-
-    self.assertEquals(3, mocks_dict['call'].call_count)
-    self.assertEquals(6, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hbase', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[1][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hbase', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[5][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[1][0][0])
-
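With conf-select no longer shelled out during pre_upgrade_restart, the unit tests shed their conf-select call_mocks and call-count bookkeeping; what remains is the stack-select assertion. The trimmed style the test hunks converge on, sketched with the hbase-client example from the hunk above:

    self.assertResourceCalledIgnoreEarlier('Execute',
        ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hbase-client', version), sudo=True)
    self.assertNoMoreResources()
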
index 42289e1..a28c3f9 100644 (file)
@@ -776,18 +776,7 @@ class TestHBaseMaster(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hbase-master', version), sudo=True)
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(3, mocks_dict['checked_call'].call_count)
-
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hbase', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[1][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hbase', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-
index 6a2d8fb..6f27ecc 100644 (file)
@@ -586,17 +586,7 @@ class TestHbaseRegionServer(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None), (0, None), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hbase-regionserver', version), sudo=True)
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(3, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hbase', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[1][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hbase', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-
index 973e274..972aa61 100644 (file)
@@ -439,10 +439,5 @@ class TestPhoenixQueryServer(RMFTestCase):
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES)
 
-    self.assertResourceCalled('Directory', '/etc/hbase/2.3.0.0-1234/0',
-        create_parents = True,
-        mode = 0755,
-        cd_access = 'a',
-    )
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'phoenix-server', '2.3.0.0-1234'), sudo=True)
     self.assertNoMoreResources()
index 0f31ad2..966254a 100644 (file)
@@ -510,22 +510,11 @@ class TestDatanode(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
-    self.assertResourceCalled('Link', ('/etc/hadoop/conf'), to='/usr/hdp/current/hadoop-client/conf')
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-datanode', version), sudo=True,)
 
     self.assertNoMoreResources()
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-
 
   @patch("socket.gethostbyname")
   @patch('time.sleep')
index 680c984..7a70578 100644 (file)
@@ -107,21 +107,11 @@ class Test(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True,)
     self.assertNoMoreResources()
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-
   def test_pre_upgrade_restart(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
index 06c5fdd..22e4827 100644 (file)
@@ -412,16 +412,7 @@ class TestJournalnode(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', ('/etc/hadoop/conf'), to='/usr/hdp/current/hadoop-client/conf')
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-journalnode', version), sudo=True,)
     self.assertNoMoreResources()
-
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index eb595c1..b26c8fb 100644 (file)
@@ -1351,12 +1351,8 @@ class TestNamenode(RMFTestCase):
                        config_file = "nn_eu_standby.json",
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None), (0, None, ''), (0, None)] ,
                        mocks_dict=mocks_dict)
 
-    calls = mocks_dict['call'].call_args_list
-    self.assertTrue(len(calls) >= 1)
-    self.assertTrue(calls[0].startsWith("conf-select create-conf-dir --package hadoop --stack-version 2.3.2.0-2844 --conf-version 0"))
 
 
   @patch("hdfs_namenode.is_this_namenode_active")
@@ -1429,10 +1425,8 @@ class TestNamenode(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, None), (0, None), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', '/etc/hadoop/conf', to='/usr/hdp/current/hadoop-client/conf')
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-namenode', version), sudo=True)
     self.assertNoMoreResources()
 
@@ -1665,11 +1659,8 @@ class TestNamenode(RMFTestCase):
                        config_dict = json_content,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = itertools.cycle([(0, None, None)]),
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-      to = '/usr/hdp/current/hadoop-client/conf')
 
     import sys
     self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
index 773d3fe..4317c30 100644 (file)
@@ -295,9 +295,7 @@ class TestNFSGateway(RMFTestCase):
                        config_dict = json_content,
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
-                       target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None), (0, None), (0, None)])
-    self.assertResourceCalled('Link', ('/etc/hadoop/conf'), to='/usr/hdp/current/hadoop-client/conf')
+                       target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-nfs3', version), sudo=True,)
     self.assertNoMoreResources()
index 3bc597e..f7af5b9 100644 (file)
@@ -227,25 +227,8 @@ class TestHiveClient(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', ('/etc/hive/conf'), to='/usr/hdp/current/hive-client/conf')
     self.assertResourceCalledIgnoreEarlier('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True,)
     self.assertNoMoreResources()
-
-    self.assertEquals(2, mocks_dict['call'].call_count)
-    self.assertEquals(2, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hive', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hive', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[1][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[1][0][0])
index fc6d14e..033680c 100644 (file)
@@ -910,10 +910,8 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
                        config_dict = json_content,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', ('/etc/hive/conf'), to='/usr/hdp/current/hive-client/conf')
     self.assertResourceCalled('Execute',
 
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-server2', version), sudo=True,)
@@ -932,12 +930,3 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hive', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hive', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index a6a4fa0..943c201 100644 (file)
@@ -314,7 +314,6 @@ class TestWebHCatServer(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
     self.assertTrue("params" in sys.modules)
@@ -325,20 +324,6 @@ class TestWebHCatServer(RMFTestCase):
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-webhcat', version), sudo=True,)
     self.assertNoMoreResources()
 
-    self.assertEquals(2, mocks_dict['call'].call_count)
-    self.assertEquals(2, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hive-hcatalog', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hive-hcatalog', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[1][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[1][0][0])
 
   @patch("resource_management.core.shell.call")
   def test_rolling_restart_configure(self, call_mock):
index 31d54ae..f160029 100644 (file)
@@ -270,19 +270,8 @@ class TestOozieClient(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', ('/etc/oozie/conf'), to='/usr/hdp/current/oozie-client/conf')
     self.assertResourceCalled('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-client', version), sudo=True)
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'oozie', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'oozie', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index 17b8abf..dfa22fd 100644 (file)
@@ -1259,7 +1259,7 @@ class TestOozieServer(RMFTestCase):
      config_overrides = self.CONFIG_OVERRIDES,
      stack_version = self.UPGRADE_STACK_VERSION,
      target = RMFTestCase.TARGET_COMMON_SERVICES,
-     call_mocks = [(0, None, ''), (0, prepare_war_stdout)],
+     call_mocks = [(0, prepare_war_stdout)],
      mocks_dict = mocks_dict)
 
     self.assertTrue(isfile_mock.called)
@@ -1270,10 +1270,6 @@ class TestOozieServer(RMFTestCase):
     self.assertEqual(glob_mock.call_count,1)
     glob_mock.assert_called_with('/usr/hdp/2.3.0.0-1234/hadoop/lib/hadoop-lzo*.jar')
 
-    self.assertResourceCalled('Link', '/etc/oozie/conf',
-                              to = '/usr/hdp/current/oozie-client/conf',
-    )
-
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-client', '2.3.0.0-1234'), sudo = True)
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', '2.3.0.0-1234'), sudo = True)
 
@@ -1284,17 +1280,6 @@ class TestOozieServer(RMFTestCase):
     self.assertResourceCalled('File', '/usr/hdp/current/oozie-server/libext/ext-2.2.zip', mode = 0644)
     self.assertNoMoreResources()
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'oozie', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'oozie', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-
 
   @patch("os.path.isdir")
   @patch("os.path.exists")
@@ -1505,7 +1490,7 @@ class TestOozieServer(RMFTestCase):
      classname = "OozieServer", command = "pre_upgrade_restart", config_dict = json_content,
      stack_version = self.UPGRADE_STACK_VERSION,
      target = RMFTestCase.TARGET_COMMON_SERVICES,
-     call_mocks = [(0, None, ''), (0, prepare_war_stdout)],
+     call_mocks = [(0, prepare_war_stdout)],
      mocks_dict = mocks_dict)
 
     self.assertTrue(isfile_mock.called)
@@ -1516,7 +1501,6 @@ class TestOozieServer(RMFTestCase):
     self.assertEqual(glob_mock.call_count,1)
     glob_mock.assert_called_with('/usr/hdp/2.3.0.0-1234/hadoop/lib/hadoop-lzo*.jar')
 
-    self.assertResourceCalled('Link', '/etc/oozie/conf', to = '/usr/hdp/current/oozie-client/conf')
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-client', '2.3.0.0-1234'), sudo = True)
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'oozie-server', '2.3.0.0-1234'), sudo = True)
 
@@ -1532,14 +1516,3 @@ class TestOozieServer(RMFTestCase):
     self.assertResourceCalled('Execute', 'ambari-sudo.sh chown oozie:hadoop /usr/hdp/current/oozie-server/libext/falcon-oozie-el-extension-*.jar')
 
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'oozie', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'oozie', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index 63076f9..3c4f899 100644
@@ -167,24 +167,8 @@ class TestPigClient(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
     self.assertNoMoreResources()
-
-    self.assertEquals(2, mocks_dict['call'].call_count)
-    self.assertEquals(2, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'pig', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[1][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'pig', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[1][0][0])
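
The call_mocks entries removed throughout these tests are queued return values for the mocked shell.call / shell.checked_call invocations, one tuple per shell result in the form (returncode, stdout) or (returncode, stdout, stderr). With the conf-select calls gone from the scripts, the queue entries that fed them go too. A hedged sketch of the queuing mechanism using plain mock; the actual RMFTestCase wiring is inferred, not shown in this patch.

# side_effect given a list makes each call return the next queued item,
# which is how the (0, None, '') style tuples were consumed one per call.
from mock.mock import MagicMock

call_mock = MagicMock(side_effect=[(0, None, ''), (0, None)])
rc, out, err = call_mock('first shell command')   # -> (0, None, '')
rc, out = call_mock('second shell command')       # -> (0, None)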
index 4622ae3..053d44a 100644
@@ -141,17 +141,6 @@ class TestSqoop(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', ('/etc/sqoop/conf'), to='/usr/hdp/current/sqoop-client/conf')
     self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'sqoop-client', version), sudo=True)
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'sqoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'sqoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index e156af2..ba3d0ab 100644
@@ -791,6 +791,3 @@ class TestHistoryServer(RMFTestCase):
     )
 
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
index 5898355..75eff39 100644
@@ -416,21 +416,11 @@ class TestMapReduce2Client(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
     self.assertNoMoreResources()
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-
   def test_stack_upgrade_save_new_config(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/client-upgrade.json"
     with open(config_file, "r") as f:
index 4281696..642043d 100644
@@ -663,17 +663,7 @@ class TestNodeManager(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-yarn-nodemanager', version), sudo=True)
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index 652fea8..8e92116 100644
@@ -552,17 +552,7 @@ class TestResourceManager(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-yarn-resourcemanager', version), sudo=True)
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index 09a6278..375028a 100644
@@ -583,17 +583,7 @@ class TestYarnClient(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index f074036..4aa18de 100644
@@ -197,19 +197,9 @@ class TestZookeeperClient(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'zookeeper-client', version), sudo=True)
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'zookeeper', '--stack-version', '2.3.0.0-3242', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'zookeeper', '--stack-version', '2.3.0.0-3242', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-
     self.assertNoMoreResources()
index bc58e56..3cbfbd6 100644
@@ -284,22 +284,10 @@ class TestZookeeperServer(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalledIgnoreEarlier('Link', ('/etc/zookeeper/conf'), to='/etc/zookeeper/conf.backup')
     self.assertResourceCalled('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'zookeeper-server', version), sudo=True)
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'zookeeper', '--stack-version', '2.3.0.0-3242', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'zookeeper', '--stack-version', '2.3.0.0-3242', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-
     self.assertNoMoreResources()
 
   @patch.object(resource_management.libraries.functions, "get_unique_id_and_date")
index caef738..f1da837 100644
@@ -19,15 +19,26 @@ limitations under the License.
 '''
 
 import json
-from mock.mock import MagicMock, call, patch
+from mock.mock import MagicMock, patch
 from stacks.utils.RMFTestCase import *
+from resource_management.core.logger import Logger
 from resource_management.libraries.functions import conf_select
+from resource_management.libraries.script import Script
 
 @patch("os.path.exists", new = MagicMock(return_value=True))
 @patch("os.path.isfile", new = MagicMock(return_value=False))
 class TestHookAfterInstall(RMFTestCase):
   CONFIG_OVERRIDES = {"serviceName":"HIVE", "role":"HIVE_SERVER"}
 
+  def setUp(self):
+    Logger.initialize_logger()
+
+    Script.config = dict()
+    Script.config.update( { "configurations" : { "cluster-env" : {} }, "hostLevelParams": {} } )
+    Script.config["configurations"]["cluster-env"]["stack_packages"] = RMFTestCase.get_stack_packages()
+    Script.config["hostLevelParams"] = { "stack_name" : "HDP" }
+
+
   def test_hook_default(self):
 
     self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",
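
This new setUp is the bootstrap the reworked conf_select requires: package directories are now resolved from the stack_packages blob in cluster-env rather than from a hard-coded map, so any test touching conf_select must seed Script.config first. A minimal sketch of that seeding as a reusable helper; the helper name is hypothetical, and the payload shape is taken from the test code in this patch rather than from the library itself.

from resource_management.libraries.script import Script

def seed_stack_config(stack_packages_json, stack_name="HDP"):
  # Mirrors the setUp above: conf_select reads cluster-env's stack_packages
  # and hostLevelParams' stack_name from the static Script.config.
  Script.config = {
    "configurations": {"cluster-env": {"stack_packages": stack_packages_json}},
    "hostLevelParams": {"stack_name": stack_name},
  }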
index dcc649d..9de2156 100644
@@ -126,18 +126,8 @@ class TestFalconClient(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'falcon-client', version), sudo=True,)
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'falcon', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'falcon', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index a7e6a1e..2c877c8 100644
@@ -470,7 +470,6 @@ class TestFalconServer(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute',
@@ -487,12 +486,3 @@ class TestFalconServer(RMFTestCase):
         action = ['delete'],
     )
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'falcon', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'falcon', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index e3ed890..630ac3b 100644
@@ -443,22 +443,12 @@ class TestHiveMetastore(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', ('/etc/hive/conf'), to='/usr/hdp/current/hive-client/conf')
     self.assertResourceCalled('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-metastore', version), sudo=True,)
     self.assertNoMoreResources()
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hive', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hive', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
 
   def test_pre_upgrade_restart_ims(self):
     """
@@ -558,11 +548,8 @@ class TestHiveMetastore(RMFTestCase):
       config_overrides = self.CONFIG_OVERRIDES,
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES,
-      call_mocks = [(0, None, ''), (0, None)],
       mocks_dict = mocks_dict)
 
-    # conf-select, hdp-select BEFORE upgrade schema calls
-    self.assertResourceCalled('Link', ('/etc/hive/conf'), to='/usr/hdp/current/hive-client/conf')
     self.assertResourceCalled('Execute', ('ambari-python-wrap',
      '/usr/bin/hdp-select',
      'set',
@@ -768,11 +755,8 @@ class TestHiveMetastore(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
-    self.assertResourceCalled('Link', ('/etc/hive/conf'), to='/usr/hdp/current/hive-client/conf')
-
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-metastore', version), sudo=True,)
 
 
index 608b156..220b422 100644
@@ -167,16 +167,6 @@ class TestStormDrpcServer(TestStormBase):
                      config_overrides = self.CONFIG_OVERRIDES,
                      stack_version = self.STACK_VERSION,
                      target = RMFTestCase.TARGET_COMMON_SERVICES,
-                     call_mocks = [(0, None, ''), (0, None)],
                      mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'storm', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'storm', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index ab5da22..a2f2e5e 100644
@@ -305,18 +305,7 @@ class TestStormNimbus(TestStormBase):
                      config_overrides = self.CONFIG_OVERRIDES,
                      stack_version = self.STACK_VERSION,
                      target = RMFTestCase.TARGET_COMMON_SERVICES,
-                     call_mocks = [(0, None, ''), (0, None)],
                      mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
     self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-nimbus', '2.3.0.0-1234'), sudo=True)
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'storm', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'storm', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-    self.assertNoMoreResources()
index efa50c8..df978b9 100644
@@ -141,18 +141,7 @@ class TestStormNimbus(TestStormBase):
                      config_overrides = self.CONFIG_OVERRIDES,
                      stack_version = self.STACK_VERSION,
                      target = RMFTestCase.TARGET_COMMON_SERVICES,
-                     call_mocks = [(0, None, ''), (0, None, '')],
                      mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
     self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-nimbus', '2.3.0.0-1234'), sudo=True)
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'storm', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'storm', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-    self.assertNoMoreResources()
index 116597b..0232b45 100644
@@ -206,17 +206,7 @@ class TestStormSupervisor(TestStormBase):
                      config_overrides = self.CONFIG_OVERRIDES,
                      stack_version = self.STACK_VERSION,
                      target = RMFTestCase.TARGET_COMMON_SERVICES,
-                     call_mocks = [(0, None, ''), (0, None)],
                      mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
     self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-supervisor', '2.3.0.0-1234'), sudo=True)
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'storm', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'storm', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index 46221a9..f995d89 100644
@@ -184,18 +184,7 @@ class TestStormSupervisor(TestStormBase):
                      config_overrides = self.CONFIG_OVERRIDES,
                      stack_version = self.STACK_VERSION,
                      target = RMFTestCase.TARGET_COMMON_SERVICES,
-                     call_mocks = [(0, None, ''), (0, None)],
                      mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
     self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-supervisor', '2.3.0.0-1234'), sudo=True)
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'storm', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'storm', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-    self.assertNoMoreResources()
index 985c754..ee81eed 100644
@@ -236,16 +236,6 @@ class TestStormUiServer(TestStormBase):
                      config_overrides = self.CONFIG_OVERRIDES,
                      stack_version = self.STACK_VERSION,
                      target = RMFTestCase.TARGET_COMMON_SERVICES,
-                     call_mocks = [(0, None, ''), (0, None)],
                      mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'storm-client', '2.3.0.0-1234'), sudo=True)
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'storm', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'storm', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index bd7567a..fad99f6 100644
@@ -110,26 +110,11 @@ class TestTezClient(RMFTestCase):
                        config_dict = json_content,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
     self.assertNoMoreResources()
 
-    self.assertEquals(2, mocks_dict['call'].call_count)
-    self.assertEquals(2, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'tez', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'tez', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[1][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[1][0][0])
 
   def test_stack_upgrade_save_new_config(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.1/configs/client-upgrade.json"
index ad2d25f..b352920 100644
@@ -284,17 +284,7 @@ class TestAppTimelineServer(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-yarn-timelineserver', version), sudo=True)
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index 3ffd0a9..41bc815 100644
@@ -68,18 +68,7 @@ class TestAccumuloClient(RMFTestCase):
       config_overrides = self.CONFIG_OVERRIDES,
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES,
-      call_mocks = [(0, None, ''), (0, None)],
       mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'accumulo-client', version), sudo=True,)
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'accumulo', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'accumulo', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index 477830e..7433dd3 100644
@@ -158,20 +158,9 @@ class TestKafkaBroker(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute',
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'kafka-broker', version), sudo=True,)
 
-    self.assertResourceCalled("Link", "/etc/kafka/conf", to="/usr/hdp/current/kafka-broker/conf")
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'kafka', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'kafka', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index 32b5d70..8cdbb2e 100644
@@ -179,7 +179,6 @@ class TestKnoxGateway(RMFTestCase):
                        config_dict = json_content,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalled('Execute', ('tar',
@@ -191,14 +190,6 @@ class TestKnoxGateway(RMFTestCase):
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'knox-server', version),sudo = True)
     self.assertNoMoreResources()
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'knox', '--stack-version', version, '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'knox', '--stack-version', version, '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
 
   @patch("os.remove")
   @patch("os.path.exists")
@@ -228,7 +219,6 @@ class TestKnoxGateway(RMFTestCase):
                        config_dict = json_content,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalled('Execute', ('tar',
@@ -243,14 +233,6 @@ class TestKnoxGateway(RMFTestCase):
 
     self.assertNoMoreResources()
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'knox', '--stack-version', version, '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'knox', '--stack-version', version, '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
 
   @patch("os.remove")
   @patch("os.path.exists")
@@ -311,14 +293,6 @@ class TestKnoxGateway(RMFTestCase):
     )
     self.assertNoMoreResources()
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'knox', '--stack-version', version, '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'knox', '--stack-version', version, '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
     '''
 
   @patch("os.path.islink")
index 961be84..2936258 100644
@@ -222,16 +222,6 @@ class TestRangerAdmin(RMFTestCase):
                        config_dict = json_content,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'ranger-admin', '2.3.0.0-1234'), sudo=True)
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'ranger-admin', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'ranger-admin', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index f0aee10..4c5ce0a 100644
@@ -144,7 +144,6 @@ class TestRangerUsersync(RMFTestCase):
                        config_dict = json_content,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (1, None), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertTrue(setup_usersync_mock.called)
@@ -153,14 +152,6 @@ class TestRangerUsersync(RMFTestCase):
                               sudo = True)
     self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'ranger-usersync', '2.3.0.0-1234'), sudo=True)
 
-    self.assertEquals(2, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'ranger-usersync', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'ranger-usersync', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
 
   def assert_configure_default(self):
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/ranger-usersync/install.properties',
index d4dcc48..3583016 100644
@@ -193,26 +193,9 @@ class TestSliderClient(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'slider-client', '2.3.0.0-1234'), sudo=True)
     self.assertResourceCalledIgnoreEarlier("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', '2.3.0.0-1234'), sudo=True)
-    self.assertResourceCalled('Link', '/etc/hadoop/conf', to='/usr/hdp/current/hadoop-client/conf')
 
     self.assertNoMoreResources()
-
-    self.assertEquals(2, mocks_dict['call'].call_count)
-    self.assertEquals(2, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'slider', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'slider', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[1][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[1][0][0])
index b87f8fc..40c9d47 100644
@@ -339,17 +339,7 @@ class TestJobHistoryServer(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'spark-historyserver', version), sudo=True)
     self.assertNoMoreResources()
-
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'spark', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'spark', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
index 3fe59ad..b4535cd 100644
@@ -168,20 +168,11 @@ class TestSparkClient(RMFTestCase):
                        config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None, ''), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'spark-client', version), sudo=True)
     self.assertNoMoreResources()
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'spark', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[0][0][0])
-    self.assertEquals(
-      ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'spark', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['call'].call_args_list[0][0][0])
 
   def test_stack_upgrade_save_new_config(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.2/configs/default.json"
index d445d74..2eeec46 100644
@@ -22,6 +22,7 @@ from mock.mock import patch, MagicMock
 from stacks.utils.RMFTestCase import *
 from resource_management.core.logger import Logger
 from resource_management.libraries.functions import conf_select
+from resource_management.libraries.script import Script
 
 class TestConfSelect(RMFTestCase):
 
@@ -33,6 +34,12 @@ class TestConfSelect(RMFTestCase):
     self.env = Environment(test_mode=True)
     self.env.__enter__()
 
+    Script.config = dict()
+    Script.config.update( { "configurations" : { "cluster-env" : {} }, "hostLevelParams": {} } )
+    Script.config["configurations"]["cluster-env"]["stack_packages"] = RMFTestCase.get_stack_packages()
+    Script.config["hostLevelParams"] = { "stack_name" : "HDP" }
+
+
   def tearDown(self):
     self.env.__exit__(None,None,None)
 
@@ -82,9 +89,11 @@ class TestConfSelect(RMFTestCase):
     Tests that a bad enum throws an exception.
     :return:
     """
+    packages = conf_select.get_package_dirs()
+
     try:
       conf_select.convert_conf_directories_to_symlinks("hadoop", "2.3.0.0-1234",
-        conf_select._PACKAGE_DIRS["hadoop"], link_to = "INVALID")
+        packages["hadoop"], link_to = "INVALID")
       raise Exception("Expected failure when supplying a bad enum for link_to")
     except:
       pass
@@ -125,15 +134,16 @@ class TestConfSelect(RMFTestCase):
 
       return False
 
+    packages = conf_select.get_package_dirs()
+
     path_mock.side_effect = path_mock_call
     islink_mock.side_effect = islink_mock_call
     shell_call_mock.side_effect = mock_call
-    conf_select.convert_conf_directories_to_symlinks("hadoop", "2.3.0.0-1234", conf_select._PACKAGE_DIRS["hadoop"])
+    conf_select.convert_conf_directories_to_symlinks("hadoop", "2.3.0.0-1234", packages["hadoop"])
 
-    self.assertEqual(pprint.pformat(self.env.resource_list),
-      "[Execute[('cp', '-R', '-p', '/etc/hadoop/conf', '/etc/hadoop/conf.backup')],\n "
-      "Directory['/etc/hadoop/conf'],\n "
-      "Link['/etc/hadoop/conf']]")
+    self.assertEqual(pprint.pformat(self.env.resource_list[0]), "Execute[('cp', '-R', '-p', u'/etc/hadoop/conf', u'/etc/hadoop/conf.backup')]")
+    self.assertEqual(pprint.pformat(self.env.resource_list[1]), "Directory['/etc/hadoop/conf']")
+    self.assertEqual(pprint.pformat(self.env.resource_list[2]), "Link['/etc/hadoop/conf']")
 
 
   @patch.object(os.path, "exists", new = MagicMock(return_value = True))
@@ -147,8 +157,10 @@ class TestConfSelect(RMFTestCase):
     Tests that conf-select symlinking can detect a wrong directory
     :return:
     """
+    packages = conf_select.get_package_dirs()
+
     conf_select.convert_conf_directories_to_symlinks("hadoop", "2.3.0.0-1234",
-      conf_select._PACKAGE_DIRS["hadoop"])
+      packages["hadoop"])
 
     self.assertEqual(pprint.pformat(self.env.resource_list),
       "[Link['/etc/hadoop/conf'], Link['/etc/hadoop/conf']]")
@@ -161,7 +173,9 @@ class TestConfSelect(RMFTestCase):
     Tests that conf-select symlinking does nothing if the directory doesn't exist
     :return:
     """
+    packages = conf_select.get_package_dirs()
+
     conf_select.convert_conf_directories_to_symlinks("hadoop", "2.3.0.0-1234",
-      conf_select._PACKAGE_DIRS["hadoop"], link_to = conf_select.DIRECTORY_TYPE_BACKUP)
+      packages["hadoop"], link_to = conf_select.DIRECTORY_TYPE_BACKUP)
 
     self.assertEqual(pprint.pformat(self.env.resource_list), "[]")
\ No newline at end of file
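
Taken together, the TestConfSelect hunks show the call-site migration made throughout this commit: the module-level conf_select._PACKAGE_DIRS dictionary is gone, and callers now ask conf_select.get_package_dirs() for the mapping, which is computed from the seeded stack configuration. A short sketch of the new call shape, reusing only names that appear in the hunks above and assuming Script.config has been seeded as in TestConfSelect.setUp:

from resource_management.libraries.functions import conf_select

packages = conf_select.get_package_dirs()   # was: conf_select._PACKAGE_DIRS

# Signature unchanged; only the source of the directory mapping moved
# from a hard-coded constant to stack metadata.
conf_select.convert_conf_directories_to_symlinks(
    "hadoop", "2.3.0.0-1234", packages["hadoop"],
    link_to = conf_select.DIRECTORY_TYPE_BACKUP)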