AMBARI-21581 - Replace Hard Coded conf-select Structures (jonathanhurley)
ambari.git: ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server_interactive.py
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

"""
# Python Imports
import subprocess
import os
import re
import time
import shutil
from datetime import datetime
import json

# Ambari Commons & Resource Management imports
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions import format
from resource_management.libraries.functions.check_process_status import check_process_status
from resource_management.core.source import InlineTemplate
from resource_management.core.resources.system import Execute, Directory

# Imports needed for Rolling/Express Upgrade
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.copy_tarball import copy_to_hdfs

from resource_management.core import shell
from resource_management.core.exceptions import Fail
from resource_management.core.logger import Logger
from ambari_commons import OSCheck, OSConst
from ambari_commons.os_family_impl import OsFamilyImpl

from resource_management.core.exceptions import ComponentIsNotRunning
from resource_management.libraries.functions.decorator import retry
from resource_management.libraries.functions.security_commons import build_expectations, \
  cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
  FILE_TYPE_XML

# Local Imports
from setup_ranger_hive import setup_ranger_hive
from hive_service_interactive import hive_service_interactive
from hive_interactive import hive_interactive
from hive_server import HiveServerDefault
from setup_ranger_hive_interactive import setup_ranger_hive_interactive

import traceback

class HiveServerInteractive(Script):
  pass


@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class HiveServerInteractiveDefault(HiveServerInteractive):

  def install(self, env):
    import params
    self.install_packages(env)

  def configure(self, env):
    import params
    env.set_params(params)
    hive_interactive(name='hiveserver2')

  def pre_upgrade_restart(self, env, upgrade_type=None):
    Logger.info("Executing Hive Server Interactive Stack Upgrade pre-restart")
    import params
    env.set_params(params)

    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
      stack_select.select_packages(params.version)

      # Copy hive.tar.gz and tez.tar.gz used by Hive Interactive to HDFS
      resource_created = copy_to_hdfs(
        "hive2",
        params.user_group,
        params.hdfs_user,
        skip=params.sysprep_skip_copy_tarballs_hdfs)

      resource_created = copy_to_hdfs(
        "tez_hive2",
        params.user_group,
        params.hdfs_user,
        skip=params.sysprep_skip_copy_tarballs_hdfs) or resource_created

      if resource_created:
        params.HdfsResource(None, action="execute")
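        # HdfsResource(None, action="execute") flushes the HDFS operations queued by the
        # copy_to_hdfs() calls above in a single batch.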

  def start(self, env, upgrade_type=None):
    import params
    env.set_params(params)
    self.configure(env)

    if params.security_enabled:
      # Do the security setup, internally calls do_kinit()
      self.setup_security()

    # TODO : We need to have a conditional [re]start of LLAP once a "status check command" for LLAP is ready.
    # Check the status and, based on that, decide on [re]starting.

    # Start LLAP before starting Hive Server Interactive.
    status = self._llap_start(env)
    if not status:
      raise Fail("Skipping START of Hive Server Interactive since LLAP app couldn't be STARTED.")

    # TODO : test the workability of Ranger and Hive2 during upgrade
    setup_ranger_hive_interactive(upgrade_type=upgrade_type)
    hive_service_interactive('hiveserver2', action='start', upgrade_type=upgrade_type)


  def stop(self, env, upgrade_type=None):
    import params
    env.set_params(params)

    if params.security_enabled:
      self.do_kinit()

    # Stop Hive Interactive Server first
    hive_service_interactive('hiveserver2', action='stop')

    if not params.is_restart_command:
      self._llap_stop(env)
    else:
      Logger.info("LLAP stop is skipped as it's a restart command")

  def status(self, env):
    import status_params
    env.set_params(status_params)

    # The LLAP status check is not done here as part of the HSI status check, since it is a
    # heavyweight operation.

    # Check the Hive Server Interactive pid file.
    check_process_status(status_params.hive_interactive_pid)
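    # Note: check_process_status() raises ComponentIsNotRunning when the pid file is missing or the
    # process is not alive, which Ambari interprets as the component being stopped.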

  def restart_llap(self, env):
    """
    Custom command to restart LLAP.
    """
    Logger.info("Custom command to restart LLAP")
    import params
    env.set_params(params)

    if params.security_enabled:
      self.do_kinit()

    self._llap_stop(env)
    self._llap_start(env)

  def _llap_stop(self, env):
    import params
    Logger.info("Stopping LLAP")

    stop_cmd = ["slider", "stop", params.llap_app_name]

    code, output, error = shell.call(stop_cmd, user=params.hive_user, stderr=subprocess.PIPE, logoutput=True)
    if code == 0:
      Logger.info(format("Stopped {params.llap_app_name} application on Slider successfully"))
    elif code == 69 and output is not None and "Unknown application instance" in output:
      Logger.info(format("Application {params.llap_app_name} was already stopped on Slider"))
    else:
      raise Fail(format("Could not stop application {params.llap_app_name} on Slider. {error}\n{output}"))

    # Will exit with code 4 if it needs to be run with "--force" to delete directories and registries.
    Execute(('slider', 'destroy', params.llap_app_name, "--force"),
            user=params.hive_user,
            timeout=30,
            ignore_failures=True,
    )
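    # Note: the 'slider destroy' above runs as the hive user with a 30 second timeout; since
    # ignore_failures=True, a failed destroy is logged but does not fail the overall stop.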

  """
  Controls the start of LLAP.
  """
  def _llap_start(self, env, cleanup=False):
    import params
    env.set_params(params)

    if params.hive_server_interactive_ha:
      """
      Check llap app state
      """
      Logger.info("HSI HA is enabled. Checking if LLAP is already running ...")
      if params.stack_supports_hive_interactive_ga:
        status = self.check_llap_app_status_in_llap_ga(params.llap_app_name, 2, params.hive_server_interactive_ha)
      else:
        status = self.check_llap_app_status_in_llap_tp(params.llap_app_name, 2, params.hive_server_interactive_ha)

      if status:
        Logger.info("LLAP app '{0}' is already running.".format(params.llap_app_name))
        return True
      else:
        Logger.info("LLAP app '{0}' is not running. LLAP will be started.".format(params.llap_app_name))
      pass

    # Clean up the LLAP package folders left over from earlier runs.
    self._cleanup_past_llap_package_dirs()

    Logger.info("Starting LLAP")
    LLAP_PACKAGE_CREATION_PATH = Script.get_tmp_dir()

    unique_name = "llap-slider%s" % datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S')

    cmd = format("{stack_root}/current/hive-server2-hive2/bin/hive --service llap --slider-am-container-mb {params.slider_am_container_mb} "
                 "--size {params.llap_daemon_container_size}m --cache {params.hive_llap_io_mem_size}m --xmx {params.llap_heap_size}m "
                 "--loglevel {params.llap_log_level} {params.llap_extra_slider_opts} --output {LLAP_PACKAGE_CREATION_PATH}/{unique_name}")

    # Append params that are supported from the Hive LLAP GA version onwards.
    if params.stack_supports_hive_interactive_ga:
      # Figure out the Slider anti-affinity (AA) setting to be used.
      # YARN does not support anti-affinity, so Slider implements AA by means of exclusion lists, i.e., it
      # starts containers one by one and excludes the nodes it gets (adding a delay of ~2 sec./machine). When the LLAP
      # container memory size configuration is more than half of the YARN node memory, AA is implicit and should be avoided.
      slider_placement = 4
      if long(params.llap_daemon_container_size) > (0.5 * long(params.yarn_nm_mem)):
        slider_placement = 0
        Logger.info("Setting slider_placement : 0, as llap_daemon_container_size : {0} > 0.5 * "
                    "YARN NodeManager Memory({1})".format(params.llap_daemon_container_size, params.yarn_nm_mem))
      else:
        Logger.info("Setting slider_placement: 4, as llap_daemon_container_size : {0} <= 0.5 * "
                    "YARN NodeManager Memory({1})".format(params.llap_daemon_container_size, params.yarn_nm_mem))
      cmd += format(" --slider-placement {slider_placement} --skiphadoopversion --skiphbasecp --instances {params.num_llap_daemon_running_nodes}")

      # Set up the logger for the GA version only.
      cmd += format(" --logger {params.llap_logger}")
    else:
      cmd += format(" --instances {params.num_llap_nodes}")
    if params.security_enabled:
      llap_keytab_splits = params.hive_llap_keytab_file.split("/")
      Logger.debug("llap_keytab_splits : {0}".format(llap_keytab_splits))
      cmd += format(" --slider-keytab-dir .slider/keytabs/{params.hive_user}/ --slider-keytab "
                    "{llap_keytab_splits[4]} --slider-principal {params.hive_llap_principal}")

    # Add the aux jars if they are specified. If empty, there is no need to add this param.
    if params.hive_aux_jars:
      cmd += format(" --auxjars {params.hive_aux_jars}")

    # Append args.
    llap_java_args = InlineTemplate(params.llap_app_java_opts).get_content()
    cmd += format(" --args \" {llap_java_args}\"")
    # Append metaspace size to args.
    if params.java_version > 7 and params.llap_daemon_container_size > 4096:
      if params.llap_daemon_container_size <= 32768:
        metaspaceSize = "256m"
      else:
        metaspaceSize = "1024m"
      # Strip the closing quote of the --args value, append the flag, then re-add the quote.
      cmd = cmd[:-1] + " -XX:MetaspaceSize=" + metaspaceSize + "\""
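    # For reference, 'cmd' is now the complete LLAP package-generation command. With made-up values it
    # would expand to something roughly like (illustrative only, not actual defaults):
    #   <stack_root>/current/hive-server2-hive2/bin/hive --service llap --slider-am-container-mb 1024 \
    #     --size 8192m --cache 2048m --xmx 6144m --loglevel INFO --output <tmp_dir>/llap-slider2017-07-21_10-00-00 \
    #     --slider-placement 4 --skiphadoopversion --skiphbasecp --instances 3 --args " ... -XX:MetaspaceSize=256m"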

    run_file_path = None
    try:
      Logger.info(format("LLAP start command: {cmd}"))
      code, output, error = shell.checked_call(cmd, user=params.hive_user, quiet=True, stderr=subprocess.PIPE, logoutput=True)

      if code != 0 or output is None:
        raise Fail("Command failed with either a non-zero return code or no output.")

      # E.g., output:
      # Prepared llap-slider-05Apr2016/run.sh for running LLAP on Slider
      exp = r"Prepared (.*?run.sh) for running LLAP"
      run_file_path = None
      out_splits = output.split("\n")
      for line in out_splits:
        line = line.strip()
        m = re.match(exp, line, re.I)
        if m and len(m.groups()) == 1:
          run_file_name = m.group(1)
          run_file_path = os.path.join(params.hive_user_home_dir, run_file_name)
          break
      if not run_file_path:
        raise Fail("Did not find run.sh file in output: " + str(output))

      Logger.info(format("Run file path: {run_file_path}"))
      Execute(run_file_path, user=params.hive_user, logoutput=True)
      Logger.info("Submitted LLAP app name : {0}".format(params.llap_app_name))

      # We need to check the status of the LLAP app to figure out whether it got
      # launched properly and is in a running state. Then go ahead with the Hive Interactive Server start.
      if params.stack_supports_hive_interactive_ga:
        status = self.check_llap_app_status_in_llap_ga(params.llap_app_name, params.num_retries_for_checking_llap_status)
      else:
        status = self.check_llap_app_status_in_llap_tp(params.llap_app_name, params.num_retries_for_checking_llap_status)
      if status:
        Logger.info("LLAP app '{0}' deployed successfully.".format(params.llap_app_name))
        return True
      else:
        Logger.error("LLAP app '{0}' deployment unsuccessful.".format(params.llap_app_name))
        return False
    except:
      # Attempt to clean up the packaged application, or potentially rename it with a .bak
      if run_file_path is not None and cleanup:
        parent_dir = os.path.dirname(run_file_path)
        Directory(parent_dir,
                  action = "delete",
                  ignore_failures = True,
        )

      # re-raise the original exception
      raise
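    # Note: 'cleanup' defaults to False, so a failed start intentionally leaves the generated Slider
    # package directory on disk for debugging; package folders under the agent tmp dir are pruned on
    # each start by _cleanup_past_llap_package_dirs(), which retains only the three newest.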

  """
  Checks for and deletes the LLAP package folders from previous runs, ignoring the three latest packages.
  The last three are ignored for debugging/reference purposes.
  This helps keep the disk space usage in check.
  """
  def _cleanup_past_llap_package_dirs(self):
    try:
      import params
      Logger.info("Determining previous run 'LLAP package' folder(s) to be deleted ....")
      llap_package_folder_name_prefix = "llap-slider" # Package name is like : llap-sliderYYYY-MM-DD_HH-MM-SS
      num_folders_to_retain = 3  # Hardcoded for now, as no considerable use was found in providing an env param.
      file_names = [dir_name for dir_name in os.listdir(Script.get_tmp_dir())
                    if dir_name.startswith(llap_package_folder_name_prefix)]

      file_names.sort()
      del file_names[-num_folders_to_retain:] # Ignore the 'num_folders_to_retain' latest package folders.
      Logger.info("Previous run 'LLAP package' folder(s) to be deleted = {0}".format(file_names))

      if file_names:
        for path in file_names:
          abs_path = Script.get_tmp_dir() + "/" + path
          Directory(abs_path,
                    action = "delete",
                    ignore_failures = True
          )
      else:
        Logger.info("No '{0}*' folder deleted.".format(llap_package_folder_name_prefix))
    except:
      Logger.exception("Exception while doing cleanup for past 'LLAP package(s)':")
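  # For example (illustrative folder names): if the tmp dir holds llap-slider2017-07-18_09-00-00 through
  # llap-slider2017-07-21_09-00-00, the lexicographic sort above keeps the three newest and deletes only
  # llap-slider2017-07-18_09-00-00, since the embedded UTC timestamp makes name order match creation order.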


  """
  Does kinit and copies keytab for Hive/LLAP to HDFS.
  """
  def setup_security(self):
    import params

    self.do_kinit()

    # Copy params.hive_llap_keytab_file to hdfs://<host>:<port>/user/<hive_user>/.slider/keytabs/<hive_user> , required by LLAP
    slider_keytab_install_cmd = format("slider install-keytab --keytab {params.hive_llap_keytab_file} --folder {params.hive_user} --overwrite")
    Execute(slider_keytab_install_cmd, user=params.hive_user)

  def do_kinit(self):
    import params

    hive_interactive_kinit_cmd = format("{kinit_path_local} -kt {params.hive_server2_keytab} {params.hive_principal}; ")
    Execute(hive_interactive_kinit_cmd, user=params.hive_user)

  """
  Gets the LLAP app status data for the LLAP Tech Preview code base.
  """
  def _get_llap_app_status_info_in_llap_tp(self, app_name):
    import status_params
    LLAP_APP_STATUS_CMD_TIMEOUT = 0

    llap_status_cmd = format("{stack_root}/current/hive-server2-hive2/bin/hive --service llapstatus --name {app_name} --findAppTimeout {LLAP_APP_STATUS_CMD_TIMEOUT}")
    code, output, error = shell.checked_call(llap_status_cmd, user=status_params.hive_user, stderr=subprocess.PIPE,
                                             logoutput=False)
    Logger.info("Received 'llapstatus' command 'output' : {0}".format(output))
    if code == 0:
      return self._make_valid_json(output)
    else:
      Logger.info("'LLAP status command' output : {0}".format(output))
      Logger.info("'LLAP status command' error : {0}".format(error))
      Logger.info("'LLAP status command' exit code : {0}".format(code))
      raise Fail("Error getting LLAP app status.")

  """
  Gets the LLAP app status data for the LLAP GA code base.

  Parameters: 'percent_desired_instances_to_be_up' : A value between 0.0 and 1.0.
              'total_timeout' : Total wait time while checking the status via the llapstatus command.
              'refresh_rate' : Frequency of polling for llapstatus.
  """
  def _get_llap_app_status_info_in_llap_ga(self, percent_desired_instances_to_be_up, total_timeout, refresh_rate):
    import status_params

    # llapstatus command : llapstatus -w -r <percent containers to wait for to be Up> -i <refresh_rate> -t <total timeout for this command>
    # -w : Watch mode waits until all LLAP daemons are running or a subset of the nodes are running (the threshold can be specified via the -r option) (Default: wait until all nodes are running)
    # -r : When watch mode is enabled (-w), wait until the specified threshold of nodes is running (Default 1.0, which means 100% of the nodes are running)
    # -i : Amount of time in seconds to wait between subsequent status checks in watch mode (Default: 1 sec)
    # -t : Exit watch mode if the desired state is not attained within the specified timeout (Default: 300 sec)
    #
    # example : llapstatus -w -r 0.8 -i 2 -t 150
    llap_status_cmd = format("{stack_root}/current/hive-server2-hive2/bin/hive --service llapstatus -w -r {percent_desired_instances_to_be_up} -i {refresh_rate} -t {total_timeout}")
    Logger.info("\n\n\n\n\n")
    Logger.info("LLAP status command : {0}".format(llap_status_cmd))
    code, output, error = shell.checked_call(llap_status_cmd, user=status_params.hive_user, quiet=True, stderr=subprocess.PIPE,
                                             logoutput=True)

    if code == 0:
      return self._make_valid_json(output)
    else:
      Logger.info("'LLAP status command' output : {0}".format(output))
      Logger.info("'LLAP status command' error : {0}".format(error))
      Logger.info("'LLAP status command' exit code : {0}".format(code))
      raise Fail("Error getting LLAP app status.")


  """
  Remove extra lines from the 'llapstatus' output (e.g., lines added by MOTD logging) so that we are left
  with valid JSON data to pass to the JSON converter.
  """
  def _make_valid_json(self, output):
    '''

    Note: It is assumed right now that extra lines will only be at the start and not at the end.

    Sample expected JSON to be passed to 'loads' is either of the form :

    Case 'A':
    {
        "amInfo" : {
          "appName" : "llap0",
          "appType" : "org-apache-slider",
          "appId" : "APP1",
          "containerId" : "container_1466036628595_0010_01_000001",
          "hostname" : "hostName",
          "amWebUrl" : "http://hostName:port/"
        },
        "state" : "LAUNCHING",
        ....
        "desiredInstances" : 1,
        "liveInstances" : 0,
        ....
        ....
    }

    or

    Case 'B':
    {
        "state" : "APP_NOT_FOUND"
    }

    '''
    splits = output.split("\n")

    len_splits = len(splits)
    if (len_splits < 3):
      raise Fail("Malformed JSON data received from 'llapstatus' command. Exiting ....")

    marker_idx = None # To detect from where to start reading for JSON data
    for idx, split in enumerate(splits):
      curr_elem = split.strip()
      if idx + 2 > len_splits:
        raise Fail("Iterated over the received 'llapstatus' command output. Couldn't validate the received output for JSON parsing.")
      next_elem = (splits[(idx + 1)]).strip()
      if curr_elem == "{":
        if next_elem == "\"amInfo\" : {" and (splits[len_splits - 1]).strip() == '}':
          # For Case 'A'
          marker_idx = idx
          break
        elif idx + 3 == len_splits and next_elem.startswith('"state" : ') and (splits[idx + 2]).strip() == '}':
          # For Case 'B'
          marker_idx = idx
          break


    # Remove extra logging from the possible JSON output
    if marker_idx is None:
      raise Fail("Couldn't validate the received output for JSON parsing.")
    else:
      if marker_idx != 0:
        del splits[0:marker_idx]

    scanned_output = '\n'.join(splits)
    llap_app_info = json.loads(scanned_output)
    return llap_app_info


  """
  Checks the LLAP app status. The states can be : 'COMPLETE', 'APP_NOT_FOUND', 'RUNNING_PARTIAL', 'RUNNING_ALL' & 'LAUNCHING'.

  If the app is in 'APP_NOT_FOUND', 'RUNNING_PARTIAL' or 'LAUNCHING' state:
       we retry up to 'num_retries' times for the app to reach either (1) the 'RUNNING_ALL' state, or
       (2) the 'RUNNING_PARTIAL' state with 80% or more of the 'desiredInstances' running, and return True;
  else :
       return False.

  Parameters: llap_app_name : deployed llap app name.
              num_retries : Number of retries to check the LLAP app status.
  """
  def check_llap_app_status_in_llap_tp(self, llap_app_name, num_retries, return_immediately_if_stopped=False):
    curr_time = time.time()
    num_retries = int(num_retries)
    if num_retries <= 0:
      Logger.info("Read 'num_retries' as : {0}. Setting it to : {1}".format(num_retries, 2))
      num_retries = 2
    if num_retries > 20:
      Logger.info("Read 'num_retries' as : {0}. Setting it to : {1}".format(num_retries, 20))
      num_retries = 20

    @retry(times=num_retries, sleep_time=2, err_class=Fail)
    def do_retries():
      llap_app_info = self._get_llap_app_status_info_in_llap_tp(llap_app_name)
      return self._verify_llap_app_status(llap_app_info, llap_app_name, return_immediately_if_stopped, curr_time)

    try:
      status = do_retries()
      return status
    except Exception, e:
      Logger.info("LLAP app '{0}' did not come up after a wait of {1} seconds.".format(llap_app_name,
                                                                                       time.time() - curr_time))
      traceback.print_exc()
      return False
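  # Note: with the @retry decorator above, each failed check sleeps roughly 2 seconds before the next
  # attempt, so the worst-case wait is about num_retries * (2 seconds + the llapstatus run time), with
  # num_retries clamped to the range [2, 20].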

  def check_llap_app_status_in_llap_ga(self, llap_app_name, num_retries, return_immediately_if_stopped=False):
    curr_time = time.time()
    total_timeout = int(num_retries) * 20 # Total wait time while checking the status via llapstatus command
    Logger.debug("Calculated 'total_timeout' : {0} using config 'num_retries_for_checking_llap_status' : {1}".format(total_timeout, num_retries))
    refresh_rate = 2 # Frequency of checking the llapstatus
    percent_desired_instances_to_be_up = 80 # Out of 100.
    llap_app_info = self._get_llap_app_status_info_in_llap_ga(percent_desired_instances_to_be_up / 100.0, total_timeout, refresh_rate)

    try:
      return self._verify_llap_app_status(llap_app_info, llap_app_name, return_immediately_if_stopped, curr_time)
    except Exception as e:
      Logger.info(e.message)
      return False
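  # Illustrative example (10 is not necessarily the configured value): with num_retries_for_checking_llap_status = 10,
  # the call above effectively runs "hive --service llapstatus -w -r 0.8 -i 2 -t 200", i.e. watch for up to
  # 200 seconds, polling every 2 seconds, until at least 80% of the desired LLAP daemons are up.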

  def get_log_folder(self):
    import params
    return params.hive_log_dir

  def get_user(self):
    import params
    return params.hive_user

  def _verify_llap_app_status(self, llap_app_info, llap_app_name, return_immediately_if_stopped, curr_time):
    if llap_app_info is None or 'state' not in llap_app_info:
      Logger.error("Malformed JSON data received for LLAP app. Exiting ....")
      return False

    # counters based on various states.
    live_instances = 0
    desired_instances = 0
    percent_desired_instances_to_be_up = 80 # Used in 'RUNNING_PARTIAL' state.
    if return_immediately_if_stopped and (llap_app_info['state'].upper() in ('APP_NOT_FOUND', 'COMPLETE')):
      return False
    if llap_app_info['state'].upper() == 'RUNNING_ALL':
      Logger.info(
        "LLAP app '{0}' in '{1}' state.".format(llap_app_name, llap_app_info['state']))
      return True
    elif llap_app_info['state'].upper() == 'RUNNING_PARTIAL':
      # Check how many instances are up.
      if 'liveInstances' in llap_app_info and 'desiredInstances' in llap_app_info:
        live_instances = llap_app_info['liveInstances']
        desired_instances = llap_app_info['desiredInstances']
      else:
        Logger.info(
          "LLAP app '{0}' is in '{1}' state, but 'instances' information is not available in the JSON received. " \
          "Exiting ....".format(llap_app_name, llap_app_info['state']))
        Logger.info(llap_app_info)
        return False
      if desired_instances == 0:
        Logger.info("LLAP app '{0}' desired instances are set to 0. Exiting ....".format(llap_app_name))
        return False

      percentInstancesUp = 0
      if live_instances > 0:
        percentInstancesUp = float(live_instances) / desired_instances * 100
      if percentInstancesUp >= percent_desired_instances_to_be_up:
        Logger.info("LLAP app '{0}' in '{1}' state. Live Instances : '{2}' >= {3}% of Desired Instances : " \
                    "'{4}'.".format(llap_app_name, llap_app_info['state'],
                                    llap_app_info['liveInstances'],
                                    percent_desired_instances_to_be_up,
                                    llap_app_info['desiredInstances']))
        return True
      else:
        Logger.info("LLAP app '{0}' in '{1}' state. Live Instances : '{2}'. Desired Instances : " \
                    "'{3}' after {4} secs.".format(llap_app_name, llap_app_info['state'],
                                                   llap_app_info['liveInstances'],
                                                   llap_app_info['desiredInstances'],
                                                   time.time() - curr_time))
        raise Fail("App state is RUNNING_PARTIAL. Live Instances : '{0}', Desired Instance : '{1}'".format(llap_app_info['liveInstances'],
                                                                                                           llap_app_info['desiredInstances']))
    elif llap_app_info['state'].upper() in ['APP_NOT_FOUND', 'LAUNCHING', 'COMPLETE']:
      status_str = format("LLAP app '{0}' current state is {1}.".format(llap_app_name, llap_app_info['state']))
      Logger.info(status_str)
      raise Fail(status_str)
    else:  # Covers any unknown state we get.
      Logger.info(
        "LLAP app '{0}' current state is '{1}'. Expected : 'RUNNING'.".format(llap_app_name, llap_app_info['state']))
      return False
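  # Summary of the behaviour above: RUNNING_ALL -> True; RUNNING_PARTIAL -> True when at least 80% of
  # desiredInstances are live, otherwise Fail is raised (so callers can retry); APP_NOT_FOUND, LAUNCHING
  # and COMPLETE -> Fail is raised; any other state -> False.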

@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
class HiveServerInteractiveWindows(HiveServerInteractive):

  def status(self, env):
    pass

if __name__ == "__main__":
  HiveServerInteractive().execute()