From 4d1032eae86c3836b170d9128c537b66aae5d7bb Mon Sep 17 00:00:00 2001
From: Noah Watkins
Date: Tue, 20 Jan 2015 10:55:28 -0800
Subject: [PATCH] hadoop: separate ceph/hdfs config actions

Signed-off-by: Noah Watkins
---
 teuthology/task/hadoop.py | 136 ++++++++++++++++++++------------------
 1 file changed, 71 insertions(+), 65 deletions(-)

diff --git a/teuthology/task/hadoop.py b/teuthology/task/hadoop.py
index 7da0d44215..652f500dd9 100644
--- a/teuthology/task/hadoop.py
+++ b/teuthology/task/hadoop.py
@@ -134,7 +134,7 @@ def get_hdfs_site_data(ctx):
     path = "{tdir}/hadoop/etc/hadoop/hdfs-site.xml".format(tdir=tempdir)
     return path, data
 
-def configure(ctx, config, hadoops, hadoop_dir):
+def configure(ctx, config, hadoops):
     tempdir = teuthology.get_testdir(ctx)
 
     log.info("Writing Hadoop slaves file...")
@@ -173,6 +173,22 @@ def configure(ctx, config, hadoops, hadoop_dir):
         data = "JAVA_HOME=/usr/lib/jvm/default-java\n" # FIXME: RHEL?
         teuthology.prepend_lines_to_file(remote, path, data)
 
+    if config.get('hdfs', False):
+        log.info("Formatting HDFS...")
+        testdir = teuthology.get_testdir(ctx)
+        hadoop_dir = "{tdir}/hadoop/".format(tdir=testdir)
+        masters = ctx.cluster.only(teuthology.is_type('hadoop.master'))
+        assert len(masters.remotes) == 1
+        master = masters.remotes.keys()[0]
+        master.run(
+            args = [
+                hadoop_dir + "bin/hadoop",
+                "namenode",
+                "-format"
+            ],
+            wait = True,
+        )
+
 @contextlib.contextmanager
 def install_hadoop(ctx, config):
     testdir = teuthology.get_testdir(ctx)
@@ -231,76 +247,76 @@ def install_hadoop(ctx, config):
         )
     )
 
-    log.info("Fetching cephfs-hadoop...")
-
-    sha1, url = teuthology.get_ceph_binary_url(
-        package = "hadoop",
-        format = "jar",
-        dist = "precise",
-        arch = "x86_64",
-        flavor = "basic",
-        branch = "master")
-
+    log.info("Create Hadoop temporary directory...")
+    hadoop_tmp_dir = "{tdir}/hadoop_tmp".format(tdir=testdir)
     run.wait(
         hadoops.run(
             args = [
-                'wget',
-                '-nv',
-                '-O',
-                "{tdir}/cephfs-hadoop.jar".format(tdir=testdir), # FIXME
-                url + "/cephfs-hadoop-0.80.6.jar", # FIXME
+                'mkdir',
+                hadoop_tmp_dir
             ],
             wait = False,
         )
     )
 
-    run.wait(
-        hadoops.run(
-            args = [
-                'mv',
-                "{tdir}/cephfs-hadoop.jar".format(tdir=testdir),
-                "{tdir}/hadoop/share/hadoop/common/".format(tdir=testdir),
-            ],
-            wait = False,
-        )
-    )
+    if not config.get('hdfs', False):
+        log.info("Fetching cephfs-hadoop...")
 
-    run.wait(
-        hadoops.run(
-            args = [
-                'cp',
-                "/usr/lib/jni/libcephfs_jni.so",
-                "{tdir}/hadoop/lib/native/".format(tdir=testdir),
-            ],
-            wait = False,
+        sha1, url = teuthology.get_ceph_binary_url(
+            package = "hadoop",
+            format = "jar",
+            dist = "precise",
+            arch = "x86_64",
+            flavor = "basic",
+            branch = "master")
+
+        run.wait(
+            hadoops.run(
+                args = [
+                    'wget',
+                    '-nv',
+                    '-O',
+                    "{tdir}/cephfs-hadoop.jar".format(tdir=testdir), # FIXME
+                    url + "/cephfs-hadoop-0.80.6.jar", # FIXME
+                ],
+                wait = False,
+            )
         )
-    )
 
-    run.wait(
-        hadoops.run(
-            args = [
-                'cp',
-                "/usr/share/java/libcephfs.jar",
-                "{tdir}/hadoop/share/hadoop/common/".format(tdir=testdir),
-            ],
-            wait = False,
+        run.wait(
+            hadoops.run(
+                args = [
+                    'mv',
+                    "{tdir}/cephfs-hadoop.jar".format(tdir=testdir),
+                    "{tdir}/hadoop/share/hadoop/common/".format(tdir=testdir),
+                ],
+                wait = False,
+            )
         )
-    )
+        run.wait(
+            hadoops.run(
+                args = [
+                    'cp',
+                    "/usr/lib/jni/libcephfs_jni.so",
+                    "{tdir}/hadoop/lib/native/".format(tdir=testdir),
+                ],
+                wait = False,
+            )
+        )
 
-    log.info("Create Hadoop temporary directory...")
-    hadoop_tmp_dir = "{tdir}/hadoop_tmp".format(tdir=testdir)
-    run.wait(
-        hadoops.run(
-            args = [
-                'mkdir',
-                hadoop_tmp_dir
-            ],
-            wait = False,
+        run.wait(
+            hadoops.run(
+                args = [
+                    'cp',
+                    "/usr/share/java/libcephfs.jar",
+                    "{tdir}/hadoop/share/hadoop/common/".format(tdir=testdir),
+                ],
+                wait = False,
+            )
         )
-    )
 
-    configure(ctx, config, hadoops, hadoop_dir)
+    configure(ctx, config, hadoops)
 
     try:
         yield
@@ -325,16 +341,6 @@ def start_hadoop(ctx, config):
     assert len(masters.remotes) == 1
     master = masters.remotes.keys()[0]
 
-    log.info("Formatting HDFS...")
-    master.run(
-        args = [
-            hadoop_dir + "bin/hadoop",
-            "namenode",
-            "-format"
-        ],
-        wait = True,
-    )
-
     log.info("Stopping Hadoop daemons")
     master.run(
         args = [
-- 
2.39.5