
[libzookeeper/indexer] Rewrite to not leak sockets or temp files

We weren't closing the ZooKeeper connection in a couple of places, so to
be safe, this patch changes all users of `ZookeeperClient` to use a
`with` context to make sure the connection is always stopped and closed.
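
For reference, the pattern everything now converges on looks roughly like
this. A minimal sketch, assuming Hue's libzookeeper app is importable and a
ZooKeeper ensemble is reachable; the host string and znode path below are
placeholders, not values taken from this patch:

from libzookeeper.models import ZookeeperClient

# __enter__ starts the ZooKeeper session; __exit__ stops it and then
# closes the underlying socket, even if the body raises.
with ZookeeperClient(hosts='localhost:2181', read_only=False) as zc:
  if zc.path_exists(namespace='/solr/configs/example'):
    zc.delete_path('/solr/configs/example')

Splitting teardown into `stop()` plus `close()` matters because `stop()` only
ends the session; `close()` is what releases the socket that was previously
being leaked.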

Along the way, it adds try/except and try/finally blocks to make sure
the temp directories created by `copy_configs` are always cleaned up.
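
The cleanup follows the usual mkdtemp/try/finally pairing: `copy_configs`
removes its own tempdir if it fails part-way, and on success the caller owns
the directory and removes it in a `finally`. A rough sketch, with a
hypothetical `build_configs` helper standing in for `copy_configs`:

import shutil
import tempfile


def build_configs():
  # Clean up the tempdir ourselves if we fail part-way; on success the
  # caller takes ownership of it.
  tmp_path = tempfile.mkdtemp()
  try:
    # ... populate tmp_path with schema.xml / solrconfig.xml ...
    return tmp_path
  except Exception:
    shutil.rmtree(tmp_path)
    raise


tmp_path = build_configs()
try:
  pass  # ... push the configs to Solr / ZooKeeper ...
finally:
  shutil.rmtree(tmp_path)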
Erick Tryzelaar · 10 years ago · commit b94207f915

desktop/libs/indexer/src/indexer/controller.py (+35, -27)

@@ -126,22 +126,25 @@ class CollectionManagerController(object):
     Create schema.xml file so that we can set UniqueKey field.
     """
     if self.is_solr_cloud_mode():
-      # solrcloud mode
-
-      # Need to remove path afterwards
-      tmp_path, solr_config_path = copy_configs(fields, unique_key_field, df, True)
+      self._create_solr_cloud_collection(name, fields, unique_key_field, df)
+    else:
+      self._create_non_solr_cloud_collection(name, fields, unique_key_field, df)
 
-      zc = ZookeeperClient(hosts=get_solr_ensemble(), read_only=False)
+  def _create_solr_cloud_collection(self, name, fields, unique_key_field, df):
+    with ZookeeperClient(hosts=get_solr_ensemble(), read_only=False) as zc:
       root_node = '%s/%s' % (ZK_SOLR_CONFIG_NAMESPACE, name)
-      config_root_path = '%s/%s' % (solr_config_path, 'conf')
-      try:
-        zc.copy_path(root_node, config_root_path)
-      except Exception, e:
-        zc.delete_path(root_node)
-        raise PopupException(_('Error in copying Solr configurations.'), detail=e)
 
-      # Don't want directories laying around
-      shutil.rmtree(tmp_path)
+      tmp_path, solr_config_path = copy_configs(fields, unique_key_field, df, True)
+      try:
+        config_root_path = '%s/%s' % (solr_config_path, 'conf')
+        try:
+          zc.copy_path(root_node, config_root_path)
+        except Exception, e:
+          zc.delete_path(root_node)
+          raise PopupException(_('Error in copying Solr configurations.'), detail=e)
+      finally:
+        # Don't want directories laying around
+        shutil.rmtree(tmp_path)
 
       api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
       if not api.create_collection(name):
@@ -151,21 +154,26 @@ class CollectionManagerController(object):
         except Exception, e:
           raise PopupException(_('Error in deleting Solr configurations.'), detail=e)
         raise PopupException(_('Could not create collection. Check error logs for more info.'))
-    else:
-      # Non-solrcloud mode
-      # Create instance directory locally.
-      instancedir = os.path.join(CORE_INSTANCE_DIR.get(), name)
-      if os.path.exists(instancedir):
-        raise PopupException(_("Instance directory %s already exists! Please remove it from the file system.") % instancedir)
-      tmp_path, solr_config_path = copy_configs(fields, unique_key_field, df, False)
+
+  def _create_non_solr_cloud_collection(self, name, fields, unique_key_field, df):
+    # Non-solrcloud mode
+    # Create instance directory locally.
+    instancedir = os.path.join(CORE_INSTANCE_DIR.get(), name)
+    if os.path.exists(instancedir):
+      raise PopupException(_("Instance directory %s already exists! Please remove it from the file system.") % instancedir)
+
+    tmp_path, solr_config_path = copy_configs(fields, unique_key_field, df, False)
+    try:
       shutil.move(solr_config_path, instancedir)
+    finally:
       shutil.rmtree(tmp_path)
 
-      api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
-      if not api.create_core(name, instancedir):
-        # Delete instance directory if we couldn't create a collection.
-        shutil.rmtree(instancedir)
-        raise PopupException(_('Could not create collection. Check error logs for more info.'))
+    api = SolrApi(SOLR_URL.get(), self.user, SECURITY_ENABLED.get())
+    if not api.create_core(name, instancedir):
+      # Delete instance directory if we couldn't create a collection.
+      shutil.rmtree(instancedir)
+      raise PopupException(_('Could not create collection. Check error logs for more info.'))
+
 
   def delete_collection(self, name, core):
     """
@@ -179,8 +187,8 @@ class CollectionManagerController(object):
       # Delete instance directory.
       try:
         root_node = '%s/%s' % (ZK_SOLR_CONFIG_NAMESPACE, name)
-        zc = ZookeeperClient(hosts=get_solr_ensemble(), read_only=False)
-        zc.delete_path(root_node)
+        with ZookeeperClient(hosts=get_solr_ensemble(), read_only=False) as zc:
+          zc.delete_path(root_node)
       except Exception, e:
         # Re-create collection so that we don't have an orphan config
         api.add_collection(name)

desktop/libs/indexer/src/indexer/controller2.py (+28, -19)

@@ -101,10 +101,18 @@ class IndexController(object):
     Create schema.xml file so that we can set UniqueKey field.
     """
     if self.is_solr_cloud_mode():
+      self._create_solr_cloud_index(name, fields, unique_key_field, df)
+
+    else:  # Non-solrcloud mode
+      self._create_non_solr_cloud_index(name, fields, unique_key_field, df)
+
+    return name
+
+  def _create_solr_cloud_index(self, name, fields, unique_key_field, df):
+    with ZookeeperClient(hosts=get_solr_ensemble(), read_only=False) as zc:
       tmp_path, solr_config_path = copy_configs(fields, unique_key_field, df, True)
 
       try:
-        zc = ZookeeperClient(hosts=get_solr_ensemble(), read_only=False)
         root_node = '%s/%s' % (ZK_SOLR_CONFIG_NAMESPACE, name)
         config_root_path = '%s/%s' % (solr_config_path, 'conf')
         zc.copy_path(root_node, config_root_path)
@@ -120,27 +128,28 @@ class IndexController(object):
         # Remove tmp config directory
         shutil.rmtree(tmp_path)
 
-    else:  # Non-solrcloud mode
-      # Create instance directory locally.
-      instancedir = os.path.join(CORE_INSTANCE_DIR.get(), name)
 
-      if os.path.exists(instancedir):
-        raise PopupException(_("Instance directory %s already exists! Please remove it from the file system.") % instancedir)
+  def _create_non_solr_cloud_index(self, name, fields, unique_key_field, df):
+    # Create instance directory locally.
+    instancedir = os.path.join(CORE_INSTANCE_DIR.get(), name)
 
+    if os.path.exists(instancedir):
+      raise PopupException(_("Instance directory %s already exists! Please remove it from the file system.") % instancedir)
+
+    try:
+      tmp_path, solr_config_path = copy_configs(fields, unique_key_field, df, False)
       try:
-        tmp_path, solr_config_path = copy_configs(fields, unique_key_field, df, False)
         shutil.move(solr_config_path, instancedir)
-        shutil.rmtree(tmp_path)
-
-        if not self.api.create_core(name, instancedir):
-          raise Exception('Failed to create core: %s' % name)
-      except Exception, e:
-        raise PopupException(_('Could not create index. Check error logs for more info.'), detail=e)
       finally:
-        # Delete instance directory if we couldn't create the core.
-        shutil.rmtree(instancedir)
+        shutil.rmtree(tmp_path)
 
-    return name
+      if not self.api.create_core(name, instancedir):
+        raise Exception('Failed to create core: %s' % name)
+    except Exception, e:
+      raise PopupException(_('Could not create index. Check error logs for more info.'), detail=e)
+    finally:
+      # Delete instance directory if we couldn't create the core.
+      shutil.rmtree(instancedir)
 #
 
   def delete_index(self, name):
@@ -155,8 +164,8 @@ class IndexController(object):
       # Delete instance directory.
       try:
         root_node = '%s/%s' % (ZK_SOLR_CONFIG_NAMESPACE, name)
-        zc = ZookeeperClient(hosts=get_solr_ensemble(), read_only=False)
-        zc.delete_path(root_node)
+        with ZookeeperClient(hosts=get_solr_ensemble(), read_only=False) as zc:
+          zc.delete_path(root_node)
       except Exception, e:
         # Re-create collection so that we don't have an orphan config
         self.api.add_collection(name)
@@ -188,4 +197,4 @@ class IndexController(object):
       for index in range(0, len(FLAGS)):
         flags = FLAGS[index]
         field[flags[1]] = field['flags'][index] == FLAGS[index][0]
-    return fields
+    return fields

desktop/libs/indexer/src/indexer/utils.py (+32, -27)

@@ -83,33 +83,38 @@ def copy_configs(fields, unique_key_field, df, solr_cloud_mode=True):
   # Create temporary copy of solr configs
   tmp_path = tempfile.mkdtemp()
 
-  config_template_path = get_config_template_path(solr_cloud_mode)
-
-  solr_config_path = os.path.join(tmp_path, 'solr_configs')
-  shutil.copytree(config_template_path, solr_config_path)
-
-  if fields or unique_key_field:
-    # Get complete schema.xml
-    with open(os.path.join(config_template_path, 'conf/schema.xml')) as f:
-      schemaxml = SchemaXml(f.read())
-      schemaxml.uniqueKeyField(unique_key_field)
-      schemaxml.fields(fields)
-
-    # Write complete schema.xml to copy
-    with open(os.path.join(solr_config_path, 'conf/schema.xml'), 'w') as f:
-      f.write(smart_str(schemaxml.xml))
-
-  if df:
-    # Get complete solrconfig.xml
-    with open(os.path.join(config_template_path, 'conf/solrconfig.xml')) as f:
-      solrconfigxml = SolrConfigXml(f.read())
-      solrconfigxml.defaultField(df)
-
-    # Write complete solrconfig.xml to copy
-    with open(os.path.join(solr_config_path, 'conf/solrconfig.xml'), 'w') as f:
-      f.write(smart_str(solrconfigxml.xml))
-
-  return tmp_path, solr_config_path
+  try:
+    config_template_path = get_config_template_path(solr_cloud_mode)
+
+    solr_config_path = os.path.join(tmp_path, 'solr_configs')
+    shutil.copytree(config_template_path, solr_config_path)
+
+    if fields or unique_key_field:
+      # Get complete schema.xml
+      with open(os.path.join(config_template_path, 'conf/schema.xml')) as f:
+        schemaxml = SchemaXml(f.read())
+        schemaxml.uniqueKeyField(unique_key_field)
+        schemaxml.fields(fields)
+
+      # Write complete schema.xml to copy
+      with open(os.path.join(solr_config_path, 'conf/schema.xml'), 'w') as f:
+        f.write(smart_str(schemaxml.xml))
+
+    if df:
+      # Get complete solrconfig.xml
+      with open(os.path.join(config_template_path, 'conf/solrconfig.xml')) as f:
+        solrconfigxml = SolrConfigXml(f.read())
+        solrconfigxml.defaultField(df)
+
+      # Write complete solrconfig.xml to copy
+      with open(os.path.join(solr_config_path, 'conf/solrconfig.xml'), 'w') as f:
+        f.write(smart_str(solrconfigxml.xml))
+
+    return tmp_path, solr_config_path
+  except Exception:
+    # Don't leak the tempdir if there was an exception.
+    shutil.rmtree(tmp_path)
+    raise
 
 
 def get_field_types(field_list, iterations=3):

desktop/libs/libsentry/src/libsentry/api.py (+2, -2)

@@ -255,8 +255,8 @@ def _get_server_properties():
       if not _api_cache:
 
         servers = []
-        client = ZookeeperClient(hosts=get_sentry_server_ha_zookeeper_quorum())
-        sentry_servers = client.get_children_data(namespace=get_sentry_server_ha_zookeeper_namespace())
+        with ZookeeperClient(hosts=get_sentry_server_ha_zookeeper_quorum()) as client:
+          sentry_servers = client.get_children_data(namespace=get_sentry_server_ha_zookeeper_namespace())
 
         for data in sentry_servers:
           server = json.loads(data.decode("utf-8"))

desktop/libs/libzookeeper/src/libzookeeper/models.py (+45, -32)

@@ -55,57 +55,70 @@ class ZookeeperClient(object):
                           sasl_server_principal=self.sasl_server_principal)
 
 
-  def get_children_data(self, namespace):
+  def start(self):
+    """Start the zookeeper session."""
     self.zk.start()
-    try:
-      children = self.zk.get_children(namespace)
 
-      children_data = []
 
-      for node in children:
-        data, stat = self.zk.get("%s/%s" % (namespace, node))
-        children_data.append(data)
+  def stop(self):
+    """Stop the zookeeper session, but leave the socket open."""
+    self.zk.stop()
 
-      return children_data
-    finally:
-      self.zk.stop()
+
+  def close(self):
+    """Closes a stopped zookeeper socket."""
+    self.zk.close()
+
+
+  def get_children_data(self, namespace):
+    children = self.zk.get_children(namespace)
+
+    children_data = []
+
+    for node in children:
+      data, stat = self.zk.get("%s/%s" % (namespace, node))
+      children_data.append(data)
+
+    return children_data
 
 
   def path_exists(self, namespace):
-    self.zk.start()
-    try:
-      return self.zk.exists(namespace) is not None
-    finally:
-      self.zk.stop()
+    return self.zk.exists(namespace) is not None
 
 
   def copy_path(self, namespace, filepath):
     if self.read_only:
       raise ReadOnlyClientException('Cannot execute copy_path when read_only is set to True.')
 
-    self.zk.start()
-    try:
-      self.zk.ensure_path(namespace)
-      for dir, subdirs, files in os.walk(filepath):
-        path = dir.replace(filepath, '').strip('/')
-        if path:
-          node_path = '%s/%s' % (namespace, path)
-          self.zk.create(path=node_path, value='', makepath=True)
-        for filename in files:
-          node_path = '%s/%s/%s' % (namespace, path, filename)
-          with open(os.path.join(dir, filename), 'r') as f:
-            file_content = f.read()
-            self.zk.create(path=node_path, value=file_content, makepath=True)
-    finally:
-      self.zk.stop()
+    self.zk.ensure_path(namespace)
+    for dir, subdirs, files in os.walk(filepath):
+      path = dir.replace(filepath, '').strip('/')
+      if path:
+        node_path = '%s/%s' % (namespace, path)
+        self.zk.create(path=node_path, value='', makepath=True)
+      for filename in files:
+        node_path = '%s/%s/%s' % (namespace, path, filename)
+        with open(os.path.join(dir, filename), 'r') as f:
+          file_content = f.read()
+          self.zk.create(path=node_path, value=file_content, makepath=True)
 
 
   def delete_path(self, namespace):
     if self.read_only:
       raise ReadOnlyClientException('Cannot execute delete_path when read_only is set to True.')
 
+    self.zk.delete(namespace, recursive=True)
+
+
+  def __enter__(self):
+    """Start a zookeeper session and return a `with` context."""
     self.zk.start()
+    return self
+
+
+  def __exit__(self, exc_type, exc_value, traceback):
+    """Stops and closes the zookeeper session at the end of the `with` context."""
     try:
-      self.zk.delete(namespace, recursive=True)
+      self.stop()
     finally:
-      self.zk.stop()
+      self.close()

desktop/libs/libzookeeper/src/libzookeeper/tests.py (+10, -31)

@@ -91,34 +91,23 @@ class TestWithZooKeeper:
     shutil.rmtree(cls.local_directory)
 
   def teardown(self):
-    client = ZookeeperClient(hosts=zkensemble(), read_only=False)
-    # Delete the root_node first just in case it wasn't cleaned up in previous run
-    client.zk.start()
-    try:
+    with ZookeeperClient(hosts=zkensemble(), read_only=False) as client:
       if client.zk.exists(self.namespace):
         client.zk.delete(self.namespace, recursive=True)
-    finally:
-      client.zk.stop()
 
   def test_get_children_data(self):
     root_node = '%s/%s' % (TestWithZooKeeper.namespace, 'test_path_exists')
-    client = ZookeeperClient(hosts=zkensemble(), read_only=False)
 
-    client.zk.start()
-    try:
+    with ZookeeperClient(hosts=zkensemble(), read_only=False) as client:
       client.zk.create(root_node, value='test_path_exists', makepath=True)
-    finally:
-      client.zk.stop()
 
-    db = client.get_children_data(namespace=TestWithZooKeeper.namespace)
-    assert_true(len(db) > 0)
+      db = client.get_children_data(namespace=TestWithZooKeeper.namespace)
+      assert_true(len(db) > 0)
 
   def test_path_exists(self):
     root_node = '%s/%s' % (TestWithZooKeeper.namespace, 'test_path_exists')
-    client = ZookeeperClient(hosts=zkensemble(), read_only=False)
 
-    client.zk.start()
-    try:
+    with ZookeeperClient(hosts=zkensemble(), read_only=False) as client:
       client.zk.create(root_node, value='test_path_exists', makepath=True)
 
       try:
@@ -126,33 +115,23 @@ class TestWithZooKeeper:
         assert_false(client.path_exists(namespace='bogus_path'))
       finally:
         client.delete_path(root_node)
-    finally:
-      client.zk.stop()
 
   def test_copy_and_delete_path(self):
     root_node = '%s/%s' % (TestWithZooKeeper.namespace, 'test_copy_and_delete_path')
-    client = ZookeeperClient(hosts=zkensemble(), read_only=False)
 
-    # Test copy_path
-    client.copy_path(root_node, TestWithZooKeeper.local_directory)
+    with ZookeeperClient(hosts=zkensemble(), read_only=False) as client:
+      # Test copy_path
+      client.copy_path(root_node, TestWithZooKeeper.local_directory)
 
-    client.zk.start()
-    try:
       assert_true(client.zk.exists('%s' % root_node))
       assert_true(client.zk.exists('%s/%s' % (root_node, TestWithZooKeeper.subdir_name)))
       assert_true(client.zk.exists('%s/%s/%s' % (root_node, TestWithZooKeeper.subdir_name, TestWithZooKeeper.filename)))
       contents, stats = client.zk.get('%s/%s/%s' % (root_node, TestWithZooKeeper.subdir_name, TestWithZooKeeper.filename))
       assert_equal(contents, TestWithZooKeeper.file_contents)
-    finally:
-      client.zk.stop()
 
-    # Test delete_path
-    client.delete_path(root_node)
+      # Test delete_path
+      client.delete_path(root_node)
 
-    client.zk.start()
-    try:
       assert_equal(client.zk.exists('%s' % root_node), None)
       assert_equal(client.zk.exists('%s/%s' % (root_node, TestWithZooKeeper.subdir_name)), None)
       assert_equal(client.zk.exists('%s/%s/%s' % (root_node, TestWithZooKeeper.subdir_name, TestWithZooKeeper.filename)), None)
-    finally:
-      client.zk.stop()