
Remove hadoop_groups from the Thrift Query struct and regenerate files with Thrift 0.5.0

vinithra 15 years ago
parent commit c54e255968
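
The practical effect for callers: Query is now built from query, configuration, and hadoop_user only; field 2 was already retired and field 5 (hadoop_groups) is removed here. A minimal sketch against the regenerated Python bindings, with the module path and sample values taken from the diffs below:

    from beeswaxd.ttypes import Query

    # The three remaining fields, per the regenerated thrift_spec below.
    q = Query(query='SELECT * FROM test', configuration=[], hadoop_user='test')

    # The old keyword argument is gone, so stale constructor calls fail loudly:
    try:
        Query(query='SELECT 1', configuration=[], hadoop_user='hue',
              hadoop_groups=['supergroup'])
    except TypeError as e:
        print(e)  # __init__() got an unexpected keyword argument 'hadoop_groups'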

+ 7 - 7
apps/beeswax/gen-py/beeswaxd/BeeswaxService.py

@@ -1669,11 +1669,11 @@ class get_default_configuration_result(object):
       if fid == 0:
         if ftype == TType.LIST:
           self.success = []
-          (_etype31, _size28) = iprot.readListBegin()
-          for _i32 in xrange(_size28):
-            _elem33 = ConfigVariable()
-            _elem33.read(iprot)
-            self.success.append(_elem33)
+          (_etype24, _size21) = iprot.readListBegin()
+          for _i25 in xrange(_size21):
+            _elem26 = ConfigVariable()
+            _elem26.read(iprot)
+            self.success.append(_elem26)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -1690,8 +1690,8 @@ class get_default_configuration_result(object):
     if self.success != None:
       oprot.writeFieldBegin('success', TType.LIST, 0)
       oprot.writeListBegin(TType.STRUCT, len(self.success))
-      for iter34 in self.success:
-        iter34.write(oprot)
+      for iter27 in self.success:
+        iter27.write(oprot)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     oprot.writeFieldStop()
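
The hunk above is pure renumbering: regenerating with Thrift 0.5.0 shifts the generated temporaries because the compiler appears to draw their suffixes from one file-wide counter, and the deleted hadoop_groups code freed a run of ids. A toy sketch of that apparent scheme (inferred from this commit's suffixes, not from the Thrift compiler source); the printed names line up with the regenerated ttypes.py below:

    from itertools import count

    _ids = count()  # one counter per generated file (assumed)

    def read_list_names():
        # A list read appears to consume six ids: size, ktype, vtype, etype, i, elem.
        size, _kt, _vt, etype, i, elem = (next(_ids) for _ in range(6))
        return '_size%d _etype%d _i%d _elem%d' % (size, etype, i, elem)

    def write_list_name():
        return 'iter%d' % next(_ids)  # a list write consumes a single id

    print(read_list_names())  # Query.configuration read  -> _size0 _etype3 _i4 _elem5
    print(write_list_name())  # Query.configuration write -> iter6
    print(read_list_names())  # Results.columns read      -> _size7 _etype10 _i11 _elem12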

+ 15 - 35
apps/beeswax/gen-py/beeswaxd/ttypes.py

@@ -49,7 +49,6 @@ class Query(object):
    - query
    - configuration
    - hadoop_user
-   - hadoop_groups
   """
 
   thrift_spec = (
@@ -58,14 +57,12 @@ class Query(object):
     None, # 2
     (3, TType.LIST, 'configuration', (TType.STRING,None), None, ), # 3
     (4, TType.STRING, 'hadoop_user', None, None, ), # 4
-    (5, TType.LIST, 'hadoop_groups', (TType.STRING,None), None, ), # 5
   )
 
-  def __init__(self, query=None, configuration=None, hadoop_user=None, hadoop_groups=None,):
+  def __init__(self, query=None, configuration=None, hadoop_user=None,):
     self.query = query
     self.configuration = configuration
     self.hadoop_user = hadoop_user
-    self.hadoop_groups = hadoop_groups
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -96,16 +93,6 @@ class Query(object):
           self.hadoop_user = iprot.readString();
         else:
           iprot.skip(ftype)
-      elif fid == 5:
-        if ftype == TType.LIST:
-          self.hadoop_groups = []
-          (_etype9, _size6) = iprot.readListBegin()
-          for _i10 in xrange(_size6):
-            _elem11 = iprot.readString();
-            self.hadoop_groups.append(_elem11)
-          iprot.readListEnd()
-        else:
-          iprot.skip(ftype)
       else:
         iprot.skip(ftype)
       iprot.readFieldEnd()
@@ -123,21 +110,14 @@ class Query(object):
     if self.configuration != None:
       oprot.writeFieldBegin('configuration', TType.LIST, 3)
       oprot.writeListBegin(TType.STRING, len(self.configuration))
-      for iter12 in self.configuration:
-        oprot.writeString(iter12)
+      for iter6 in self.configuration:
+        oprot.writeString(iter6)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     if self.hadoop_user != None:
       oprot.writeFieldBegin('hadoop_user', TType.STRING, 4)
       oprot.writeString(self.hadoop_user)
       oprot.writeFieldEnd()
-    if self.hadoop_groups != None:
-      oprot.writeFieldBegin('hadoop_groups', TType.LIST, 5)
-      oprot.writeListBegin(TType.STRING, len(self.hadoop_groups))
-      for iter13 in self.hadoop_groups:
-        oprot.writeString(iter13)
-      oprot.writeListEnd()
-      oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()

  def validate(self):
@@ -328,20 +308,20 @@ class Results(object):
       elif fid == 2:
         if ftype == TType.LIST:
           self.columns = []
-          (_etype17, _size14) = iprot.readListBegin()
-          for _i18 in xrange(_size14):
-            _elem19 = iprot.readString();
-            self.columns.append(_elem19)
+          (_etype10, _size7) = iprot.readListBegin()
+          for _i11 in xrange(_size7):
+            _elem12 = iprot.readString();
+            self.columns.append(_elem12)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
       elif fid == 3:
         if ftype == TType.LIST:
           self.data = []
-          (_etype23, _size20) = iprot.readListBegin()
-          for _i24 in xrange(_size20):
-            _elem25 = iprot.readString();
-            self.data.append(_elem25)
+          (_etype16, _size13) = iprot.readListBegin()
+          for _i17 in xrange(_size13):
+            _elem18 = iprot.readString();
+            self.data.append(_elem18)
           iprot.readListEnd()
         else:
           iprot.skip(ftype)
@@ -372,15 +352,15 @@ class Results(object):
     if self.columns != None:
       oprot.writeFieldBegin('columns', TType.LIST, 2)
       oprot.writeListBegin(TType.STRING, len(self.columns))
-      for iter26 in self.columns:
-        oprot.writeString(iter26)
+      for iter19 in self.columns:
+        oprot.writeString(iter19)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     if self.data != None:
       oprot.writeFieldBegin('data', TType.LIST, 3)
       oprot.writeListBegin(TType.STRING, len(self.data))
-      for iter27 in self.data:
-        oprot.writeString(iter27)
+      for iter20 in self.data:
+        oprot.writeString(iter20)
       oprot.writeListEnd()
       oprot.writeFieldEnd()
     if self.start_row != None:
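
Wire compatibility follows from the read loop shown above: a client that still serializes field 5 interoperates with the regenerated reader, because unknown field ids fall through to iprot.skip. A condensed sketch of that fallthrough (hadoop_user only; the other branches are identical in shape):

    from thrift.Thrift import TType

    def read_query_fields(iprot, query):
        # Same shape as the regenerated Query.read above: any field id the new
        # spec does not know -- including the retired 5 -- is skipped, not an error.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 4 and ftype == TType.STRING:
                query.hadoop_user = iprot.readString()
            else:
                iprot.skip(ftype)  # field 5 from an old writer lands here
            iprot.readFieldEnd()
        iprot.readStructEnd()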

+ 9 - 9
apps/beeswax/java/gen-java/com/cloudera/beeswax/api/BeeswaxService.java

@@ -6965,14 +6965,14 @@ public class BeeswaxService {
           case 0: // SUCCESS
             if (field.type == TType.LIST) {
               {
-                TList _list16 = iprot.readListBegin();
-                this.success = new ArrayList<ConfigVariable>(_list16.size);
-                for (int _i17 = 0; _i17 < _list16.size; ++_i17)
+                TList _list12 = iprot.readListBegin();
+                this.success = new ArrayList<ConfigVariable>(_list12.size);
+                for (int _i13 = 0; _i13 < _list12.size; ++_i13)
                 {
-                  ConfigVariable _elem18;
-                  _elem18 = new ConfigVariable();
-                  _elem18.read(iprot);
-                  this.success.add(_elem18);
+                  ConfigVariable _elem14;
+                  _elem14 = new ConfigVariable();
+                  _elem14.read(iprot);
+                  this.success.add(_elem14);
                 }
                 iprot.readListEnd();
               }
@@ -6998,9 +6998,9 @@ public class BeeswaxService {
         oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
         {
           oprot.writeListBegin(new TList(TType.STRUCT, this.success.size()));
-          for (ConfigVariable _iter19 : this.success)
+          for (ConfigVariable _iter15 : this.success)
           {
-            _iter19.write(oprot);
+            _iter15.write(oprot);
           }
           oprot.writeListEnd();
         }

+ 4 - 135
apps/beeswax/java/gen-java/com/cloudera/beeswax/api/Query.java

@@ -33,19 +33,16 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
   private static final TField QUERY_FIELD_DESC = new TField("query", TType.STRING, (short)1);
   private static final TField CONFIGURATION_FIELD_DESC = new TField("configuration", TType.LIST, (short)3);
   private static final TField HADOOP_USER_FIELD_DESC = new TField("hadoop_user", TType.STRING, (short)4);
-  private static final TField HADOOP_GROUPS_FIELD_DESC = new TField("hadoop_groups", TType.LIST, (short)5);
 
   public String query;
   public List<String> configuration;
   public String hadoop_user;
-  public List<String> hadoop_groups;
 
   /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
   public enum _Fields implements TFieldIdEnum {
     QUERY((short)1, "query"),
     CONFIGURATION((short)3, "configuration"),
-    HADOOP_USER((short)4, "hadoop_user"),
-    HADOOP_GROUPS((short)5, "hadoop_groups");
+    HADOOP_USER((short)4, "hadoop_user");
 
     private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 
@@ -66,8 +63,6 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
           return CONFIGURATION;
         case 4: // HADOOP_USER
           return HADOOP_USER;
-        case 5: // HADOOP_GROUPS
-          return HADOOP_GROUPS;
         default:
           return null;
       }
@@ -119,9 +114,6 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
             new FieldValueMetaData(TType.STRING))));
     tmpMap.put(_Fields.HADOOP_USER, new FieldMetaData("hadoop_user", TFieldRequirementType.DEFAULT, 
         new FieldValueMetaData(TType.STRING)));
-    tmpMap.put(_Fields.HADOOP_GROUPS, new FieldMetaData("hadoop_groups", TFieldRequirementType.DEFAULT, 
-        new ListMetaData(TType.LIST, 
-            new FieldValueMetaData(TType.STRING))));
     metaDataMap = Collections.unmodifiableMap(tmpMap);
     FieldMetaData.addStructMetaDataMap(Query.class, metaDataMap);
   }
@@ -132,14 +124,12 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
   public Query(
     String query,
     List<String> configuration,
-    String hadoop_user,
-    List<String> hadoop_groups)
+    String hadoop_user)
   {
     this();
     this.query = query;
     this.configuration = configuration;
     this.hadoop_user = hadoop_user;
-    this.hadoop_groups = hadoop_groups;
   }
 
   /**
@@ -159,13 +149,6 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
     if (other.isSetHadoop_user()) {
       this.hadoop_user = other.hadoop_user;
     }
-    if (other.isSetHadoop_groups()) {
-      List<String> __this__hadoop_groups = new ArrayList<String>();
-      for (String other_element : other.hadoop_groups) {
-        __this__hadoop_groups.add(other_element);
-      }
-      this.hadoop_groups = __this__hadoop_groups;
-    }
   }
 
   public Query deepCopy() {
@@ -177,7 +160,6 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
     this.query = null;
     this.configuration = null;
     this.hadoop_user = null;
-    this.hadoop_groups = null;
   }
 
   public String getQuery() {
@@ -267,45 +249,6 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
     }
   }
 
-  public int getHadoop_groupsSize() {
-    return (this.hadoop_groups == null) ? 0 : this.hadoop_groups.size();
-  }
-
-  public java.util.Iterator<String> getHadoop_groupsIterator() {
-    return (this.hadoop_groups == null) ? null : this.hadoop_groups.iterator();
-  }
-
-  public void addToHadoop_groups(String elem) {
-    if (this.hadoop_groups == null) {
-      this.hadoop_groups = new ArrayList<String>();
-    }
-    this.hadoop_groups.add(elem);
-  }
-
-  public List<String> getHadoop_groups() {
-    return this.hadoop_groups;
-  }
-
-  public Query setHadoop_groups(List<String> hadoop_groups) {
-    this.hadoop_groups = hadoop_groups;
-    return this;
-  }
-
-  public void unsetHadoop_groups() {
-    this.hadoop_groups = null;
-  }
-
-  /** Returns true if field hadoop_groups is set (has been asigned a value) and false otherwise */
-  public boolean isSetHadoop_groups() {
-    return this.hadoop_groups != null;
-  }
-
-  public void setHadoop_groupsIsSet(boolean value) {
-    if (!value) {
-      this.hadoop_groups = null;
-    }
-  }
-
   public void setFieldValue(_Fields field, Object value) {
     switch (field) {
     case QUERY:
@@ -332,14 +275,6 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
       }
       break;
 
-    case HADOOP_GROUPS:
-      if (value == null) {
-        unsetHadoop_groups();
-      } else {
-        setHadoop_groups((List<String>)value);
-      }
-      break;
-
     }
   }
 
@@ -354,9 +289,6 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
     case HADOOP_USER:
       return getHadoop_user();
 
-    case HADOOP_GROUPS:
-      return getHadoop_groups();
-
     }
     throw new IllegalStateException();
   }
@@ -374,8 +306,6 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
       return isSetConfiguration();
     case HADOOP_USER:
       return isSetHadoop_user();
-    case HADOOP_GROUPS:
-      return isSetHadoop_groups();
     }
     throw new IllegalStateException();
   }
@@ -420,15 +350,6 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
         return false;
     }
 
-    boolean this_present_hadoop_groups = true && this.isSetHadoop_groups();
-    boolean that_present_hadoop_groups = true && that.isSetHadoop_groups();
-    if (this_present_hadoop_groups || that_present_hadoop_groups) {
-      if (!(this_present_hadoop_groups && that_present_hadoop_groups))
-        return false;
-      if (!this.hadoop_groups.equals(that.hadoop_groups))
-        return false;
-    }
-
     return true;
   }
 
@@ -451,11 +372,6 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
     if (present_hadoop_user)
       builder.append(hadoop_user);
 
-    boolean present_hadoop_groups = true && (isSetHadoop_groups());
-    builder.append(present_hadoop_groups);
-    if (present_hadoop_groups)
-      builder.append(hadoop_groups);
-
     return builder.toHashCode();
   }
 
@@ -497,16 +413,6 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
         return lastComparison;
       }
     }
-    lastComparison = Boolean.valueOf(isSetHadoop_groups()).compareTo(typedOther.isSetHadoop_groups());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetHadoop_groups()) {
-      lastComparison = TBaseHelper.compareTo(this.hadoop_groups, typedOther.hadoop_groups);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
     return 0;
   }
 
@@ -555,23 +461,6 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
             TProtocolUtil.skip(iprot, field.type);
           }
           break;
-        case 5: // HADOOP_GROUPS
-          if (field.type == TType.LIST) {
-            {
-              TList _list3 = iprot.readListBegin();
-              this.hadoop_groups = new ArrayList<String>(_list3.size);
-              for (int _i4 = 0; _i4 < _list3.size; ++_i4)
-              {
-                String _elem5;
-                _elem5 = iprot.readString();
-                this.hadoop_groups.add(_elem5);
-              }
-              iprot.readListEnd();
-            }
-          } else { 
-            TProtocolUtil.skip(iprot, field.type);
-          }
-          break;
         default:
           TProtocolUtil.skip(iprot, field.type);
       }
@@ -596,9 +485,9 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
       oprot.writeFieldBegin(CONFIGURATION_FIELD_DESC);
       {
         oprot.writeListBegin(new TList(TType.STRING, this.configuration.size()));
-        for (String _iter6 : this.configuration)
+        for (String _iter3 : this.configuration)
         {
-          oprot.writeString(_iter6);
+          oprot.writeString(_iter3);
         }
         oprot.writeListEnd();
       }
@@ -609,18 +498,6 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
       oprot.writeString(this.hadoop_user);
       oprot.writeFieldEnd();
     }
-    if (this.hadoop_groups != null) {
-      oprot.writeFieldBegin(HADOOP_GROUPS_FIELD_DESC);
-      {
-        oprot.writeListBegin(new TList(TType.STRING, this.hadoop_groups.size()));
-        for (String _iter7 : this.hadoop_groups)
-        {
-          oprot.writeString(_iter7);
-        }
-        oprot.writeListEnd();
-      }
-      oprot.writeFieldEnd();
-    }
     oprot.writeFieldStop();
     oprot.writeStructEnd();
   }
@@ -653,14 +530,6 @@ public class Query implements TBase<Query, Query._Fields>, java.io.Serializable,
       sb.append(this.hadoop_user);
     }
     first = false;
-    if (!first) sb.append(", ");
-    sb.append("hadoop_groups:");
-    if (this.hadoop_groups == null) {
-      sb.append("null");
-    } else {
-      sb.append(this.hadoop_groups);
-    }
-    first = false;
     sb.append(")");
     return sb.toString();
   }

+ 16 - 16
apps/beeswax/java/gen-java/com/cloudera/beeswax/api/Results.java

@@ -612,13 +612,13 @@ public class Results implements TBase<Results, Results._Fields>, java.io.Serializable,
         case 2: // COLUMNS
           if (field.type == TType.LIST) {
             {
-              TList _list8 = iprot.readListBegin();
-              this.columns = new ArrayList<String>(_list8.size);
-              for (int _i9 = 0; _i9 < _list8.size; ++_i9)
+              TList _list4 = iprot.readListBegin();
+              this.columns = new ArrayList<String>(_list4.size);
+              for (int _i5 = 0; _i5 < _list4.size; ++_i5)
               {
-                String _elem10;
-                _elem10 = iprot.readString();
-                this.columns.add(_elem10);
+                String _elem6;
+                _elem6 = iprot.readString();
+                this.columns.add(_elem6);
               }
               iprot.readListEnd();
             }
@@ -629,13 +629,13 @@ public class Results implements TBase<Results, Results._Fields>, java.io.Serializable,
         case 3: // DATA
           if (field.type == TType.LIST) {
             {
-              TList _list11 = iprot.readListBegin();
-              this.data = new ArrayList<String>(_list11.size);
-              for (int _i12 = 0; _i12 < _list11.size; ++_i12)
+              TList _list7 = iprot.readListBegin();
+              this.data = new ArrayList<String>(_list7.size);
+              for (int _i8 = 0; _i8 < _list7.size; ++_i8)
               {
-                String _elem13;
-                _elem13 = iprot.readString();
-                this.data.add(_elem13);
+                String _elem9;
+                _elem9 = iprot.readString();
+                this.data.add(_elem9);
               }
               iprot.readListEnd();
             }
@@ -681,9 +681,9 @@ public class Results implements TBase<Results, Results._Fields>, java.io.Serializable,
       oprot.writeFieldBegin(COLUMNS_FIELD_DESC);
       {
         oprot.writeListBegin(new TList(TType.STRING, this.columns.size()));
-        for (String _iter14 : this.columns)
+        for (String _iter10 : this.columns)
         {
-          oprot.writeString(_iter14);
+          oprot.writeString(_iter10);
         }
         oprot.writeListEnd();
       }
@@ -693,9 +693,9 @@ public class Results implements TBase<Results, Results._Fields>, java.io.Serializable,
       oprot.writeFieldBegin(DATA_FIELD_DESC);
       {
         oprot.writeListBegin(new TList(TType.STRING, this.data.size()));
-        for (String _iter15 : this.data)
+        for (String _iter11 : this.data)
         {
-          oprot.writeString(_iter15);
+          oprot.writeString(_iter11);
         }
         oprot.writeListEnd();
       }

+ 0 - 2
apps/beeswax/src/beeswax/management/commands/beeswax_install_examples.py

@@ -49,7 +49,6 @@ import hive_metastore.ttypes
 
 LOG = logging.getLogger(__name__)
 HADOOP_USER = 'hue'
-HADOOP_GRP = 'supergroup'
 
 class InstallException(Exception):
   pass
@@ -141,7 +140,6 @@ def _make_query_msg(hql):
   """
   query_msg = BeeswaxService.Query(query=hql, configuration=[])
   query_msg.hadoop_user = HADOOP_USER
-  query_msg.hadoop_groups = (HADOOP_GRP,)
   return query_msg
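
Worth noting for the Python call sites (an observation about the bindings, not part of this commit): the old sites set hadoop_groups as a plain attribute, so a leftover assignment would not raise; the regenerated write() simply has no field-5 branch, and the value would be silently dropped on the wire. Hence the cleanups here and in tests.py and views.py below:

    # Hypothetical stale call site after this commit: it still runs, but sends
    # nothing for groups, because Query.write() no longer serializes field 5.
    query_msg = BeeswaxService.Query(query=hql, configuration=[])
    query_msg.hadoop_user = HADOOP_USER
    query_msg.hadoop_groups = ('supergroup',)  # silently ignored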
 
 

+ 0 - 1
apps/beeswax/src/beeswax/tests.py

@@ -383,7 +383,6 @@ for x in sys.stdin:
     query_msg.query = 'SELECT * FROM test'
     query_msg.configuration = []
     query_msg.hadoop_user = "test"
-    query_msg.hadoop_groups = ["test"]
     handle = beeswax.db_utils.db_client().query(query_msg)
     query_data = beeswax.models.QueryHistory(server_id=handle.id, log_context=handle.log_context)
     # Get the result in xls. Then translate it into csv.

+ 0 - 1
apps/beeswax/src/beeswax/views.py

@@ -177,7 +177,6 @@ def make_beeswax_query(request, hql, query_form=None):
 
   # Configure running user and group.
   query_msg.hadoop_user = request.user.username
-  query_msg.hadoop_groups = request.user.get_groups()
 
   if query_form is not None:
     for f in query_form.settings.forms:

+ 0 - 1
apps/beeswax/thrift/beeswax.thrift

@@ -31,7 +31,6 @@ struct Query {
 
   // User and groups to "act as" for purposes of Hadoop.
   4: string hadoop_user;
-  5: list<string> hadoop_groups;
 }
 
 typedef string LogContextId
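
Since field id 5 has now been used on the wire, a cautious follow-up (a suggestion, not something this commit does) is to treat it as permanently retired rather than recycling it for a future field, the same way id 2 is already held as a None placeholder in the regenerated thrift_spec:

    from thrift.Thrift import TType

    # thrift_spec as regenerated in ttypes.py above (field 1 reconstructed from
    # the struct docstring; it is outside the hunk shown). Id 5 now simply falls
    # off the end and should stay unused.
    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'query', None, None, ),  # 1
        None,  # 2
        (3, TType.LIST, 'configuration', (TType.STRING, None), None, ),  # 3
        (4, TType.STRING, 'hadoop_user', None, None, ),  # 4
    )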