[gs] Add Google Storage filebrowser support in Hue (#3455)

What changes were proposed in this pull request?

- Adding a Google Storage (GS) filebrowser to Hue as a separate filebrowser, piggybacking on the S3 filesystem implementation.
- Some changes to the boto package were also required to make the implementation work.

How was this patch tested?
- Manually.
- Added new unit tests.
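
At a high level, the GS filesystem subclasses the existing boto-based S3 filesystem and swaps in a GS connection. A minimal sketch of the piggybacking (the real class in desktop/core/src/desktop/lib/fs/gc/gs.py below overrides many more methods):

    # Sketch only; see gs.py in this diff for the full implementation.
    from aws.s3.s3fs import S3FileSystem

    class GSFileSystem(S3FileSystem):
      """Reuses the S3 implementation, overriding GS-specific path and key handling."""

      def __init__(self, gs_connection, expiration=None, fs='gs', headers=None):
        super().__init__(gs_connection, expiration=expiration, fs=fs, headers=headers)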
Harsh Gupta 2 years ago
parent
commit
56271e16c0
33 changed files with 1316 additions and 45 deletions
  1. apps/filebrowser/src/filebrowser/api.py (+3 -0)
  2. apps/filebrowser/src/filebrowser/forms.py (+3 -0)
  3. apps/filebrowser/src/filebrowser/templates/fb_components.mako (+6 -0)
  4. apps/filebrowser/src/filebrowser/templates/listdir.mako (+10 -7)
  5. apps/filebrowser/src/filebrowser/templates/listdir_components.mako (+38 -18)
  6. apps/filebrowser/src/filebrowser/views.py (+13 -1)
  7. desktop/conf.dist/hue.ini (+4 -1)
  8. desktop/conf/pseudo-distributed.ini.tmpl (+9 -6)
  9. desktop/core/src/desktop/conf.py (+7 -0)
  10. desktop/core/src/desktop/js/api/apiHelper.js (+72 -1)
  11. desktop/core/src/desktop/js/api/urls.js (+1 -0)
  12. desktop/core/src/desktop/js/apps/editor/components/aceEditor/autocomplete/AutocompleteResults.ts (+3 -0)
  13. desktop/core/src/desktop/js/components/sidebar/HueSidebar.vue (+2 -0)
  14. desktop/core/src/desktop/js/jquery/plugins/jquery.filechooser.js (+17 -0)
  15. desktop/core/src/desktop/js/ko/bindings/ace/ko.aceEditor.js (+10 -0)
  16. desktop/core/src/desktop/js/ko/components/assist/assistStorageEntry.js (+4 -0)
  17. desktop/core/src/desktop/js/ko/components/assist/ko.assistPanel.js (+1 -0)
  18. desktop/core/src/desktop/js/ko/components/assist/ko.assistStoragePanel.js (+1 -0)
  19. desktop/core/src/desktop/js/ko/components/ko.editorDroppableMenu.js (+1 -0)
  20. desktop/core/src/desktop/js/sql/autocompleteResults.js (+3 -0)
  21. desktop/core/src/desktop/lib/fs/gc/__init__.py (+99 -0)
  22. desktop/core/src/desktop/lib/fs/gc/client.py (+5 -8)
  23. desktop/core/src/desktop/lib/fs/gc/gs.py (+471 -0)
  24. desktop/core/src/desktop/lib/fs/gc/gsfile.py (+50 -0)
  25. desktop/core/src/desktop/lib/fs/gc/gsfile_test.py (+40 -0)
  26. desktop/core/src/desktop/lib/fs/gc/gsstat.py (+100 -0)
  27. desktop/core/src/desktop/lib/fs/gc/gsstat_test.py (+80 -0)
  28. desktop/core/src/desktop/lib/fs/gc/upload.py (+175 -0)
  29. desktop/core/src/desktop/lib/fs/gc/upload_test.py (+65 -0)
  30. desktop/core/src/desktop/lib/fs/ozone/upload_test.py (+6 -0)
  31. desktop/core/src/desktop/models.py (+10 -0)
  32. desktop/core/src/desktop/settings.py (+4 -1)
  33. desktop/libs/aws/src/aws/s3/s3fs.py (+3 -2)

+ 3 - 0
apps/filebrowser/src/filebrowser/api.py

@@ -21,6 +21,7 @@ from desktop.lib.django_util import JsonResponse
 from desktop.lib import fsmanager
 from desktop.lib.i18n import smart_unicode
 from desktop.lib.fs.ozone.ofs import get_ofs_home_directory
+from desktop.lib.fs.gc.gs import get_gs_home_directory
 
 from azure.abfs.__init__ import get_home_dir_for_abfs
 from aws.s3.s3fs import get_s3_home_directory
@@ -66,6 +67,8 @@ def get_filesystems_with_home_dirs(request): # Using as a public API only for no
       user_home_dir = request.user.get_home_directory()
     elif fs == 's3a':
       user_home_dir = get_s3_home_directory(request.user)
+    elif fs == 'gs':
+      user_home_dir = get_gs_home_directory(request.user)
     elif fs == 'abfs':
       user_home_dir = get_home_dir_for_abfs(request.user)
     elif fs == 'ofs':
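
For context, get_gs_home_directory (added in desktop/core/src/desktop/lib/fs/gc/gs.py later in this diff) resolves the user's GS home directory. An illustrative sketch of its behavior, where 'gs://warehouse/user' is a hypothetical configured value:

    get_gs_home_directory(user)  # -> 'gs://' when no gs:// REMOTE_STORAGE_HOME is set
    get_gs_home_directory(user)  # -> 'gs://warehouse/user' when REMOTE_STORAGE_HOME is set to
                                 #    that path (adjusted per user when RAZ is enabled)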

+ 3 - 0
apps/filebrowser/src/filebrowser/forms.py

@@ -30,6 +30,7 @@ from django.forms.formsets import formset_factory, BaseFormSet
 from aws.s3 import S3A_ROOT, normpath as s3_normpath
 from azure.abfs.__init__ import ABFS_ROOT, normpath as abfs_normpath
 from desktop.lib.fs.ozone import OFS_ROOT, normpath as ofs_normpath
+from desktop.lib.fs.gc import GS_ROOT, normpath as gs_normpath
 from desktop.lib import i18n
 from hadoop.fs import normpath
 from useradmin.models import User, Group
@@ -79,6 +80,8 @@ class PathField(CharField):
       value = ''
     elif value.lower().startswith(S3A_ROOT):
       cleaned_path = s3_normpath(cleaned_path)
+    elif value.lower().startswith(GS_ROOT):
+      cleaned_path = gs_normpath(cleaned_path)
     elif value.lower().startswith(ABFS_ROOT):
       cleaned_path = abfs_normpath(cleaned_path)
     elif value.lower().startswith(OFS_ROOT):

+ 6 - 0
apps/filebrowser/src/filebrowser/templates/fb_components.mako

@@ -40,6 +40,12 @@ else:
               <i class="fa fa-fw fa-cubes"></i> ${ get_default_region() }
             </span>
           </li>
+        %elif path.lower().find('gs://') == 0:
+          <li style="padding-top: 12px">
+            <span class="breadcrumb-link homeLink">
+              <svg class="hi"><use xlink:href='#hi-adls'></use></svg>
+            </span>
+          </li>
         %elif path.lower().find('adl:/') == 0:
           <li style="padding-top: 12px">
             <span class="breadcrumb-link homeLink">

+ 10 - 7
apps/filebrowser/src/filebrowser/templates/listdir.mako

@@ -49,7 +49,7 @@ ${ fb_components.menubar() }
   %endif
 </style>
 
-<div id="${ path.startswith('s3a://') and 'filebrowser_s3Components' or path.startswith('abfs://') and 'filebrowser_abfsComponents'  or path.startswith('ofs://') and 'filebrowser_ofsComponents'or 'filebrowserComponents' }" class="container-fluid filebrowser" style="min-height: calc(100vh - 130px);">
+<div id="${ path.startswith('s3a://') and 'filebrowser_s3Components' or path.startswith('gs://') and 'filebrowser_gsComponents' or path.startswith('abfs://') and 'filebrowser_abfsComponents'  or path.startswith('ofs://') and 'filebrowser_ofsComponents'or 'filebrowserComponents' }" class="container-fluid filebrowser" style="min-height: calc(100vh - 130px);">
   <div class="card card-small">
     <div class="actionbar">
     <%actionbar:render>
@@ -61,7 +61,7 @@ ${ fb_components.menubar() }
         <div class="btn-toolbar" style="display: inline; vertical-align: middle">
           <div id="ch-dropdown" class="btn-group" style="vertical-align: middle">
             <button class="btn dropdown-toggle" title="${_('Actions')}" data-toggle="dropdown"
-            data-bind="visible: !inTrash(), enable: selectedFiles().length > 0 && ((!isS3() && !isABFS() && !isOFS()) || (isS3() && !isS3Root()) || (isABFS() && !isABFSRoot()) || (isOFS() && !(isOFSRoot() || isOFSServiceID() || isOFSVol())))">
+            data-bind="visible: !inTrash(), enable: selectedFiles().length > 0 && ((!isS3() && !isGS() && !isABFS() && !isOFS()) || (isS3() && !isS3Root()) || (isGS() && !isGSRoot()) || (isABFS() && !isABFSRoot()) || (isOFS() && !(isOFSRoot() || isOFSServiceID() || isOFSVol())))">
               <i class="fa fa-cog"></i> ${_('Actions')}
               <span class="caret" style="line-height: 15px"></span>
             </button>
@@ -166,12 +166,15 @@ ${ fb_components.menubar() }
           <!-- ko if: isS3 -->
             <a class="btn fileToolbarBtn" title="${_('Upload files')}" data-bind="visible: !inTrash(), css: {'disabled': isS3Root()}, click: function(){ if (!isS3Root()) { uploadFile() }}"><i class="fa fa-arrow-circle-o-up"></i> ${_('Upload')}</a>
           <!-- /ko -->
+          <!-- ko if: isGS -->
+            <a class="btn fileToolbarBtn" title="${_('Upload files')}" data-bind="visible: !inTrash(), css: {'disabled': isGSRoot()}, click: function(){ if (!isGSRoot()) { uploadFile() }}"><i class="fa fa-arrow-circle-o-up"></i> ${_('Upload')}</a>
+          <!-- /ko -->
           <!-- ko if: isABFS -->
             <a class="btn fileToolbarBtn" title="${_('Upload files')}" data-bind="visible: !inTrash(), css: {'disabled': isABFSRoot()}, click: function(){ if (!isABFSRoot()) { uploadFile() }}"><i class="fa fa-arrow-circle-o-up"></i> ${_('Upload')}</a>
           <!-- /ko -->
-          <!-- ko ifnot: isS3() || isABFS() -->
+          <!-- ko ifnot: isS3() || isGS() || isABFS() -->
           <div id="upload-dropdown" class="btn-group" style="vertical-align: middle">
-            <a data-hue-analytics="filebrowser:upload-btn-click" href="javascript: void(0)" class="btn upload-link dropdown-toggle" title="${_('Upload')}" data-bind="click: uploadFile, visible: !inTrash(), css: {'disabled': isS3() && isS3Root() || isABFS() && isABFSRoot() || (isOFS() && (isOFSRoot() || isOFSServiceID() || isOFSVol()))}">
+            <a data-hue-analytics="filebrowser:upload-btn-click" href="javascript: void(0)" class="btn upload-link dropdown-toggle" title="${_('Upload')}" data-bind="click: uploadFile, visible: !inTrash(), css: {'disabled': (isOFS() && (isOFSRoot() || isOFSServiceID() || isOFSVol()))}">
               <i class="fa fa-arrow-circle-o-up"></i> ${_('Upload')}
             </a>
           </div>
@@ -183,11 +186,11 @@ ${ fb_components.menubar() }
               <span class="caret"></span>
             </a>
             <ul class="dropdown-menu pull-right" style="top: auto">
-              <li data-bind="visible: !isS3() && !isABFS() && !isOFS() || isS3() && !isS3Root() || isABFS() && !isABFSRoot() || isOFS() && !isOFSServiceID() && !isOFSVol()"><a data-hue-analytics="filebrowser:new-file-btn-click" href="javascript: void(0)" class="create-file-link" title="${_('File')}"><i class="fa fa-file-o"></i> ${_('File')}</a></li>
+              <li data-bind="visible: !isS3() && !isGS() && !isABFS() && !isOFS() || isS3() && !isS3Root() || isGS() && !isGSRoot() || isABFS() && !isABFSRoot() || isOFS() && !isOFSServiceID() && !isOFSVol()"><a data-hue-analytics="filebrowser:new-file-btn-click" href="javascript: void(0)" class="create-file-link" title="${_('File')}"><i class="fa fa-file-o"></i> ${_('File')}</a></li>
               <li><a href="javascript: void(0)" class="create-directory-link" title="${_('Directory')}">
                 <i class="fa fa-folder"></i>
-                <span data-bind="visible: !isS3() && !isABFS() && !isOFS() || isS3() && !isS3Root() || isABFS() && !isABFSRoot() || isOFS() && !isOFSServiceID() && !isOFSVol()">${_('Directory')}</span>
-                <span data-bind="visible: (isS3() && isS3Root()) || (isOFS() && isOFSVol())">${_('Bucket')}</span>
+                <span data-bind="visible: !isS3() && !isGS() && !isABFS() && !isOFS() || isS3() && !isS3Root() || isGS() && !isGSRoot() || isABFS() && !isABFSRoot() || isOFS() && !isOFSServiceID() && !isOFSVol()">${_('Directory')}</span>
+                <span data-bind="visible: (isS3() && isS3Root()) || (isGS() && isGSRoot()) || (isOFS() && isOFSVol())">${_('Bucket')}</span>
                 <span data-bind="visible: isABFS() && isABFSRoot()">${_('File System')}</span>
                 <span data-bind="visible: isOFS() && isOFSServiceID()">${_('Volume')}</span>
               </a></li>

+ 38 - 18
apps/filebrowser/src/filebrowser/templates/listdir_components.mako

@@ -180,11 +180,11 @@ else:
       <h2 class="modal-title">${ _('Confirm Delete') }</h2>
     </div>
     <div class="modal-body">
-      <!-- ko if: isS3() && isS3Root() -->
+      <!-- ko if: (isS3() && isS3Root()) || (isGS() && isGSRoot()) -->
       <p>${_('Are you sure you want to delete these buckets?')}</p>
       <p class="muted">${_('Deleting a bucket will delete all of its contents and release the bucket name to be reserved by others.')}</p>
       <!-- /ko -->
-      <!-- ko ifnot: isS3() && isS3Root() -->
+      <!-- ko ifnot: (isS3() && isS3Root()) || (isGS() && isGSRoot()) -->
       <!-- ko ifnot: $root.skipTrash -->
       <p>${_('Are you sure you want to delete these files?')}</p>
       <!-- /ko -->
@@ -461,10 +461,10 @@ else:
     <div id="createDirectoryModal" class="modal hide fade">
       <div class="modal-header">
         <button type="button" class="close" data-dismiss="modal" aria-label="${ _('Close') }"><span aria-hidden="true">&times;</span></button>
-        <!-- ko if: (!isS3() && !isABFS() && !isOFS()) || (isS3() && !isS3Root()) || (isABFS() && !isABFSRoot()) || (isOFS() && !isOFSServiceID() && !isOFSVol())  -->
+        <!-- ko if: (!isS3() && !isGS() && !isABFS() && !isOFS()) || (isS3() && !isS3Root()) || (isGS() && !isGSRoot()) || (isABFS() && !isABFSRoot()) || (isOFS() && !isOFSServiceID() && !isOFSVol())  -->
         <h2 class="modal-title">${_('Create Directory')}</h2>
         <!-- /ko -->
-        <!-- ko if: (isS3() && isS3Root()) || (isOFS() && isOFSVol()) -->
+        <!-- ko if: (isS3() && isS3Root()) || (isGS() && isGSRoot()) || (isOFS() && isOFSVol()) -->
         <h2 class="modal-title">${_('Create Bucket')}</h2>
         <!-- /ko -->
         <!-- ko if: isABFS() && isABFSRoot() -->
@@ -476,10 +476,10 @@ else:
       </div>
       <div class="modal-body">
         <label>
-          <!-- ko if: (!isS3() && !isABFS() && !isOFS()) || (isS3() && !isS3Root()) || (isABFS() && !isABFSRoot()) || (isOFS() && !isOFSServiceID() && !isOFSVol()) -->
+          <!-- ko if: (!isS3() && !isGS() && !isABFS() && !isOFS()) || (isS3() && !isS3Root()) || (isGS() && !isGSRoot()) || (isABFS() && !isABFSRoot()) || (isOFS() && !isOFSServiceID() && !isOFSVol()) -->
           ${_('Directory Name')}
           <!-- /ko -->
-          <!-- ko if: (isS3() && isS3Root()) || (isOFS() && isOFSVol()) -->
+          <!-- ko if: (isS3() && isS3Root()) || (isGS() && isGSRoot()) || (isOFS() && isOFSVol()) -->
           ${_('Bucket Name')}
           <!-- /ko -->
           <!-- ko if: isABFS() && isABFSRoot() -->
@@ -587,12 +587,12 @@ else:
   <!-- actions context menu -->
   <ul class="context-menu dropdown-menu">
   <!-- ko ifnot: $root.inTrash -->
-    <li data-bind="visible: (!isS3() && !isABFS() && !isOFS()) || (isS3() && !isS3Root()) || (isABFS() && !isABFSRoot()) || (isOFS() && !isOFSServiceID() && !isOFSVol()), css: {'disabled': $root.selectedFiles().length != 1 || isCurrentDirSelected().length > 0}">
+    <li data-bind="visible: (!isS3() && !isGS() && !isABFS() && !isOFS()) || (isS3() && !isS3Root()) || (isGS() && !isGSRoot()) || (isABFS() && !isABFSRoot()) || (isOFS() && !isOFSServiceID() && !isOFSVol()), css: {'disabled': $root.selectedFiles().length != 1 || isCurrentDirSelected().length > 0}">
     <a href="javascript: void(0)" title="${_('Rename')}" data-bind="click: ($root.selectedFiles().length == 1 && isCurrentDirSelected().length == 0) ? $root.renameFile: void(0)"><i class="fa fa-fw fa-font"></i>
     ${_('Rename')}</a></li>
-    <li data-bind="visible: (!isS3() && !isABFS() && !isOFS()) || (isS3() && !isS3Root()) || (isABFS() && !isABFSRoot()) || (isOFS() && !isOFSServiceID() && !isOFSVol()), css: {'disabled': $root.selectedFiles().length == 0 || isCurrentDirSelected().length > 0}">
+    <li data-bind="visible: (!isS3() && !isGS() &&  !isABFS() && !isOFS()) || (isS3() && !isS3Root()) || (isGS() && !isGSRoot()) || (isABFS() && !isABFSRoot()) || (isOFS() && !isOFSServiceID() && !isOFSVol()), css: {'disabled': $root.selectedFiles().length == 0 || isCurrentDirSelected().length > 0}">
     <a href="javascript: void(0)" title="${_('Move')}" data-bind="click: ( $root.selectedFiles().length > 0 && isCurrentDirSelected().length == 0) ? $root.move: void(0)"><i class="fa fa-fw fa-random"></i> ${_('Move')}</a></li>
-    <li data-bind="visible: (!isS3() && !isABFS() && !isOFS()) || (isS3() && !isS3Root()) || (isABFS() && !isABFSRoot()) || (isOFS() && !isOFSServiceID() && !isOFSVol()), css: {'disabled': $root.selectedFiles().length == 0 || isCurrentDirSelected().length > 0}">
+    <li data-bind="visible: (!isS3() && !isGS() && !isABFS() && !isOFS()) || (isS3() && !isS3Root()) || (isGS() && !isGSRoot()) || (isABFS() && !isABFSRoot()) || (isOFS() && !isOFSServiceID() && !isOFSVol()), css: {'disabled': $root.selectedFiles().length == 0 || isCurrentDirSelected().length > 0}">
     <a href="javascript: void(0)" title="${_('Copy')}" data-bind="click: ($root.selectedFiles().length > 0 && isCurrentDirSelected().length == 0) ? $root.copy: void(0)"><i class="fa fa-fw fa-files-o"></i> ${_('Copy')}</a></li>
     % if show_download_button:
     <li data-bind="css: {'disabled': $root.inTrash() || $root.selectedFiles().length != 1 || selectedFile().type != 'file'}">
@@ -649,7 +649,7 @@ else:
   <div id="submit-wf-modal" class="modal hide"></div>
 
   <script id="fileTemplate" type="text/html">
-    <tr class="row-animated" style="cursor: pointer" data-bind="visible: !(name == '..' && window.RAZ_IS_ENABLED), drop: { enabled: name !== '.' && type !== 'file' && (!$root.isS3() || ($root.isS3() && !$root.isS3Root())), value: $data }, event: { mouseover: toggleHover, mouseout: toggleHover, contextmenu: showContextMenu }, click: $root.viewFile, css: { 'row-selected': selected(), 'row-highlighted': highlighted(), 'row-deleted': deleted() }">
+    <tr class="row-animated" style="cursor: pointer" data-bind="visible: !(name == '..' && window.RAZ_IS_ENABLED), drop: { enabled: name !== '.' && type !== 'file' && ((!$root.isS3() || ($root.isS3() && !$root.isS3Root())) || (!$root.isGS() || ($root.isGS() && !$root.isGSRoot()))), value: $data }, event: { mouseover: toggleHover, mouseout: toggleHover, contextmenu: showContextMenu }, click: $root.viewFile, css: { 'row-selected': selected(), 'row-highlighted': highlighted(), 'row-deleted': deleted() }">
       <td class="center" data-bind="click: name !== '..' ? handleSelect : void(0)" style="cursor: default">
         <div data-bind="multiCheck: '#fileBrowserTable', visible: name != '..', css: { 'hue-checkbox': name != '..', 'fa': name != '..', 'fa-check': selected }"></div>
       </td>
@@ -663,7 +663,7 @@ else:
         <a href="#" data-bind="click: $root.viewFile"><i class="fa fa-level-up"></i></a>
         <!-- /ko -->
         <!-- ko if: name != '..' -->
-        <strong><a href="#" class="draggable-fb" data-bind="drag: { enabled: (!$root.isS3() || ($root.isS3() && !$root.isS3Root())), value: $data }, click: $root.viewFile, text: name, attr: { 'draggable': $.inArray(name, ['.', '..', '.Trash']) === -1 && !isBucket()}"></a></strong>
+        <strong><a href="#" class="draggable-fb" data-bind="drag: { enabled: ((!$root.isS3() || ($root.isS3() && !$root.isS3Root())) && (!$root.isGS() || ($root.isGS() && !$root.isGSRoot()))), value: $data }, click: $root.viewFile, text: name, attr: { 'draggable': $.inArray(name, ['.', '..', '.Trash']) === -1 && !isBucket()}"></a></strong>
         <!-- /ko -->
       </td>
       <td>
@@ -688,9 +688,9 @@ else:
         % endif
       </td>
       <td>
-        <span data-bind="text: permissions, visible: $root.isS3() || !selected() || $root.isCurrentDirSentryManaged() || isSentryManaged"></span>
+        <span data-bind="text: permissions, visible: $root.isS3() || $root.isGS() || !selected() || $root.isCurrentDirSentryManaged() || isSentryManaged"></span>
         <a href="#" rel="tooltip" title="${_('Change permissions')}"
-            data-bind="text: permissions, visible: !$root.isS3() && !$root.inTrash() && selected() && !$root.isCurrentDirSentryManaged() && !isSentryManaged, click: $root.changePermissions" data-original-title="${_('Change permissions')}"></a>
+            data-bind="text: permissions, visible: !$root.isS3() && !$root.isGS() && !$root.inTrash() && selected() && !$root.isCurrentDirSentryManaged() && !isSentryManaged, click: $root.changePermissions" data-original-title="${_('Change permissions')}"></a>
       </td>
       <td data-bind="text: stats.mtime" style="white-space: nowrap;"></td>
     </tr>
@@ -856,7 +856,7 @@ else:
           replication: file.stats.replication
         },
         isBucket: ko.pureComputed(function(){
-          return file.path.toLowerCase().indexOf('s3a://') == 0 && file.path.substr(6).indexOf('/') == -1
+          return (file.path.toLowerCase().indexOf('s3a://') == 0 && file.path.substr(6).indexOf('/') == -1) || (file.path.toLowerCase().indexOf('gs://') == 0 && file.path.substr(5).indexOf('/') == -1)
         }),
         selected: ko.observable(file.highlighted && fileBrowserViewModel.isArchive(file.name) || false),
         highlighted: ko.observable(file.highlighted || false),
@@ -1068,6 +1068,10 @@ else:
         return self.currentPath().toLowerCase().indexOf('s3a://') === 0;
       });
 
+      self.isGS = ko.pureComputed(function () {
+        return self.currentPath().toLowerCase().indexOf('gs://') === 0;
+      });
+
       self.isAdls = ko.pureComputed(function () {
         return self.currentPath().toLowerCase().indexOf('adl:/') === 0;
       });
@@ -1091,6 +1095,8 @@ else:
           return 'adls';
         } else if (scheme === 's3a' ){
           return 's3';
+        } else if (scheme === 'gs' ){
+          return 'gs';
         } else if (scheme === 'ofs' ){
           return 'ofs';
         } else if (!scheme || scheme == 'hdfs') {
@@ -1104,6 +1110,8 @@ else:
         var path = path && path.toLowerCase();
         if (path.indexOf('s3a://') >= 0) {
           return 's3a://';
+        } else if (path.indexOf('gs://') >= 0) {
+          return 'gs://';
         } else if (path.indexOf('adl:/') >= 0) {
           return 'adl:/';
         } else if (path.indexOf('abfs://') >= 0) {
@@ -1128,13 +1136,13 @@ else:
         return currentPath.indexOf('/') === 0 || currentPath.indexOf('hdfs') === 0
       });
       self.isCompressEnabled = ko.pureComputed(function () {
-        return !self.isS3() && !self.isAdls() && !self.isABFS() && !self.isOFS();
+        return !self.isS3() && !self.isGS() && !self.isAdls() && !self.isABFS() && !self.isOFS();
       });
       self.isSummaryEnabled = ko.pureComputed(function () {
         return self.isHdfs() || self.isOFS();
       });
       self.isPermissionEnabled = ko.pureComputed(function () {
-        return !self.isS3() && !self.isABFSRoot() && !self.isOFS();
+        return !self.isS3() && !self.isGS() && !self.isABFSRoot() && !self.isOFS();
       });
       self.isReplicationEnabled = ko.pureComputed(function () {
         return self.isHdfs();
@@ -1146,10 +1154,20 @@ else:
         }
       });
 
+      self.isGS.subscribe(function (newVal) {
+        if (newVal) {
+          huePubSub.publish('update.autocompleters');
+        }
+      });
+
       self.isS3Root = ko.pureComputed(function () {
         return self.isS3() && self.currentPath().toLowerCase() === 's3a://';
       });
 
+      self.isGSRoot = ko.pureComputed(function () {
+        return self.isGS() && self.currentPath().toLowerCase() === 'gs://';
+      });
+
       self.isABFSRoot = ko.pureComputed(function () {
         return self.isABFS() && self.currentPath().toLowerCase() === 'abfs://';
       });
@@ -2160,7 +2178,7 @@ else:
         $('.filebrowser').on('dragenter', function (e) {
           e.preventDefault();
 
-          if (_isExternalFile && !($("#uploadFileModal").is(":visible")) && (!fileBrowserViewModel.isS3() || (fileBrowserViewModel.isS3() && !fileBrowserViewModel.isS3Root()))) {
+          if (_isExternalFile && !($("#uploadFileModal").is(":visible")) && ((!fileBrowserViewModel.isS3() || (fileBrowserViewModel.isS3() && !fileBrowserViewModel.isS3Root())) && (!fileBrowserViewModel.isGS() || (fileBrowserViewModel.isGS() && !fileBrowserViewModel.isGSRoot())))) {
             showHoverMsg("${_('Drop files here to upload')}");
           }
         });
@@ -2261,7 +2279,7 @@ else:
               }
             }
           };
-          if (ops.path.toLowerCase() !== 's3a://') {
+          if (ops.path.toLowerCase() !== 's3a://' && ops.path.toLowerCase() !== 'gs://') {
             _dropzone = new Dropzone($('.filebrowser .hoverMsg')[0], options);
 
             _dropzone.on('queuecomplete', function () {
@@ -2432,6 +2450,7 @@ else:
             fileBrowserViewModel.enableMoveButton(allowMove);
           },
           isS3: fileBrowserViewModel.isS3(),
+          isGS: fileBrowserViewModel.isGS(),
           root: fileBrowserViewModel.rootCurrent()
         });
         $("#copyDestination").jHueHdfsAutocomplete({
@@ -2444,6 +2463,7 @@ else:
             fileBrowserViewModel.enableCopyButton(allowCopy);
           },
           isS3: fileBrowserViewModel.isS3(),
+          isGS: fileBrowserViewModel.isGS(),
           root: fileBrowserViewModel.rootCurrent()
         });
       });

+ 13 - 1
apps/filebrowser/src/filebrowser/views.py

@@ -58,6 +58,7 @@ from desktop.lib.export_csvxls import file_reader
 from desktop.lib.exceptions_renderable import PopupException
 from desktop.lib.fs import splitpath
 from desktop.lib.fs.ozone.ofs import get_ofs_home_directory
+from desktop.lib.fs.gc.gs import get_gs_home_directory
 from desktop.lib.i18n import smart_str
 from desktop.lib.paths import SAFE_CHARACTERS_URI, SAFE_CHARACTERS_URI_COMPONENTS
 from desktop.lib.tasks.compress_files.compress_utils import compress_files_in_hdfs
@@ -149,7 +150,7 @@ def _decode_slashes(path):
   # as %2F while the rest of the path is actually decoded. 
   encoded_slash = '%2F'
   if path.startswith(encoded_slash) or path.startswith('abfs:' + encoded_slash) or \
-    path.startswith('s3a:' + encoded_slash) or path.startswith('ofs:' + encoded_slash):
+    path.startswith('s3a:' + encoded_slash) or path.startswith('gs:' + encoded_slash) or path.startswith('ofs:' + encoded_slash):
     path = path.replace(encoded_slash, '/')
 
   return path
@@ -162,6 +163,8 @@ def _normalize_path(path):
     path = path.replace('abfs:/', 'abfs://')
   if path.startswith('s3a:/') and not path.startswith('s3a://'):
     path = path.replace('s3a:/', 's3a://')
+  if path.startswith('gs:/') and not path.startswith('gs://'):
+    path = path.replace('gs:/', 'gs://')
   if path.startswith('ofs:/') and not path.startswith('ofs://'):
     path = path.replace('ofs:/', 'ofs://')
 
@@ -253,6 +256,15 @@ def view(request, path):
           '/filebrowser/view=' + urllib_quote(home_dir_path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS)
       )
 
+  # default_gs_home is set in jquery.filechooser.js
+  if 'default_gs_home' in request.GET:
+    home_dir_path = get_gs_home_directory()
+    if request.fs.isdir(home_dir_path):
+      return format_preserving_redirect(
+          request,
+          '/filebrowser/view=' + urllib_quote(home_dir_path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS)
+      )
+
   # default_ofs_home is set in jquery.filechooser.js
   if 'default_ofs_home' in request.GET:
     home_dir_path = get_ofs_home_directory()

+ 4 - 1
desktop/conf.dist/hue.ini

@@ -935,10 +935,13 @@ tls=no
 
 # Settings for the Google Cloud lib
 # ------------------------------------------------------------------------
+# Maximum number of keys with a specific directory prefix that can be deleted in a single bulk operation in GS.
+## gs_bulk_delete_dir_keys_max_limit=100
+
 [[gc_accounts]]
 [[[default]]]
 # The JSON credentials to authenticate to Google Cloud e.g. '{ "type": "service_account", "project_id": .... }'
-# json_credentials=None
+## json_credentials=None
 
 ## Configuration for Ozone File System
 # ------------------------------------------------------------------------

+ 9 - 6
desktop/conf/pseudo-distributed.ini.tmpl

@@ -916,12 +916,15 @@
    # Django cache to use to store temporarily used data during query execution. This is in addition to result_file_storage and result_backend.
    ## execution_storage='{"BACKEND": "django.core.cache.backends.locmem.LocMemCache", "LOCATION": "celery-hue"}'
 
-   # Settings for the Google Cloud lib
-   # ------------------------------------------------------------------------
-   [[gc_accounts]]
-      [[[default]]]
-        # The JSON credentials to authenticate to Google Cloud e.g. '{ "type": "service_account", "project_id": .... }'
-        # json_credentials=None
+  # Settings for the Google Cloud lib
+  # ------------------------------------------------------------------------
+  # Maximum number of keys with a specific directory prefix that can be deleted in a single bulk operation in GS.
+  ## gs_bulk_delete_dir_keys_max_limit=100
+
+  [[gc_accounts]]
+    [[[default]]]
+      # The JSON credentials to authenticate to Google Cloud e.g. '{ "type": "service_account", "project_id": .... }'
+      ## json_credentials=None
 
   ## Configuration for Ozone File System
   # ------------------------------------------------------------------------

+ 7 - 0
desktop/core/src/desktop/conf.py

@@ -2585,6 +2585,13 @@ def get_ldap_bind_password(ldap_config):
 
 PERMISSION_ACTION_GS = "gs_access"
 
+GS_BULK_DELETE_DIR_KEYS_MAX_LIMIT = Config(
+  help=_('Maximum number of keys with a specific directory prefix that can be deleted in a single bulk operation in GS.'),
+  key='gs_bulk_delete_dir_keys_max_limit',
+  default=100,
+  type=coerce_zero_or_positive_integer
+)
+
 GC_ACCOUNTS = UnspecifiedConfigSection(
   'gc_accounts',
   help=_('One entry for each GC account'),
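
The new limit is consumed through Hue's standard Config API, as rmtree does later in this diff:

    from desktop.conf import GS_BULK_DELETE_DIR_KEYS_MAX_LIMIT

    limit = GS_BULK_DELETE_DIR_KEYS_MAX_LIMIT.get()  # 100 unless overridden in hue.ini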

+ 72 - 1
desktop/core/src/desktop/js/api/apiHelper.js

@@ -73,6 +73,7 @@ class ApiHelper {
       setInLocalStorage(this.getAssistCacheIdentifier({ sourceType: 'ofs' }), {});
       setInLocalStorage(this.getAssistCacheIdentifier({ sourceType: 'git' }), {});
       setInLocalStorage(this.getAssistCacheIdentifier({ sourceType: 's3' }), {});
+      setInLocalStorage(this.getAssistCacheIdentifier({ sourceType: 'gs' }), {});
       setInLocalStorage(this.getAssistCacheIdentifier({ sourceType: 'collections' }), {});
       setInLocalStorage(this.getAssistCacheIdentifier({ sourceType: 'hbase' }), {});
       setInLocalStorage(this.getAssistCacheIdentifier({ sourceType: 'document' }), {});
@@ -216,7 +217,7 @@ class ApiHelper {
    *
    * @param {Object} options
    * @param {string[]} options.path
-   * @param {string} options.type - 's3', 'adls', 'abfs', 'ofs' or 'hdfs'
+   * @param {string} options.type - 's3', 'gs', 'adls', 'abfs', 'ofs' or 'hdfs'
    * @param {number} [options.offset]
    * @param {number} [options.length]
    * @param {boolean} [options.silenceErrors]
@@ -225,6 +226,8 @@ class ApiHelper {
     let url;
     if (options.type === 's3') {
       url = URLS.S3_API_PREFIX;
+    } else if (options.type === 'gs') {
+      url = URLS.GS_API_PREFIX;
     } else if (options.type === 'adls') {
       url = URLS.ADLS_API_PREFIX;
     } else if (options.type === 'abfs') {
@@ -657,6 +660,74 @@ class ApiHelper {
     );
   }
 
+  /**
+   * @param {Object} options
+   * @param {Function} options.successCallback
+   * @param {Function} [options.errorCallback]
+   * @param {boolean} [options.silenceErrors]
+   * @param {Number} [options.timeout]
+   * @param {Object} [options.editor] - Ace editor
+   *
+   * @param {string[]} options.pathParts
+   * @param {number} [options.pageSize] - Default 500
+   * @param {number} [options.page] - Default 1
+   * @param {string} [options.filter]
+   */
+  fetchGSPath(options) {
+    options.pathParts.shift(); // remove the trailing /
+    let url =
+      URLS.GS_API_PREFIX +
+      encodeURI(options.pathParts.join('/')) +
+      '?format=json&sortby=name&descending=false&pagesize=' +
+      (options.pageSize || 500) +
+      '&pagenum=' +
+      (options.page || 1);
+    if (options.filter) {
+      url += '&filter=' + options.filter;
+    }
+    const fetchFunction = storeInCache => {
+      if (options.timeout === 0) {
+        assistErrorCallback(options)({ status: -1 });
+        return;
+      }
+
+      $.ajax({
+        dataType: 'json',
+        url: url,
+        timeout: options.timeout,
+        success: data => {
+          if (
+            !data.error &&
+            !successResponseIsError(data) &&
+            typeof data.files !== 'undefined' &&
+            data.files !== null
+          ) {
+            if (data.files.length > 2 && !options.filter) {
+              storeInCache(data);
+            }
+            options.successCallback(data);
+          } else {
+            assistErrorCallback(options)(data);
+          }
+        }
+      })
+        .fail(assistErrorCallback(options))
+        .always(() => {
+          if (typeof options.editor !== 'undefined' && options.editor !== null) {
+            options.editor.hideSpinner();
+          }
+        });
+    };
+
+    this.fetchCached(
+      $.extend({}, options, {
+        sourceType: 'gs',
+        url: url,
+        fetchFunction: fetchFunction
+      })
+    );
+  }
+
   async fetchFavoriteApp(options) {
     return new Promise((resolve, reject) => {
       simpleGet('/desktop/api2/user_preferences/default_app').done(resolve).fail(reject);

+ 1 - 0
desktop/core/src/desktop/js/api/urls.js

@@ -28,6 +28,7 @@ export const ADLS_API_PREFIX = '/filebrowser/view=' + encodeURIComponent('adl:/'
 export const ABFS_API_PREFIX = '/filebrowser/view=' + encodeURIComponent('ABFS://');
 export const GIT_API_PREFIX = '/desktop/api/vcs/contents/';
 export const S3_API_PREFIX = '/filebrowser/view=' + encodeURIComponent('s3a://');
+export const GS_API_PREFIX = '/filebrowser/view=' + encodeURIComponent('gs://');
 export const IMPALA_INVALIDATE_API = '/impala/api/invalidate';
 export const CONFIG_SAVE_API = '/desktop/api/configurations/save/';
 export const CONFIG_APPS_API = '/desktop/api/configurations';

+ 3 - 0
desktop/core/src/desktop/js/apps/editor/components/aceEditor/autocomplete/AutocompleteResults.ts

@@ -1169,6 +1169,9 @@ class AutocompleteResults {
     if (/^s3a:\/\//i.test(path)) {
       fetchFunction = 'fetchS3Path';
       path = path.substring(5);
+    } else if (/^gs:\/\//i.test(path)) {
+      fetchFunction = 'fetchGSPath';
+      path = path.substring(4);
     } else if (/^adl:\/\//i.test(path)) {
       fetchFunction = 'fetchAdlsPath';
       path = path.substring(5);

+ 2 - 0
desktop/core/src/desktop/js/components/sidebar/HueSidebar.vue

@@ -306,6 +306,8 @@
           case 'filebrowser':
             if (location.href.indexOf('=S3A') !== -1) {
               adaptedName = 's3';
+            } else if (location.href.indexOf('=gs') !== -1) {
+              adaptedName = 'gs';
             } else if (location.href.indexOf('=adl') !== -1) {
               adaptedName = 'adls';
             } else if (location.href.indexOf('=abfs') !== -1) {

+ 17 - 0
desktop/core/src/desktop/js/jquery/plugins/jquery.filechooser.js

@@ -92,6 +92,23 @@ const defaults = {
         name: 'S3'
       }
     },
+    gs: {
+      scheme: 'gs',
+      root: 'gs://',
+      home: '/?default_gs_home',
+      icon: {
+        svg: {
+          brand: '#hi-adls',
+          home: '#hi-adls'
+        },
+        brand: 'fa-windows',
+        home: 'fa-windows'
+      },
+      label: {
+        home: '',
+        name: 'GS'
+      }
+    },
     adl: {
       scheme: 'adl',
       root: 'adl:/',

+ 10 - 0
desktop/core/src/desktop/js/ko/bindings/ace/ko.aceEditor.js

@@ -670,6 +670,16 @@ registerBinding(NAME, {
       dblClickS3ItemSub.remove();
     });
 
+    const dblClickGSItemSub = huePubSub.subscribe('assist.dblClickGSItem', assistGSEntry => {
+      if ($el.data('last-active-editor')) {
+        editor.session.insert(editor.getCursorPosition(), "'gs://" + assistGSEntry.path + "'");
+      }
+    });
+
+    disposeFunctions.push(() => {
+      dblClickGSItemSub.remove();
+    });
+
     const sampleErrorInsertSub = huePubSub.subscribe('sample.error.insert.click', popoverEntry => {
       const table = popoverEntry.identifierChain[popoverEntry.identifierChain.length - 1]['name'];
       const text = 'SELECT * FROM ' + table + ' LIMIT 100;';

+ 4 - 0
desktop/core/src/desktop/js/ko/components/assist/assistStorageEntry.js

@@ -40,6 +40,10 @@ const TYPE_SPECIFICS = {
     apiHelperFetchFunction: 'fetchS3Path',
     dblClickPubSubId: 'assist.dblClickS3Item'
   },
+  gs: {
+    apiHelperFetchFunction: 'fetchGSPath',
+    dblClickPubSubId: 'assist.dblClickGSItem'
+  },
   ofs: {
     apiHelperFetchFunction: 'fetchOfsPath',
     dblClickPubSubId: 'assist.dblClickOfsItem'

+ 1 - 0
desktop/core/src/desktop/js/ko/components/assist/ko.assistPanel.js

@@ -130,6 +130,7 @@ class AssistPanel {
               interpreter.type === 'adls' ||
               interpreter.type === 'hdfs' ||
               interpreter.type === 's3' ||
+              interpreter.type === 'gs' ||
               interpreter.type === 'abfs' ||
               interpreter.type === 'ofs'
           );

+ 1 - 0
desktop/core/src/desktop/js/ko/components/assist/ko.assistStoragePanel.js

@@ -225,6 +225,7 @@ class AssistStoragePanel {
     huePubSub.subscribe('assist.storage.go.home', () => {
       const path =
         this.activeSource().type === 's3' ||
+        this.activeSource().type === 'gs' ||
         this.activeSource().type === 'abfs' ||
         this.activeSource().type === 'ofs'
           ? '/'

+ 1 - 0
desktop/core/src/desktop/js/ko/components/ko.editorDroppableMenu.js

@@ -113,6 +113,7 @@ class EditorDroppableMenu extends DisposableComponent {
         let text = ui.helper.text();
         if (
           meta.type === 's3' ||
+          meta.type === 'gs' ||
           meta.type === 'hdfs' ||
           meta.type === 'adls' ||
           meta.type === 'abfs' ||

+ 3 - 0
desktop/core/src/desktop/js/sql/autocompleteResults.js

@@ -1423,6 +1423,9 @@ class AutocompleteResults {
     if (/^s3a:\/\//i.test(path)) {
       fetchFunction = 'fetchS3Path';
       path = path.substring(5);
+    } else if (/^gs:\/\//i.test(path)) {
+      fetchFunction = 'fetchGSPath';
+      path = path.substring(4);
     } else if (/^adl:\/\//i.test(path)) {
       fetchFunction = 'fetchAdlsPath';
       path = path.substring(5);

+ 99 - 0
desktop/core/src/desktop/lib/fs/gc/__init__.py

@@ -15,3 +15,102 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import absolute_import
+
+from builtins import map
+from future.utils import raise_
+import calendar
+import errno
+import logging
+import posixpath
+import re
+import sys
+import time
+
+from functools import wraps
+
+from boto.exception import GSResponseError
+from hadoop.fs import normpath as fs_normpath
+
+LOG = logging.getLogger()
+
+
+ERRNO_MAP = {
+  403: errno.EACCES,
+  404: errno.ENOENT
+}
+DEFAULT_ERRNO = errno.EINVAL
+
+GS_PATH_RE = re.compile('^/*[gG][sS]://([^/]+)(/(.*?([^/]+)?/?))?$')
+GS_ROOT = 'gs://'
+
+
+def lookup_gserror(error):
+  err_no = ERRNO_MAP.get(error.status, DEFAULT_ERRNO)
+  return IOError(err_no, error.reason)
+
+
+def translate_gs_error(fn):
+  @wraps(fn)
+  def wrapped(*args, **kwargs):
+    try:
+      return fn(*args, **kwargs)
+    except GSResponseError:
+      _, exc, tb = sys.exc_info()
+      LOG.error('GS error: %s' % exc)
+      lookup = lookup_gserror(exc)
+      raise_(lookup.__class__, lookup, tb)
+  return wrapped
+
+
+def parse_uri(uri):
+  """Returns tuple (bucket_name, key_name, key_basename).
+  Raises ValueError if invalid GS URI is passed.
+  """
+  match = GS_PATH_RE.match(uri)
+  if not match:
+    raise ValueError("Invalid GS URI: %s" % uri)
+  key = match.group(3) or ''
+  basename = match.group(4) or ''
+  return match.group(1), key, basename
+
+def is_root(uri):
+  """Check if URI is GS root (gs://)."""
+  return uri.lower() == GS_ROOT
+
+
+def abspath(cd, uri):
+  """Returns absolute URI, examples:
+
+  abspath('gs://bucket/key', 'key2') == 'gs://bucket/key/key2'
+  abspath('gs://bucket/key', 'gs://bucket2/key2') == 'gs://bucket2/key2'
+  """
+  if cd.lower().startswith(GS_ROOT):
+    uri = join(cd, uri)
+  else:
+    uri = normpath(join(cd, uri))
+  return uri
+
+
+def join(*comp_list):
+  def _prep(uri):
+    try:
+      return '/%s/%s' % parse_uri(uri)[:2]
+    except ValueError:
+      return '/' if is_root(uri) else uri
+  joined = posixpath.join(*list(map(_prep, comp_list)))
+  if joined and joined[0] == '/':
+    joined = 'gs:/%s' % joined
+  return joined
+
+
+def normpath(path):
+  """Return normalized path but ignore leading GS_ROOT prefix if it exists."""
+  if path.lower().startswith(GS_ROOT):
+    if is_root(path):
+      normalized = path
+    else:
+      normalized = '%s%s' % (GS_ROOT, fs_normpath(path[len(GS_ROOT):]))
+  else:
+    normalized = fs_normpath(path)
+  return normalized
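
Doctest-style illustrations of the helpers above (derived from the regex and join logic in this file; illustrative, not part of the patch itself):

    parse_uri('gs://bucket/key/child')               # -> ('bucket', 'key/child', 'child')
    join('gs://bucket/key', 'key2')                  # -> 'gs://bucket/key/key2'
    abspath('gs://bucket/key', 'gs://bucket2/key2')  # -> 'gs://bucket2/key2'
    normpath('gs://bucket//key/./child')             # -> 'gs://bucket/key/child'
    is_root('GS://')                                 # -> True (case-insensitive)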

+ 5 - 8
desktop/core/src/desktop/lib/fs/gc/client.py

@@ -13,8 +13,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from __future__ import absolute_import
-
 import logging
 LOG = logging.getLogger()
 
@@ -24,7 +22,6 @@ except ImportError:
   LOG.warning('gcs_oauth2_boto_plugin module not found')
 import json
 
-from aws.s3.s3fs import S3FileSystem
 from boto.auth_handler import AuthHandler, NotReadyToAuthenticate
 from boto.gs.bucket import Bucket
 from boto.gs.connection import GSConnection
@@ -34,6 +31,7 @@ from boto.s3.connection import SubdomainCallingFormat
 from desktop import conf
 from desktop.lib.idbroker import conf as conf_idbroker
 from desktop.lib.idbroker.client import IDBroker
+from desktop.lib.fs.gc.gs import GSFileSystem
 
 
 def get_credential_provider(config, user):
@@ -45,11 +43,10 @@ def _make_client(identifier, user):
   config = conf.GC_ACCOUNTS[identifier] if identifier in list(conf.GC_ACCOUNTS.keys()) else None
   client = Client.from_config(config, get_credential_provider(config, user))
 
-  return S3FileSystem(
+  return GSFileSystem(
     client.get_s3_connection(),
     client.expiration,
     headers={"x-goog-project-id": client.project},
-    filebrowser_action=conf.PERMISSION_ACTION_GS
   )  # It would be nice if the connection was lazy loaded
 
 
@@ -63,7 +60,7 @@ class Client(object):
   @classmethod
   def from_config(cls, config, credential_provider):
     credentials = credential_provider.get_credentials()
-    return Client(json_credentials=credentials.get('JsonCredentials'), expiration=credentials.get('Expiration', 0))
+    return Client(json_credentials=credentials.get('JsonCredentials'), expiration=credentials.get('Expiration'))
 
   def get_s3_connection(self):
     return HueGSConnection(provider=HueProvider('google', json_credentials=self.json_credentials))
@@ -124,7 +121,7 @@ class HueGSConnection(GSConnection):
 class CredentialProviderConf(object):
 
   def __init__(self, conf):
-    self._conf=conf
+    self._conf = conf
 
   def validate(self):
     credentials = self.get_credentials()
@@ -150,7 +147,7 @@ class CredentialProviderConf(object):
 class CredentialProviderIDBroker(object):
 
   def __init__(self, idbroker):
-    self.idbroker=idbroker
+    self.idbroker = idbroker
     self.credentials = None
 
   def validate(self):

+ 471 - 0
desktop/core/src/desktop/lib/fs/gc/gs.py

@@ -0,0 +1,471 @@
+#!/usr/bin/env python
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import re
+import logging
+import posixpath
+import time
+
+from boto.exception import BotoClientError, GSResponseError
+from boto.gs.connection import Location
+from boto.gs.key import Key
+
+from boto.s3.prefix import Prefix
+from django.utils.translation import gettext as _
+
+from desktop.conf import PERMISSION_ACTION_GS, GS_BULK_DELETE_DIR_KEYS_MAX_LIMIT
+from desktop.lib.fs.gc import GS_ROOT, abspath, parse_uri, translate_gs_error, normpath, join as gs_join
+from desktop.lib.fs.gc.gsstat import GSStat
+from desktop.lib.fs.gc.gsfile import open as gsfile_open
+
+from filebrowser.conf import REMOTE_STORAGE_HOME
+
+from aws.s3.s3fs import S3FileSystem
+
+
+DEFAULT_READ_SIZE = 1024 * 1024  # 1MB
+BUCKET_NAME_PATTERN = re.compile(
+  "^((?:(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9_\-]*[a-zA-Z0-9])\.)*(?:[A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9_\-]*[A-Za-z0-9]))$")
+
+
+LOG = logging.getLogger()
+
+class GSFileSystemException(IOError):
+  def __init__(self, *args, **kwargs):
+    super(GSFileSystemException, self).__init__(*args, **kwargs)
+
+
+class GSListAllBucketsException(GSFileSystemException):
+  def __init__(self, *args, **kwargs):
+    super(GSListAllBucketsException, self).__init__(*args, **kwargs)
+
+
+def auth_error_handler(view_fn):
+  def decorator(*args, **kwargs):
+    try:
+      return view_fn(*args, **kwargs)
+    except (GSResponseError, IOError) as e:
+      LOG.exception('GS error: ' + str(e))
+      if 'Forbidden' in str(e) or (hasattr(e, 'status') and e.status == 403):
+        path = kwargs.get('path')
+        if not path and len(args) > 1:
+          path = args[1]  # We assume that the path is the first argument after self
+        msg = _('User is not authorized to perform the attempted operation. Check that the user has appropriate permissions.')
+        if path:
+          msg = _('User is not authorized to write or modify path: %s. Check that the user has write permissions.') % path
+        raise GSFileSystemException(msg)
+      else:
+        msg = str(e)
+        if isinstance(e, GSResponseError):
+          msg = e.message or e.reason
+        raise GSFileSystemException(msg)
+    except Exception as e:
+      raise e
+  return decorator
+
+
+def get_gs_home_directory(user=None):
+  from desktop.models import _handle_user_dir_raz
+
+  remote_home_gs = 'gs://'
+  if hasattr(REMOTE_STORAGE_HOME, 'get') and REMOTE_STORAGE_HOME.get() and REMOTE_STORAGE_HOME.get().startswith('gs://'):
+    remote_home_gs = REMOTE_STORAGE_HOME.get()
+
+  remote_home_gs = _handle_user_dir_raz(user, remote_home_gs)
+
+  return remote_home_gs
+
+
+class GSFileSystem(S3FileSystem):
+
+  def __init__(self, gs_connection, expiration=None, fs='gs', headers=None, filebrowser_action=PERMISSION_ACTION_GS):
+    super().__init__(
+      gs_connection,
+      expiration=expiration,
+      fs=fs,
+      headers=headers,
+      filebrowser_action=filebrowser_action
+    )
+  
+  @staticmethod
+  def join(*comp_list):
+    return gs_join(*comp_list)
+
+  @staticmethod
+  def normpath(path):
+    return normpath(path)
+
+  def netnormpath(self, path):
+    return normpath(path)
+
+  @staticmethod
+  def parent_path(path):
+    """Get the parent path of a GS path.
+
+    Args:
+      path (str): The GS path for which to find the parent path.
+
+    Returns:
+      str: The parent path.
+    """
+    parent_dir = GSFileSystem._append_separator(path)
+
+    if not GSFileSystem.isroot(parent_dir):
+      bucket_name, key_name, basename = parse_uri(path)
+
+      if not basename:  # bucket is top-level, so return root
+        parent_dir = GS_ROOT
+      else:
+        bucket_path = '%s%s' % (GS_ROOT, bucket_name)
+        key_path = '/'.join(key_name.split('/')[:-1])
+        parent_dir = abspath(bucket_path, key_path)
+
+    return parent_dir
+  
+  @translate_gs_error
+  def stats(self, path):
+    """Get file or directory stats for a GS path.
+
+    Args:
+      path (str): The GS path to get stats for.
+
+    Returns:
+      GSStat: An object representing the stats of the file or directory.
+    
+    Raises:
+      GSFileSystemException: If the file or directory does not exist.
+    """
+    path = normpath(path)
+    stats = self._stats(path)
+    if stats:
+      return stats
+    raise GSFileSystemException("No such file or directory: '%s'" % path)
+
+  @translate_gs_error
+  @auth_error_handler
+  def create(self, path, overwrite=False, data=None):
+    """Create a file in GS at the specified path.
+
+    Args:
+      path (str): The GS path where the file should be created.
+      overwrite (bool): Whether to overwrite the file if it already exists.
+      data (str): The data to write to the file.
+
+    Raises:
+      Exception: If the create operation fails or some problem occurs when fetching the GS bucket or creating new key in it.
+    """
+    key = self._get_key(path)
+    if not key:
+      try:
+        bucket_name, key_name = parse_uri(path)[:2]
+        bucket = self._get_bucket(bucket_name)
+
+        key = bucket.new_key(key_name)
+      except Exception as e:
+        raise e
+
+    if key:
+      key.set_contents_from_string(data or '', replace=overwrite)
+    else:
+      raise Exception('Cannot perform create operation.')
+
+  def _get_key(self, path, validate=True):
+    bucket_name, key_name = parse_uri(path)[:2]
+    bucket = self._get_bucket(bucket_name)
+
+    try:
+      return bucket.get_key(key_name, headers=self.header_values)
+    except BotoClientError as e:
+      raise GSFileSystemException(_('Failed to access path at "%s": %s') % (path, e.reason))
+    except GSResponseError as e:
+      if e.status in (301, 400):
+        raise GSFileSystemException(_('Failed to access path: "%s" '
+          'Check that you have access to read this bucket and that the region is correct: %s') % (path, e.message or e.reason))
+      elif e.status == 403:
+        raise GSFileSystemException(_('User is not authorized to access path at "%s".' % path))
+      else:
+        raise GSFileSystemException(e.message or e.reason)
+
+  @translate_gs_error
+  def open(self, path, mode='r'):
+    key = self._get_key(path)
+    if key is None:
+      raise GSFileSystemException("No such file or directory: '%s'" % path)
+    return gsfile_open(key, mode=mode)
+
+  @translate_gs_error
+  def listdir_stats(self, path, glob=None):
+    """List and get stats for files and directories in a GS bucket.
+    For the path 'gs://', it gets stats for all listed buckets in the GS filesystem.
+
+    Args:
+      path (str): The GS path to list.
+      glob (str, optional): Glob pattern for filtering files. Default is None.
+
+    Returns:
+      list of GSStat: A list of GSStat objects representing files and directories in the path.
+                      For the 'gs://' path, it returns a list of GSStat objects for all listed buckets.
+    """
+    if glob is not None:
+      raise NotImplementedError(_("Option `glob` is not implemented"))
+
+    if GSFileSystem.isroot(path):
+      # Return sorted stats of all listed buckets for path gs://
+      try:
+        return sorted(
+          [GSStat.from_bucket(b, self.fs) for b in self._s3_connection.get_all_buckets(headers=self.header_values)], key=lambda x: x.name)
+      except GSFileSystemException as e:
+        raise e
+      except GSResponseError as e:
+        if 'Forbidden' in str(e) or (hasattr(e, 'status') and e.status == 403):
+          raise GSListAllBucketsException(
+            _('You do not have permissions to list all buckets. Please specify a bucket name you have access to.'))
+        else:
+          raise GSFileSystemException(_('Failed to retrieve buckets: %s') % e.reason)
+      except Exception as e:
+        raise GSFileSystemException(('Failed to retrieve buckets: %s') % e)
+
+    bucket_name, prefix = parse_uri(path)[:2]
+    bucket = self._get_bucket(bucket_name)
+    prefix = self._append_separator(prefix)
+
+    res = []
+    for item in bucket.list(prefix=prefix, delimiter='/', headers=self.header_values):
+      if isinstance(item, Prefix):
+        res.append(GSStat.from_key(Key(item.bucket, item.name), is_dir=True, fs=self.fs))
+      else:
+        if item.name == prefix:
+          continue
+        res.append(self._stats_key(item, self.fs))
+
+    return res
+
+  @translate_gs_error
+  def listdir(self, path, glob=None):
+    return [parse_uri(x.path)[2] for x in self.listdir_stats(path, glob)]
+
+  @translate_gs_error
+  @auth_error_handler
+  def rmtree(self, path, skipTrash=True):
+    """Remove keys from GS filesystem.
+
+    Args:
+      path (str): The GS key path of the file or directory to remove.
+      skipTrash (bool): Whether to skip the trash when deleting.
+
+    Raises:
+      NotImplementedError: Since moving to trash is not implemented.
+      GSFileSystemException: If the removal operation fails.
+    """
+    if not skipTrash:
+      raise NotImplementedError(_('Moving to trash is not implemented for GS'))
+
+    bucket_name, key_name = parse_uri(path)[:2]
+    if bucket_name and not key_name:
+      self._delete_bucket(bucket_name)
+    else:
+      if self.isdir(path):
+        # Make sure the directory path ends with a '/' so that it is reflected in key_name
+        path = self._append_separator(path)
+        _, key_name = parse_uri(path)[:2]
+
+      key = self._get_key(path)
+      if key:
+        dir_keys = []
+        if self.isdir(path):
+          dir_keys = key.bucket.list(prefix=key_name)
+
+        if not dir_keys:
+          # Avoid Raz bulk delete issue
+          deleted_key = key.delete()
+          if deleted_key.exists():
+            raise GSFileSystemException('Could not delete key %s' % deleted_key)
+        else:
+          # key.bucket.delete_keys() call is not supported from GS side
+          # So, try deleting all keys with the directory prefix one by one
+
+          # TODO: Check on the UI side if key count is greater than max limit and show nice notification.
+          deleted_dir_key_count = 0
+          for key in list(dir_keys):
+            if deleted_dir_key_count > GS_BULK_DELETE_DIR_KEYS_MAX_LIMIT.get():
+              break
+
+            deleted_key = key.delete()
+            deleted_dir_key_count += 1
+
+  @translate_gs_error
+  @auth_error_handler
+  def mkdir(self, path, *args, **kwargs):
+    """Creates a directory and any parent directory if necessary.
+
+    Actually it creates an empty object: gs://[bucket]/[path]/
+    """
+    bucket_name, key_name = parse_uri(path)[:2]
+
+    if not BUCKET_NAME_PATTERN.match(bucket_name):
+      raise GSFileSystemException(_('Invalid bucket name: %s') % bucket_name)
+
+    try:
+      self._get_or_create_bucket(bucket_name)
+    except GSFileSystemException as e:
+      raise e
+    except GSResponseError as e:
+      raise GSFileSystemException(_('Failed to create GS bucket "%s": %s: %s') % (bucket_name, e.reason, e.body))
+    except Exception as e:
+      raise GSFileSystemException(_('Failed to create GS bucket "%s": %s') % (bucket_name, e))
+
+    stats = self._stats(path)
+    if stats:
+      if stats.isDir:
+        return None
+      else:
+        raise GSFileSystemException("'%s' already exists and is not a directory" % path)
+
+    path = self._append_separator(path)  # a directory key should end with '/'
+    self.create(path)  # create empty object
+
+  def _stats(self, path):
+    if GSFileSystem.isroot(path):
+      return GSStat.for_gs_root()
+    
+    try:
+      key = self._get_key(path)
+    except BotoClientError as e:
+      raise GSFileSystemException(_('Failed to access path "%s": %s') % (path, e.reason))
+    except GSResponseError as e:
+      if e.status == 404:
+        return None
+      elif e.status == 403:
+        raise GSFileSystemException(_('User is not authorized to access path: "%s"') % path)
+      else:
+        raise GSFileSystemException(_('Failed to access path "%s": %s') % (path, e.reason))
+    except Exception as e: # SSL errors show up here, because they've been remapped in boto
+      raise GSFileSystemException(_('Failed to access path "%s": %s') % (path, str(e)))
+
+    if key is None:
+      bucket_name, key_name = parse_uri(path)[:2]
+      bucket = self._get_bucket(bucket_name)
+
+      key = Key(bucket, key_name)
+    
+    return self._stats_key(key, self.fs)
+  
+  @staticmethod
+  def _stats_key(key, fs='gs'):
+    if key.size is not None:
+      is_directory_name = not key.name or key.name[-1] == '/'
+
+      return GSStat.from_key(key, is_dir=is_directory_name, fs=fs)
+    else:
+      key.name = GSFileSystem._append_separator(key.name)
+      ls = key.bucket.get_all_keys(prefix=key.name, max_keys=1)  # Not sure this is possible via a signed request
+
+      if len(ls) > 0:
+        return GSStat.from_key(key, is_dir=True, fs=fs)
+
+    return None
+
+  def _copy(self, src, dst, recursive, use_src_basename):
+    """Copy files and directories from a source GS path to a destination GS path.
+
+    Args:
+      src (str): The source GS path.
+      dst (str): The destination GS path.
+      recursive (bool): Whether to copy recursively for directories.
+      use_src_basename (bool): Whether to use the source basename when copying directories.
+
+    Returns:
+      None: If copying is successful.
+
+    Raises:
+      GSFileSystemException: If any errors occur during the copy operation.
+    """
+    src_st = self.stats(src)
+    if src_st.isDir and not recursive:
+      return None # omitting directory
+
+    # Check if the source is a directory and destination is not a directory
+    dst = abspath(src, dst)
+    dst_st = self._stats(dst)
+    if src_st.isDir and dst_st and not dst_st.isDir:
+      raise GSFileSystemException("Cannot overwrite non-directory '%s' with directory '%s'" % (dst, src))
+
+    # Skip the operation if the destination is the source's parent directory,
+    # since the copy would overwrite the source with itself
+    if self._check_key_parent_path(src, dst):
+      raise GSFileSystemException('Destination path is the parent of the source path, skipping the operation.')
+
+    src_bucket, src_key = parse_uri(src)[:2]
+    dst_bucket, dst_key = parse_uri(dst)[:2]
+
+    keep_src_basename = use_src_basename and dst_st and dst_st.isDir
+    src_bucket = self._get_bucket(src_bucket)
+    dst_bucket = self._get_bucket(dst_bucket)
+
+    # Determine whether to keep the source basename when copying directories and 
+    # calculate the cut-off length for key names accordingly.
+    if keep_src_basename:
+      cut = len(posixpath.dirname(src_key))  # cut off the parent directory name
+      if cut:
+        cut += 1
+    else:
+      cut = len(src_key)
+      if not src_key.endswith('/'):
+        cut += 1
+
+    for key in src_bucket.list(prefix=src_key):
+      if not key.name.startswith(src_key):
+        raise GSFileSystemException(_("Invalid key to transform: %s") % key.name)
+
+      dst_name = posixpath.normpath(gs_join(dst_key, key.name[cut:]))
+
+      # Ensure directory paths end with a separator
+      if self.isdir(normpath(self.join(GS_ROOT, key.bucket.name, key.name))):
+        dst_name = self._append_separator(dst_name)
+
+      key.copy(dst_bucket, dst_name)
+
+  @translate_gs_error
+  @auth_error_handler
+  def rename(self, old, new):
+    """Rename a file or directory in GS.
+
+    Copies the content to the new key and then deletes the old one.
+    The new key is created if it did not exist before.
+
+    Args:
+      old (str): The current GS path of the file or directory.
+      new (str): The new GS path to rename to.
+    """
+    new = abspath(old, new)
+
+    # Skip the operation if the destination is the source's parent directory,
+    # i.e. the rename would leave the key where it already is
+    if self._check_key_parent_path(old, new):
+      raise GSFileSystemException('Destination path is the parent of the source path, skipping the operation.')
+
+    self.copy(old, new, recursive=True)
+    self.rmtree(old, skipTrash=True)
+
+  @translate_gs_error
+  @auth_error_handler
+  def _check_key_parent_path(self, src, dst):
+    # True if the parent path of the source is the same as the destination path
+    return GSFileSystem.parent_path(src) == dst

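Taken together, mkdir(), _stats() and rename() give the GS client POSIX-like directory semantics over flat object keys. A minimal usage sketch, assuming a configured 'gs' connector; the user name, bucket and paths below are hypothetical:

from desktop.lib.fsmanager import get_client

fs = get_client(fs='gs', user='hue')  # hypothetical user

# Creates the empty placeholder object gs://demo-bucket/reports/
fs.mkdir('gs://demo-bucket/reports/')

# GS has no native rename: rename() copies every key under the old prefix
# to the new one, then removes the originals with rmtree(skipTrash=True)
fs.rename('gs://demo-bucket/reports', 'gs://demo-bucket/archive')
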
+ 50 - 0
desktop/core/src/desktop/lib/fs/gc/gsfile.py

@@ -0,0 +1,50 @@
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import errno
+
+from boto.s3.keyfile import KeyFile
+from aws.s3.s3file import _ReadableS3File
+
+
+def open(key, mode='r'):
+  """Open a Google Cloud Storage (GS) file in read mode.
+
+  Args:
+    key: The GS key object.
+    mode (str): The mode for opening the file (default is 'r').
+
+  Returns:
+    _ReadableGSFile: A readable GS file object.
+      
+  Raises:
+    IOError: If an unsupported mode is provided.
+  """
+
+  if mode == 'r':
+    return _ReadableGSFile(key)
+  else:
+    raise IOError(errno.EINVAL, 'Unavailable mode "%s"' % mode)
+
+
+class _ReadableGSFile(_ReadableS3File):
+  """Readable GS file class.
+
+  This class extends _ReadableS3File for reading GS files.
+  """
+  def __init__(self, key):
+    key_copy = key.bucket.get_key(key.name)
+    KeyFile.__init__(self, key_copy)
+

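gsfile mirrors aws.s3.s3file: reads stream through boto's KeyFile, while writes go through the multipart upload handler added later in this PR. A minimal read sketch, reusing the hypothetical client from the sketch above (_get_key() is the private helper _stats() already uses):

from desktop.lib.fs.gc import gsfile

key = fs._get_key('gs://demo-bucket/data.csv')  # a boto Key object

f = gsfile.open(key, mode='r')  # any mode other than 'r' raises IOError
header = f.read(4096)           # stream the first 4 KiB from GS
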
+ 40 - 0
desktop/core/src/desktop/lib/fs/gc/gsfile_test.py

@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from nose.tools import assert_true, assert_false, assert_equal, assert_raises
+from unittest.mock import Mock
+
+from desktop.lib.fs.gc.gsfile import open, _ReadableGSFile
+
+class TestGSFile(object):
+
+  def test_open_read_mode(self):
+    mock_gs_key = Mock()
+    mock_gs_key.name = "gethue_dir/test.csv"
+    mock_gs_key.bucket.get_key.return_value = mock_gs_key
+    
+    gs_file = open(mock_gs_key, mode='r')
+
+    assert_true(isinstance(gs_file, _ReadableGSFile))
+    mock_gs_key.bucket.get_key.assert_called_once_with('gethue_dir/test.csv')
+  
+  def test_open_invalid_mode(self):
+    mock_gs_key = Mock()
+
+    # open() itself raises IOError for any mode other than 'r', so no side_effect is needed on the mock
+    assert_raises(IOError, open, mock_gs_key, 'w')
+

+ 100 - 0
desktop/core/src/desktop/lib/fs/gc/gsstat.py

@@ -0,0 +1,100 @@
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import posixpath
+
+from aws.s3 import s3datetime_to_timestamp
+from aws.s3.s3stat import S3Stat
+
+
+class GSStat(S3Stat):
+  """Custom class for Google Cloud Storage (GS) file statistics.
+
+  This class extends S3Stat and provides methods for creating GSStat objects from GS bucket and key objects.
+  """
+
+  def __init__(self, name, path, isDir, size, mtime):
+    super().__init__(
+      name=name,
+      path=path,
+      isDir=isDir,
+      size=size,
+      mtime=mtime
+    )
+
+
+  @classmethod
+  def from_bucket(cls, bucket, fs='gs'):
+    """Create a GSStat object from a GS bucket.
+
+    Args:
+      bucket: The GS bucket object.
+      fs (str): The file system (e.g., 'gs').
+
+    Returns:
+      GSStat: A GSStat object representing the GS bucket.
+    """
+    return cls(bucket.name, '%s://%s' % (fs, bucket.name), True, 0, None)
+
+
+  @classmethod
+  def from_key(cls, key, is_dir=False, fs='gs'):
+    """Create a GSStat object from a GS key object.
+
+    Args:
+      key: The GS key object.
+      is_dir (bool): True if the key represents a directory, False otherwise.
+      fs (str): The file system (e.g., 'gs').
+
+    Returns:
+      GSStat: A GSStat object representing the GS key.
+    """
+    if key.name:
+      name = posixpath.basename(key.name[:-1] if key.name[-1] == '/' else key.name)
+      path = '%s://%s/%s' % (fs, key.bucket.name, key.name)
+    else:
+      name = ''
+      path = '%s://%s' % (fs, key.bucket.name)
+
+    size = key.size or 0
+
+    gs_date = None
+    if key.last_modified is not None:
+      gs_date = key.last_modified
+    elif hasattr(key, 'date') and key.date is not None:
+      gs_date = key.date
+    mtime = s3datetime_to_timestamp(gs_date) if gs_date else None
+
+    return cls(name, path, is_dir, size, mtime)
+
+
+  @classmethod
+  def for_gs_root(cls):
+    """Create a GSStat object representing the root of the GS file system.
+
+    Returns:
+      GSStat: A GSStat object representing the root of the GS file system.
+    """
+    return cls('GS', 'gs://', True, 0, None)
+
+
+  def to_json_dict(self):
+    """Returns a dictionary representation of the GSStat object for easy serialization."""
+
+    keys = ('path', 'size', 'atime', 'mtime', 'mode', 'user', 'group', 'aclBit')
+    res = {}
+    for k in keys:
+      res[k] = self[k]
+    return res

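from_key() reuses S3's ISO-8601 parsing for mtime; a quick sanity check of the conversion, matching the value asserted in the tests below:

from aws.s3 import s3datetime_to_timestamp

s3datetime_to_timestamp('2023-09-21T12:03:00.000Z')  # -> 1695297780 (epoch seconds)
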
+ 80 - 0
desktop/core/src/desktop/lib/fs/gc/gsstat_test.py

@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from nose.tools import assert_equal
+from unittest.mock import Mock
+
+from desktop.lib.fs.gc.gsstat import GSStat
+
+
+class TestGSStat(object):
+  def setUp(self):
+    self.mock_gs_bucket = Mock()
+    self.mock_gs_bucket.name = 'gethue_bucket'
+
+    self.mock_gs_key = Mock()
+    self.mock_gs_key.name = 'test.csv'
+    self.mock_gs_key.bucket = self.mock_gs_bucket
+    self.mock_gs_key.size = 123
+    self.mock_gs_key.last_modified = '2023-09-21T12:03:00.000Z' # Some mock timestamp
+
+
+  def test_from_bucket(self):
+    gs_bucket_stat = GSStat.from_bucket(self.mock_gs_bucket)
+
+    assert_equal(gs_bucket_stat.name, 'gethue_bucket')
+    assert_equal(gs_bucket_stat.path, 'gs://gethue_bucket')
+    assert_equal(gs_bucket_stat.isDir, True)
+    assert_equal(gs_bucket_stat.size, 0)
+    assert_equal(gs_bucket_stat.mtime, None)
+
+
+  def test_from_key(self):
+    gs_key_stat = GSStat.from_key(self.mock_gs_key)
+
+    assert_equal(gs_key_stat.name, 'test.csv')
+    assert_equal(gs_key_stat.path, 'gs://gethue_bucket/test.csv')
+    assert_equal(gs_key_stat.isDir, False)
+    assert_equal(gs_key_stat.size, 123)
+    assert_equal(gs_key_stat.mtime, 1695297780)  # 2023-09-21T12:03:00Z as epoch seconds
+
+
+  def test_for_gs_root(self):
+    gs_root_stat = GSStat.for_gs_root()
+
+    assert_equal(gs_root_stat.name, 'GS')
+    assert_equal(gs_root_stat.path, 'gs://')
+    assert_equal(gs_root_stat.isDir, True)
+    assert_equal(gs_root_stat.size, 0)
+    assert_equal(gs_root_stat.mtime, None)
+
+
+  def test_to_json_dict(self):
+    gs_key_stat = GSStat.from_key(self.mock_gs_key)
+
+    json_dict = gs_key_stat.to_json_dict()
+    expected_dict = {
+      'path': 'gs://gethue_bucket/test.csv',
+      'size': 123,
+      'atime': 1695297780,
+      'mtime': 1695297780,
+      'mode': 33206,
+      'user': '',
+      'group': '',
+      'aclBit': False
+    }
+
+    assert_equal(json_dict, expected_dict)

+ 175 - 0
desktop/core/src/desktop/lib/fs/gc/upload.py

@@ -0,0 +1,175 @@
+#!/usr/bin/env python
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Classes for a custom upload handler to stream into GS.
+
+See http://docs.djangoproject.com/en/1.9/topics/http/file-uploads/
+"""
+
+from io import BytesIO as stream_io
+import logging
+
+from django.core.files.uploadedfile import SimpleUploadedFile
+from django.core.files.uploadhandler import FileUploadHandler, StopFutureHandlers, StopUpload, UploadFileException
+
+from desktop.lib.fsmanager import get_client
+from desktop.lib.fs.gc import parse_uri
+from desktop.lib.fs.gc.gs import GSFileSystemException
+
+
+LOG = logging.getLogger()
+
+DEFAULT_WRITE_SIZE = 1024 * 1024 * 50  # TODO: set in configuration (currently 50 MiB)
+
+class GSFileUploadError(UploadFileException):
+  pass
+
+class GSFileUploadHandler(FileUploadHandler):
+  """This handler is triggered by any upload field whose destination path starts with "GS" (case insensitive).
+
+  Streams data chunks directly to Google Cloud Storage (GS).
+  """
+
+  def __init__(self, request):
+    super().__init__(request)
+    self.chunk_size = DEFAULT_WRITE_SIZE
+    self.destination = request.GET.get('dest')  # GET param avoids infinite looping
+    self.target_path = None
+    self.file = None
+    self._request = request
+    self._mp = None
+    self._part_num = 1
+
+    if self._is_gs_upload():
+      self._fs = get_client(fs='gs', user=request.user.username)
+      self.bucket_name, self.key_name = parse_uri(self.destination)[:2]
+
+      # Verify that the path exists
+      self._fs._stats(self.destination)
+      self._bucket = self._fs._get_bucket(self.bucket_name)
+
+
+  def new_file(self, field_name, file_name, *args, **kwargs):
+    """Handle the start of a new file upload.
+
+    This method is called when a new file is encountered during the upload process.
+    """
+    if self._is_gs_upload():
+      super().new_file(field_name, file_name, *args, **kwargs)
+
+      LOG.info('Using GSFileUploadHandler to handle file upload.')
+      self.target_path = self._fs.join(self.key_name, file_name)
+
+      try:
+        # Check access permissions before attempting upload
+        self._check_access()
+
+        # Create a multipart upload request
+        LOG.debug("Initiating GS multipart upload to target path: %s" % self.target_path)
+        self._mp = self._bucket.initiate_multipart_upload(self.target_path)
+        self.file = SimpleUploadedFile(name=file_name, content='')
+
+        raise StopFutureHandlers()
+      except (GSFileUploadError, GSFileSystemException) as e:
+        LOG.error("Encountered error in GSUploadHandler check_access: %s" % e)
+        self.request.META['upload_failed'] = e
+        raise StopUpload()
+
+
+  def receive_data_chunk(self, raw_data, start):
+    """Receive and process a data chunk from the uploaded file.
+
+    This method is called for each data chunk received during the upload process.
+    """
+    if self._is_gs_upload():
+      try:
+        LOG.debug("GSFileUploadHandler uploading file part: %d" % self._part_num)
+        fp = self._get_file_part(raw_data)
+        self._mp.upload_part_from_file(fp=fp, part_num=self._part_num)
+        self._part_num += 1
+        return None
+      except Exception as e:
+        self._mp.cancel_upload()
+        LOG.exception('Failed to upload file to GS at %s: %s' % (self.target_path, e))
+        raise StopUpload()
+    else:
+      return raw_data
+
+
+  def file_complete(self, file_size):
+    """Finalize the file upload process.
+
+    This method is called when the entire file has been uploaded.
+    """
+    if self._is_gs_upload():
+      LOG.info("GSFileUploadHandler has completed file upload to GS, total file size is: %d." % file_size)
+      self._mp.complete_upload()
+      self.file.size = file_size
+      return self.file
+    else:
+      return None
+
+
+  def _is_gs_upload(self):
+    """Check if the upload destination is Google Cloud Storage (GS).
+
+    Returns:
+      bool: True if the destination is GS, False otherwise.
+    """
+    return self._get_scheme() and self._get_scheme().startswith('gs')
+
+
+  def _check_access(self):
+    """Check if the user has write access to the GS destination path.
+
+    Raises:
+      GSFileSystemException: If access permission is insufficient.
+    """
+    if not self._fs.check_access(self.destination, permission='WRITE'):
+      raise GSFileSystemException('Insufficient permissions to write to GS path "%s".' % self.destination)
+
+
+  def _get_scheme(self):
+    """Get the scheme (protocol) of the destination.
+
+    Returns:
+      str or None: The scheme (e.g., 'gs') if present in the destination, or None if not present.
+    """
+    if self.destination:
+      # str.split() always returns at least one element, so indexing is safe here
+      return self.destination.split('://')[0].lower()
+    else:
+      return None
+
+
+  def _get_file_part(self, raw_data):
+    """Create a file-like object from raw data.
+
+    Args:
+      raw_data (bytes): Raw data chunk.
+
+    Returns:
+      File-like object: A file-like object containing the raw data.
+    """
+    fp = stream_io()
+    fp.write(raw_data)
+    fp.seek(0)
+    return fp

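Django drives the handler callbacks in a fixed order: new_file() once per file, receive_data_chunk() for every 50 MiB chunk, then file_complete(). A condensed sketch of the resulting GS multipart flow; 'request' and 'payload' are hypothetical stand-ins for a Django request (with GET['dest'] = 'gs://demo-bucket/uploads') and the raw upload bytes:

from django.core.files.uploadhandler import StopFutureHandlers
from desktop.lib.fs.gc.upload import GSFileUploadHandler

handler = GSFileUploadHandler(request)

try:
  handler.new_file('hdfs_file', 'data.csv', 'text/csv', len(payload))  # initiates the multipart upload
except StopFutureHandlers:
  pass  # expected: tells Django that no later handler should see this file

for start in range(0, len(payload), handler.chunk_size):
  handler.receive_data_chunk(payload[start:start + handler.chunk_size], start)  # one GS part per chunk

uploaded = handler.file_complete(len(payload))  # complete_upload(), returns the SimpleUploadedFile
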
+ 65 - 0
desktop/core/src/desktop/lib/fs/gc/upload_test.py

@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Licensed to Cloudera, Inc. under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  Cloudera, Inc. licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from nose.tools import assert_true, assert_false
+from desktop.lib.fs.gc.upload import GSFileUploadHandler
+
+from unittest.mock import patch, Mock
+
+
+class TestGSFileUploadHandler(object):
+
+  def test_is_gs_upload(self):
+    with patch('desktop.lib.fs.gc.upload.get_client') as get_client:
+      get_client.return_value = Mock()
+
+      # Check for gs path
+      request = Mock(GET={'dest': 'gs://buck1/key'})
+      upload_handler = GSFileUploadHandler(request)
+
+      assert_true(upload_handler._is_gs_upload())
+
+      # Check for ofs path
+      request = Mock(GET={'dest': 'ofs://service-id/vol1/buck1/key'})
+      upload_handler = GSFileUploadHandler(request)
+
+      assert_false(upload_handler._is_gs_upload())
+
+      # Check for s3a path
+      request = Mock(GET={'dest': 's3a://buck1/key'})
+      upload_handler = GSFileUploadHandler(request)
+
+      assert_false(upload_handler._is_gs_upload())
+
+      # Check for abfs path
+      request = Mock(GET={'dest': 'abfs://container1/key'})
+      upload_handler = GSFileUploadHandler(request)
+
+      assert_false(upload_handler._is_gs_upload())
+
+      # Check for hdfs path
+      request = Mock(GET={'dest': '/user/gethue'})
+      upload_handler = GSFileUploadHandler(request)
+
+      assert_false(upload_handler._is_gs_upload())
+
+      request = Mock(GET={'dest': 'hdfs://user/gethue'})
+      upload_handler = GSFileUploadHandler(request)
+
+      assert_false(upload_handler._is_gs_upload())
+

+ 6 - 0
desktop/core/src/desktop/lib/fs/ozone/upload_test.py

@@ -45,6 +45,12 @@ class TestOFSFileUploadHandler(object):
 
       assert_false(upload_handler._is_ofs_upload())
 
+      # Check for gs path
+      request = Mock(GET={'dest': 'gs://buck1/key'})
+      upload_handler = OFSFileUploadHandler(request)
+
+      assert_false(upload_handler._is_ofs_upload())
+
       # Check for abfs path
       request = Mock(GET={'dest': 'abfs://container1/key'})
       upload_handler = OFSFileUploadHandler(request)

+ 10 - 0
desktop/core/src/desktop/models.py

@@ -2032,6 +2032,16 @@ class ClusterConfig(object):
         'page': '/filebrowser/view=' + urllib_quote(home_path, safe=SAFE_CHARACTERS_URI_COMPONENTS)
       })
 
+    if 'filebrowser' in self.apps and fsmanager.is_enabled_and_has_access('gs', self.user):
+      home_path = remote_home_storage if remote_home_storage else 'gs://'.encode('utf-8')
+      interpreters.append({
+        'type': 'gs',
+        'displayName': _('GS'),
+        'buttonName': _('Browse'),
+        'tooltip': _('Google Storage'),
+        'page': '/filebrowser/view=' + urllib_quote(home_path, safe=SAFE_CHARACTERS_URI_COMPONENTS)
+      })
+
     if 'filebrowser' in self.apps and fsmanager.is_enabled_and_has_access('adl', self.user):
       home_path = remote_home_storage if remote_home_storage else 'adl:/'.encode('utf-8')
       interpreters.append({

+ 4 - 1
desktop/core/src/desktop/settings.py

@@ -37,7 +37,7 @@ from desktop.lib.python_util import force_dict_to_strings
 
 from aws.conf import is_enabled as is_s3_enabled
 from azure.conf import is_abfs_enabled
-from desktop.conf import is_ofs_enabled
+from desktop.conf import is_ofs_enabled, is_gs_enabled
 
 if sys.version_info[0] > 2:
   from django.utils.translation import gettext_lazy as _
@@ -663,6 +663,9 @@ file_upload_handlers = [
 if is_s3_enabled():
   file_upload_handlers.insert(0, 'aws.s3.upload.S3FileUploadHandler')
 
+if is_gs_enabled():
+  file_upload_handlers.insert(0, 'desktop.lib.fs.gc.upload.GSFileUploadHandler')
+
 if is_abfs_enabled():
   file_upload_handlers.insert(0, 'azure.abfs.upload.ABFSFileUploadHandler')
 

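Because every enabled filesystem inserts its handler at index 0, the handler registered last is consulted first. With S3, GS and ABFS all enabled, the list resolves as sketched below; the ordering is safe because each handler passes chunks through untouched unless the destination scheme matches:

file_upload_handlers = [
  'azure.abfs.upload.ABFSFileUploadHandler',       # inserted last, consulted first
  'desktop.lib.fs.gc.upload.GSFileUploadHandler',
  'aws.s3.upload.S3FileUploadHandler',
  # ... handlers from the base list follow
]
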
+ 3 - 2
desktop/libs/aws/src/aws/s3/s3fs.py

@@ -178,6 +178,7 @@ class S3FileSystem(object):
   def _get_key(self, path, validate=True):
     bucket_name, key_name = s3.parse_uri(path)[:2]
     bucket = self._get_bucket(bucket_name)
+
     try:
       return bucket.get_key(key_name, validate=validate)
     except BotoClientError as e:
@@ -198,7 +199,7 @@ class S3FileSystem(object):
       return Location.DEFAULT
 
   def _stats(self, path):
-    if s3.is_root(path):
+    if S3FileSystem.isroot(path):
       return S3Stat.for_s3_root()
 
     try:
@@ -259,7 +260,7 @@ class S3FileSystem(object):
   @staticmethod
   def parent_path(path):
     parent_dir = S3FileSystem._append_separator(path)
-    if not s3.is_root(parent_dir):
+    if not S3FileSystem.isroot(parent_dir):
       bucket_name, key_name, basename = s3.parse_uri(path)
       if not basename:  # bucket is top-level so return root
         parent_dir = S3A_ROOT