Feat: improve share views - add filters (data-dot-all#885)
### Feature or Bugfix
- Feature

### Detail
- Implements parts 1 and 3 of the design in data-dot-all#644
- Also handles the changes needed for point 6 to keep working

### Relates
- data-dot-all#644 

### Security
Please answer the questions below briefly where applicable, or write
`N/A`. Based on the
[OWASP Top 10](https://owasp.org/Top10/en/).

- Does this PR introduce or modify any input fields or queries? This
  includes fetching data from storage outside the application (e.g. a
  database, an S3 bucket). -- Yes, it introduces a new GraphQL query:
  `listAllGroups` (see the example sketch after this checklist).
  - Is the input sanitized? -- N/A, as no input is needed
- What precautions are you taking before deserializing the data you consume?
  - Is injection prevented by parametrizing queries?
  - Have you ensured no `eval` or similar functions are used?
- Does this PR introduce any functionality or component that requires
  authorization?
  - How have you ensured it respects the existing AuthN/AuthZ mechanisms?
  - Are you logging failed auth attempts?
- Are you using or adding any cryptographic features?
  - Do you use standard, proven implementations?
  - Are the used keys controlled by the customer? Where are they stored?
- Are you introducing any new policies/roles/users?
  - Have you used the least-privilege principle? How?
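For illustration, here is a minimal client-side sketch of calling the new `listAllGroups` query. The endpoint URL, auth token, and filter values are placeholders, and the selected fields follow the existing `GroupSearchResult`/`Group` types, so adjust as needed:

```python
# Hypothetical client-side sketch; endpoint, token, and filter values are placeholders.
import requests

LIST_ALL_GROUPS = """
query listAllGroups($filter: GroupFilter) {
  listAllGroups(filter: $filter) {
    count
    nodes {
      groupUri
    }
  }
}
"""

response = requests.post(
    "https://<api-gateway-endpoint>/graphql/api",  # placeholder endpoint
    headers={"Authorization": "<jwt-token>"},      # placeholder auth
    json={
        "query": LIST_ALL_GROUPS,
        "variables": {"filter": {"term": "engineering", "page": 1, "pageSize": 5}},
    },
)
print(response.json())
```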


By submitting this pull request, I confirm that my contribution is made
under the terms of the Apache 2.0 license.
dlpzx authored Nov 27, 2023
1 parent f11517a commit 8e9b7a1
Showing 27 changed files with 974 additions and 548 deletions.
2 changes: 1 addition & 1 deletion backend/dataall/core/environment/api/input_types.py
@@ -118,6 +118,6 @@ class EnvironmentSortField(GraphQLEnumMapper):
         gql.Argument('term', gql.String),
         gql.Argument(name='page', type=gql.Integer),
         gql.Argument(name='pageSize', type=gql.Integer),
-        gql.Argument('groupUri', gql.NonNullableType(gql.String)),
+        gql.Argument(name='groupUri', type=gql.String),
     ],
 )
18 changes: 18 additions & 0 deletions backend/dataall/core/environment/api/queries.py
@@ -105,6 +105,24 @@
     resolver=list_all_environment_groups,
 )
 
+listAllGroups = gql.QueryField(
+    name='listAllGroups',
+    type=gql.Ref('GroupSearchResult'),
+    args=[
+        gql.Argument(name='filter', type=gql.Ref('GroupFilter')),
+    ],
+    resolver=list_groups,
+)
+
+listAllConsumptionRoles = gql.QueryField(
+    name='listAllConsumptionRoles',
+    type=gql.Ref('ConsumptionRoleSearchResult'),
+    args=[
+        gql.Argument(name='filter', type=gql.Ref('ConsumptionRoleFilter')),
+    ],
+    resolver=list_consumption_roles,
+)
+
 listEnvironmentConsumptionRoles = gql.QueryField(
     name='listEnvironmentConsumptionRoles',
     type=gql.Ref('ConsumptionRoleSearchResult'),
19 changes: 19 additions & 0 deletions backend/dataall/core/environment/api/resolvers.py
@@ -295,6 +295,25 @@ def list_valid_environments(context: Context, source, filter=None):
         return EnvironmentService.list_valid_user_environments(session, filter)
 
 
+def list_groups(context: Context, source, filter=None):
+    if filter is None:
+        filter = {}
+    with context.engine.scoped_session() as session:
+        return EnvironmentService.paginated_user_groups(session, filter)
+
+
+def list_consumption_roles(
+    context: Context, source, environmentUri=None, filter=None
+):
+    if filter is None:
+        filter = {}
+    with context.engine.scoped_session() as session:
+        return EnvironmentService.paginated_user_consumption_roles(
+            session=session,
+            data=filter,
+        )
+
+
 def list_environment_networks(
     context: Context, source, environmentUri=None, filter=None
 ):
58 changes: 58 additions & 0 deletions backend/dataall/core/environment/services/environment_service.py
@@ -540,6 +540,64 @@ def list_valid_user_environments(session, data=None) -> dict:
             'nodes': valid_environments,
         }
 
+    @staticmethod
+    def query_user_groups(session, username, groups, filter) -> Query:
+        query = (
+            session.query(EnvironmentGroup)
+            .filter(EnvironmentGroup.groupUri.in_(groups))
+            .distinct(EnvironmentGroup.groupUri)
+        )
+        if filter and filter.get('term'):
+            term = filter['term']
+            query = query.filter(
+                or_(
+                    EnvironmentGroup.groupUri.ilike('%' + term + '%'),
+                )
+            )
+        return query
+
+    @staticmethod
+    def paginated_user_groups(session, data=None) -> dict:
+        context = get_context()
+        return paginate(
+            query=EnvironmentService.query_user_groups(session, context.username, context.groups, data),
+            page=data.get('page', 1),
+            page_size=data.get('pageSize', 5),
+        ).to_dict()
+
+    @staticmethod
+    def query_user_consumption_roles(session, username, groups, filter) -> Query:
+        query = (
+            session.query(ConsumptionRole)
+            .filter(ConsumptionRole.groupUri.in_(groups))
+            .distinct(ConsumptionRole.consumptionRoleName)
+        )
+        if filter and filter.get('term'):
+            term = filter['term']
+            query = query.filter(
+                or_(
+                    ConsumptionRole.consumptionRoleName.ilike('%' + term + '%'),
+                )
+            )
+        if filter and filter.get('groupUri'):
+            print("filter group")
+            group = filter['groupUri']
+            query = query.filter(
+                or_(
+                    ConsumptionRole.groupUri == group,
+                )
+            )
+        return query
+
+    @staticmethod
+    def paginated_user_consumption_roles(session, data=None) -> dict:
+        context = get_context()
+        return paginate(
+            query=EnvironmentService.query_user_consumption_roles(session, context.username, context.groups, data),
+            page=data.get('page', 1),
+            page_size=data.get('pageSize', 5),
+        ).to_dict()
+
     @staticmethod
     def query_user_environment_groups(session, groups, uri, filter) -> Query:
         query = (
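Below is a minimal sketch (not part of the diff) of how the new service methods might be exercised, e.g. from a resolver or a test. `session` is assumed to be an open session from `context.engine.scoped_session()`, and the filter values are illustrative:

```python
# Sketch only: the filter keys mirror the ones read by the query helpers above.
from dataall.core.environment.services.environment_service import EnvironmentService


def example_usage(session):
    groups_page = EnvironmentService.paginated_user_groups(
        session,
        data={'term': 'data', 'page': 1, 'pageSize': 5},
    )
    roles_page = EnvironmentService.paginated_user_consumption_roles(
        session,
        data={'groupUri': 'team-a', 'term': 'analyst', 'page': 1, 'pageSize': 5},
    )
    # Both calls return the paginated dict produced by paginate(...).to_dict().
    return groups_page, roles_page
```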
8 changes: 5 additions & 3 deletions backend/dataall/modules/dataset_sharing/api/input_types.py
@@ -1,5 +1,4 @@
 from dataall.base.api.constants import *
-from dataall.core.organizations.api.enums import OrganisationUserRole
 from dataall.modules.dataset_sharing.api.enums import ShareableType, ShareSortField
 
 
@@ -54,8 +53,11 @@
         gql.Argument('sort', gql.ArrayType(ShareSortCriteria)),
         gql.Argument('page', gql.Integer),
         gql.Argument('pageSize', gql.Integer),
-        gql.Argument('roles', gql.ArrayType(OrganisationUserRole.toGraphQLEnum())),
-        gql.Argument('tags', gql.ArrayType(gql.String)),
+        gql.Argument('status', gql.ArrayType(gql.String)),
+        gql.Argument('dataset_owners', gql.ArrayType(gql.String)),
+        gql.Argument('datasets_uris', gql.ArrayType(gql.String)),
+        gql.Argument('share_requesters', gql.ArrayType(gql.String)),
+        gql.Argument('share_iam_roles', gql.ArrayType(gql.String)),
     ],
 )
 
@@ -590,6 +590,32 @@ def list_user_received_share_requests(session, username, groups, data=None):
                 )
             )
         )
+
+        if data and data.get('status'):
+            if len(data.get('status')) > 0:
+                query = query.filter(
+                    ShareObject.status.in_(data.get('status'))
+                )
+        if data and data.get('dataset_owners'):
+            if len(data.get('dataset_owners')) > 0:
+                query = query.filter(
+                    Dataset.SamlAdminGroupName.in_(data.get('dataset_owners'))
+                )
+        if data and data.get('datasets_uris'):
+            if len(data.get('datasets_uris')) > 0:
+                query = query.filter(
+                    ShareObject.datasetUri.in_(data.get('datasets_uris'))
+                )
+        if data and data.get('share_requesters'):
+            if len(data.get('share_requesters')) > 0:
+                query = query.filter(
+                    ShareObject.groupUri.in_(data.get('share_requesters'))
+                )
+        if data and data.get('share_iam_roles'):
+            if len(data.get('share_iam_roles')) > 0:
+                query = query.filter(
+                    ShareObject.principalIAMRoleName.in_(data.get('share_iam_roles'))
+                )
         return paginate(query, data.get('page', 1), data.get('pageSize', 10)).to_dict()
 
     @staticmethod
@@ -600,6 +626,10 @@ def list_user_sent_share_requests(session, username, groups, data=None):
                 Environment,
                 Environment.environmentUri == ShareObject.environmentUri,
             )
+            .join(
+                Dataset,
+                Dataset.datasetUri == ShareObject.datasetUri,
+            )
             .filter(
                 or_(
                     ShareObject.owner == username,
@@ -610,6 +640,31 @@
                 )
             )
         )
+        if data and data.get('status'):
+            if len(data.get('status')) > 0:
+                query = query.filter(
+                    ShareObject.status.in_(data.get('status'))
+                )
+        if data and data.get('dataset_owners'):
+            if len(data.get('dataset_owners')) > 0:
+                query = query.filter(
+                    Dataset.SamlAdminGroupName.in_(data.get('dataset_owners'))
+                )
+        if data and data.get('datasets_uris'):
+            if len(data.get('datasets_uris')) > 0:
+                query = query.filter(
+                    ShareObject.datasetUri.in_(data.get('datasets_uris'))
+                )
+        if data and data.get('share_requesters'):
+            if len(data.get('share_requesters')) > 0:
+                query = query.filter(
+                    ShareObject.groupUri.in_(data.get('share_requesters'))
+                )
+        if data and data.get('share_iam_roles'):
+            if len(data.get('share_iam_roles')) > 0:
+                query = query.filter(
+                    ShareObject.principalIAMRoleName.in_(data.get('share_iam_roles'))
+                )
        return paginate(query, data.get('page', 1), data.get('pageSize', 10)).to_dict()
 
     @staticmethod
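Both the received and sent listings above now accept the same five optional filters, each applied as an `.in_()` clause when present. A sketch of the filter payload a caller might pass to these repository methods (all values are placeholders):

```python
# Placeholder values; every key is optional and maps to one of the .in_() filters above.
share_filter = {
    'status': ['Submitted', 'Approved'],
    'dataset_owners': ['DataScienceTeam'],
    'datasets_uris': ['dataset-uri-1234'],
    'share_requesters': ['AnalyticsTeam'],
    'share_iam_roles': ['consumption-role-name'],
    'page': 1,
    'pageSize': 10,
}
```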
13 changes: 11 additions & 2 deletions backend/dataall/modules/datasets/api/dataset/queries.py
@@ -2,7 +2,8 @@
 from dataall.modules.datasets.api.dataset.input_types import DatasetFilter
 from dataall.modules.datasets.api.dataset.resolvers import (
     get_dataset,
-    list_datasets,
+    list_owned_shared_datasets,
+    list_owned_datasets,
     get_dataset_assume_role_url,
     get_file_upload_presigned_url,
     list_dataset_share_objects,
@@ -24,7 +25,15 @@
     name='listDatasets',
     args=[gql.Argument('filter', DatasetFilter)],
     type=DatasetSearchResult,
-    resolver=list_datasets,
+    resolver=list_owned_shared_datasets,
     test_scope='Dataset',
 )
 
+listOwnedDatasets = gql.QueryField(
+    name='listOwnedDatasets',
+    args=[gql.Argument('filter', DatasetFilter)],
+    type=DatasetSearchResult,
+    resolver=list_owned_datasets,
+    test_scope='Dataset',
+)
+
10 changes: 8 additions & 2 deletions backend/dataall/modules/datasets/api/dataset/resolvers.py
@@ -65,10 +65,16 @@ def get_file_upload_presigned_url(
     return DatasetService.get_file_upload_presigned_url(uri=datasetUri, data=input)
 
 
-def list_datasets(context: Context, source, filter: dict = None):
+def list_owned_shared_datasets(context: Context, source, filter: dict = None):
     if not filter:
         filter = {'page': 1, 'pageSize': 5}
-    return DatasetService.list_datasets(filter)
+    return DatasetService.list_owned_shared_datasets(filter)
+
+
+def list_owned_datasets(context: Context, source, filter: dict = None):
+    if not filter:
+        filter = {'page': 1, 'pageSize': 5}
+    return DatasetService.list_owned_datasets(filter)
 
 
 def list_locations(context, source: Dataset, filter: dict = None):
10 changes: 9 additions & 1 deletion backend/dataall/modules/datasets/services/dataset_service.py
@@ -153,13 +153,21 @@ def get_file_upload_presigned_url(uri: str, data: dict):
         return S3DatasetClient(dataset).get_file_upload_presigned_url(data)
 
     @staticmethod
-    def list_datasets(data: dict):
+    def list_owned_shared_datasets(data: dict):
         context = get_context()
         with context.db_engine.scoped_session() as session:
             return ShareObjectRepository.paginated_user_datasets(
                 session, context.username, context.groups, data=data
             )
 
+    @staticmethod
+    def list_owned_datasets(data: dict):
+        context = get_context()
+        with context.db_engine.scoped_session() as session:
+            return DatasetRepository.paginated_user_datasets(
+                session, context.username, context.groups, data=data
+            )
+
     @staticmethod
     def list_locations(dataset_uri, data: dict):
         with get_context().db_engine.scoped_session() as session:
32 changes: 31 additions & 1 deletion backend/dataall/modules/datasets_base/db/dataset_repositories.py
@@ -2,7 +2,6 @@
 
 from sqlalchemy import and_, or_
 from sqlalchemy.orm import Query
-
 from dataall.core.activity.db.activity_models import Activity
 from dataall.core.environment.services.environment_service import EnvironmentService
 from dataall.core.organizations.db.organization_repositories import Organization
@@ -348,6 +347,37 @@ def list_group_datasets(session, environment_id, group_uri):
             .all()
         )
 
+    @staticmethod
+    def paginated_user_datasets(
+        session, username, groups, data=None
+    ) -> dict:
+        return paginate(
+            query=DatasetRepository._query_user_datasets(session, username, groups, data),
+            page=data.get('page', 1),
+            page_size=data.get('pageSize', 10),
+        ).to_dict()
+
+    @staticmethod
+    def _query_user_datasets(session, username, groups, filter) -> Query:
+        query = (
+            session.query(Dataset)
+            .filter(
+                or_(
+                    Dataset.owner == username,
+                    Dataset.SamlAdminGroupName.in_(groups),
+                    Dataset.stewards.in_(groups),
+                )
+            )
+        )
+        if filter and filter.get('term'):
+            query = query.filter(
+                or_(
+                    Dataset.description.ilike(filter.get('term') + '%%'),
+                    Dataset.label.ilike(filter.get('term') + '%%'),
+                )
+            )
+        return query.distinct(Dataset.datasetUri)
+
     @staticmethod
     def _set_import_data(dataset, data):
         dataset.imported = True if data.get('imported') else False
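A minimal sketch of calling the new repository method; the username and groups values are placeholders (in the application they are taken from `get_context()` inside `DatasetService.list_owned_datasets`):

```python
# Sketch only: username/groups are placeholders supplied by the caller.
from dataall.modules.datasets_base.db.dataset_repositories import DatasetRepository


def list_my_datasets(session):
    return DatasetRepository.paginated_user_datasets(
        session,
        username='some.user@example.com',
        groups=['DataScienceTeam'],
        data={'term': 'sales', 'page': 1, 'pageSize': 10},
    )
```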
2 changes: 1 addition & 1 deletion frontend/src/modules/Datasets/services/index.js
@@ -3,8 +3,8 @@ export * from './deleteDataset';
 export * from './generateDatasetAccessToken';
 export * from './getDatasetPresignedUrl';
 export * from './importDataset';
-export * from './listDatasetStorageLocations';
 export * from './listDatasets';
+export * from './listDatasetStorageLocations';
 export * from './startGlueCrawler';
 export * from './syncTables';
 export * from './updateDataset';
5 changes: 2 additions & 3 deletions frontend/src/modules/Datasets/views/DatasetView.js
@@ -35,7 +35,7 @@ import {
 import { SET_ERROR, useDispatch } from 'globalErrors';
 import { getDataset, countUpVotes, getVote, upVote, useClient } from 'services';
 import { deleteDataset } from '../services';
-import { ShareInboxList } from 'modules/Shares';
+import { ShareBoxList } from 'modules/Shares';
 import {
   FeedComments,
   KeyValueTagList,
@@ -66,7 +66,6 @@ const DatasetView = () => {
   const [upVotes, setUpvotes] = useState(null);
   const [stack, setStack] = useState(null);
   const [openFeed, setOpenFeed] = useState(false);
-
   const getTabs = () => {
     const tabs = [
       {
@@ -337,7 +336,7 @@ const DatasetView = () => {
             <DatasetOverview dataset={dataset} isAdmin={isAdmin} />
           )}
           {isAdmin && currentTab === 'shares' && (
-            <ShareInboxList dataset={dataset} />
+            <ShareBoxList tab={'inbox'} dataset={dataset} />
           )}
           {isAdmin && currentTab === 'upload' && (
             <DatasetUpload dataset={dataset} isAdmin={isAdmin} />