Skip to content

Commit

Permalink
Merge branch 'master' into chore/ccrypt_usersync
Browse files Browse the repository at this point in the history
  • Loading branch information
nss10 authored Jan 27, 2025
2 parents 29bf703 + 18e2acf commit 57b1b2b
Show file tree
Hide file tree
Showing 7 changed files with 69 additions and 36 deletions.
1 change: 1 addition & 0 deletions fence/blueprints/login/ras.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,7 @@ def post_login(self, user=None, token_result=None, id_from_idp=None):
[passport],
pkey_cache=PKEY_CACHE,
db_session=current_app.scoped_session(),
skip_google_updates=True,
)
user_ids_from_passports = list(users_from_passports.keys())

Expand Down
11 changes: 6 additions & 5 deletions fence/resources/ga4gh/passports.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,7 @@


def sync_gen3_users_authz_from_ga4gh_passports(
passports,
pkey_cache=None,
db_session=None,
passports, pkey_cache=None, db_session=None, skip_google_updates=False
):
"""
Validate passports and embedded visas, using each valid visa's identity
Expand All @@ -49,6 +47,7 @@ def sync_gen3_users_authz_from_ga4gh_passports(
Args:
passports (list): a list of raw encoded passport strings, each
including header, payload, and signature
skip_google_updates (bool): True if google group updates should be skipped. False otherwise.
Return:
list: a list of users, each corresponding to a valid visa identity
Expand Down Expand Up @@ -151,6 +150,7 @@ def sync_gen3_users_authz_from_ga4gh_passports(
ga4gh_visas=ga4gh_visas,
expiration=min_visa_expiration,
db_session=db_session,
skip_google_updates=skip_google_updates,
)
users_from_current_passport.append(gen3_user)

Expand Down Expand Up @@ -366,7 +366,7 @@ def get_or_create_gen3_user_from_iss_sub(issuer, subject_id, db_session=None):


def _sync_validated_visa_authorization(
gen3_user, ga4gh_visas, expiration, db_session=None
gen3_user, ga4gh_visas, expiration, db_session=None, skip_google_updates=False
):
"""
Wrapper around UserSyncer.sync_single_user_visas method, which parses
Expand All @@ -383,7 +383,7 @@ def _sync_validated_visa_authorization(
that are parsed
expiration (int): time at which synced Arborist policies and
inclusion in any GBAG are set to expire
skip_google_updates (bool): True if google group updates should be skipped. False otherwise.
Return:
None
"""
Expand All @@ -398,6 +398,7 @@ def _sync_validated_visa_authorization(
ga4gh_visas,
db_session,
expires=expiration,
skip_google_updates=skip_google_updates,
)

# after syncing authorization, persist the visas that were parsed successfully.
Expand Down
52 changes: 34 additions & 18 deletions fence/sync/sync_users.py
Original file line number Diff line number Diff line change
Expand Up @@ -931,7 +931,9 @@ def sync_to_db_and_storage_backend(

sess.commit()

def sync_to_storage_backend(self, user_project, user_info, sess, expires):
def sync_to_storage_backend(
self, user_project, user_info, sess, expires, skip_google_updates=False
):
"""
sync user access control to storage backend with given expiration
Expand All @@ -947,7 +949,9 @@ def sync_to_storage_backend(self, user_project, user_info, sess, expires):
user_info (dict): a dictionary of attributes for a user.
sess: a sqlalchemy session
expires (int): time at which synced Arborist policies and
inclusion in any GBAG are set to expire
skip_google_updates (bool): True if google group updates should be skipped. False otherwise.
Return:
None
"""
Expand Down Expand Up @@ -986,19 +990,19 @@ def sync_to_storage_backend(self, user_project, user_info, sess, expires):
# when updating users we want to maintain case sensitivity in the username so
# pass the original, non-lowered user_info dict
self._upsert_userinfo(sess, {user_info["username"].lower(): user_info})
if not skip_google_updates:
self._grant_from_storage(
to_add,
user_project_lowercase,
sess,
google_bulk_mapping=google_group_user_mapping,
expires=expires,
)

self._grant_from_storage(
to_add,
user_project_lowercase,
sess,
google_bulk_mapping=google_group_user_mapping,
expires=expires,
)

if config["GOOGLE_BULK_UPDATES"]:
self.logger.info("Updating user's google groups ...")
update_google_groups_for_users(google_group_user_mapping)
self.logger.info("Google groups update done!!")
if config["GOOGLE_BULK_UPDATES"]:
self.logger.info("Updating user's google groups ...")
update_google_groups_for_users(google_group_user_mapping)
self.logger.info("Google groups update done!!")

sess.commit()

Expand Down Expand Up @@ -1464,6 +1468,7 @@ def _process_user_projects(
dbgap_config,
sess,
):
user_projects_to_modify = copy.deepcopy(user_projects)
for username in user_projects.keys():
for project in user_projects[username].keys():
phsid = project.split(".")
Expand Down Expand Up @@ -1500,7 +1505,7 @@ def _process_user_projects(
privileges,
username,
sess,
user_projects,
user_projects_to_modify,
dbgap_config,
)

Expand All @@ -1511,9 +1516,11 @@ def _process_user_projects(
privileges,
username,
sess,
user_projects,
user_projects_to_modify,
dbgap_config,
)
for user in user_projects_to_modify.keys():
user_projects[user] = user_projects_to_modify[user]

def sync(self):
if self.session:
Expand Down Expand Up @@ -2412,7 +2419,9 @@ def _pick_sync_type(self, visa):

return sync_client

def sync_single_user_visas(self, user, ga4gh_visas, sess=None, expires=None):
def sync_single_user_visas(
self, user, ga4gh_visas, sess=None, expires=None, skip_google_updates=False
):
"""
Sync a single user's visas during login or DRS/data access
Expand All @@ -2427,6 +2436,7 @@ def sync_single_user_visas(self, user, ga4gh_visas, sess=None, expires=None):
sess (sqlalchemy.orm.session.Session): database session
expires (int): time at which synced Arborist policies and
inclusion in any GBAG are set to expire
skip_google_updates (bool): True if google group updates should be skipped. False otherwise.
Return:
list of successfully parsed visas
Expand Down Expand Up @@ -2502,7 +2512,13 @@ def sync_single_user_visas(self, user, ga4gh_visas, sess=None, expires=None):

if user_projects:
self.logger.info("Sync to storage backend [sync_single_user_visas]")
self.sync_to_storage_backend(user_projects, info, sess, expires=expires)
self.sync_to_storage_backend(
user_projects,
info,
sess,
expires=expires,
skip_google_updates=skip_google_updates,
)
else:
self.logger.info("No users for syncing")

Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "fence"
version = "10.4.1"
version = "10.4.2"
description = "Gen3 AuthN/AuthZ OIDC Service"
authors = ["CTDS UChicago <cdis@uchicago.edu>"]
license = "Apache-2.0"
Expand Down
22 changes: 18 additions & 4 deletions tests/dbgap_sync/test_user_sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,7 @@ def test_sync(
"phs000178.c999": ["read", "read-storage"],
"phs000179.c1": ["read", "read-storage"],
"PROJECT-12345": ["read", "read-storage"],
"test_common_exchange_area": ["read-storage", "read"],
},
)
else:
Expand All @@ -154,6 +155,7 @@ def test_sync(
"phs000178.c2": ["read", "read-storage"],
"phs000178.c999": ["read", "read-storage"],
"phs000179.c1": ["read", "read-storage"],
"test_common_exchange_area": ["read", "read-storage"],
},
)

Expand Down Expand Up @@ -490,29 +492,40 @@ def test_sync_with_google_errors(syncer, monkeypatch):
syncer._update_arborist = MagicMock()
syncer._update_authz_in_arborist = MagicMock()

with patch("fence.sync.sync_users.update_google_groups_for_users") as mock_bulk_update:
with patch(
"fence.sync.sync_users.update_google_groups_for_users"
) as mock_bulk_update:
mock_bulk_update.side_effect = GoogleUpdateException("Something's Wrong!")
with pytest.raises(GoogleUpdateException):
syncer.sync()

syncer._update_arborist.assert_called()
syncer._update_authz_in_arborist.assert_called()


@patch("fence.sync.sync_users.paramiko.SSHClient")
@patch("os.makedirs")
@patch("os.path.exists", return_value=False)
@pytest.mark.parametrize("syncer", ["google", "cleversafe"], indirect=True)
def test_sync_with_sftp_connection_errors(mock_path, mock_makedir, mock_ssh_client, syncer, monkeypatch):
def test_sync_with_sftp_connection_errors(
mock_path, mock_makedir, mock_ssh_client, syncer, monkeypatch
):
"""
Verifies that when there is an sftp connection error, the connection is retried the maximum number of
tries as configured by DEFAULT_BACKOFF_SETTINGS
"""
monkeypatch.setattr(syncer, "is_sync_from_dbgap_server", True)
mock_ssh_client.return_value.__enter__.return_value.connect.side_effect = Exception("Authentication timed out")
mock_ssh_client.return_value.__enter__.return_value.connect.side_effect = Exception(
"Authentication timed out"
)
# usersync System Exits if any exception is raised during download.
with pytest.raises(SystemExit):
syncer.sync()
assert mock_ssh_client.return_value.__enter__.return_value.connect.call_count == DEFAULT_BACKOFF_SETTINGS['max_tries']
assert (
mock_ssh_client.return_value.__enter__.return_value.connect.call_count
== DEFAULT_BACKOFF_SETTINGS["max_tries"]
)


@pytest.mark.parametrize("syncer", ["google", "cleversafe"], indirect=True)
def test_sync_from_files(syncer, db_session, storage_client):
Expand Down Expand Up @@ -1063,6 +1076,7 @@ def test_revoke_all_policies_no_user(db_session, syncer):
# we only care that this doesn't error
assert True


@pytest.mark.parametrize("syncer", ["cleversafe", "google"], indirect=True)
def test_revoke_all_policies_preserve_mfa(monkeypatch, db_session, syncer):
"""
Expand Down
3 changes: 2 additions & 1 deletion tests/test-fence-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -345,13 +345,14 @@ dbGaP:
# have access to the parent study's "common exchange area data" that is not study
# specific. The following config is whether or not to parse/handle "c999" codes
# for exchange area data
enable_common_exchange_area_access: false
enable_common_exchange_area_access: true
# The below configuration is a mapping from studies to their "common exchange area data"
# Fence project name a user gets access to when parsing c999 exchange area codes (and
# subsequently gives access to an arborist resource representing this common area
# as well)
study_common_exchange_areas:
'phs000178': 'test_common_exchange_area'
'phs000298': 'test_common_exchange_area_2'
# A mapping from the dbgap study / Fence project to which authorization namespaces the
# actual data lives in. For example, `studyX` data may exist in multiple organizations, so
# we need to know to map authorization to all orgs resources
Expand Down
14 changes: 7 additions & 7 deletions tests/test_drs.py
Original file line number Diff line number Diff line change
Expand Up @@ -361,7 +361,7 @@ def test_passport_use_disabled(
"phs_id": "phs000298",
"version": "v4",
"participant_set": "p3",
"consent_group": "c1",
"consent_group": "c999",
"role": "designated user",
"expiration": int(time.time()) + 1001,
},
Expand Down Expand Up @@ -520,7 +520,7 @@ def test_get_presigned_url_for_non_public_data_with_passport(
"phs_id": "phs000298",
"version": "v4",
"participant_set": "p3",
"consent_group": "c1",
"consent_group": "c999",
"role": "designated user",
"expiration": int(time.time()) + 1001,
},
Expand Down Expand Up @@ -678,7 +678,7 @@ def test_get_presigned_url_with_passport_with_incorrect_authz(
"phs_id": "phs000298",
"version": "v4",
"participant_set": "p3",
"consent_group": "c1",
"consent_group": "c999",
"role": "designated user",
"expiration": int(time.time()) + 1001,
},
Expand Down Expand Up @@ -974,7 +974,7 @@ def test_passport_cache_valid_passport(
"phs_id": "phs000298",
"version": "v4",
"participant_set": "p3",
"consent_group": "c1",
"consent_group": "c999",
"role": "designated user",
"expiration": current_time + 1000,
},
Expand Down Expand Up @@ -1187,7 +1187,7 @@ def test_passport_cache_invalid_passport(
"phs_id": "phs000298",
"version": "v4",
"participant_set": "p3",
"consent_group": "c1",
"consent_group": "c999",
"role": "designated user",
"expiration": current_time + 1000,
},
Expand Down Expand Up @@ -1404,7 +1404,7 @@ def test_passport_cache_expired_in_memory_valid_in_db(
"phs_id": "phs000298",
"version": "v4",
"participant_set": "p3",
"consent_group": "c1",
"consent_group": "c999",
"role": "designated user",
"expiration": current_time + 1000,
},
Expand Down Expand Up @@ -1635,7 +1635,7 @@ def test_passport_cache_expired(
"phs_id": "phs000298",
"version": "v4",
"participant_set": "p3",
"consent_group": "c1",
"consent_group": "c999",
"role": "designated user",
"expiration": current_time + 2,
},
Expand Down

0 comments on commit 57b1b2b

Please sign in to comment.