diff --git a/fence/blueprints/login/ras.py b/fence/blueprints/login/ras.py
index 31bd7a340..911cf86dd 100644
--- a/fence/blueprints/login/ras.py
+++ b/fence/blueprints/login/ras.py
@@ -79,6 +79,7 @@ def post_login(self, user=None, token_result=None, id_from_idp=None):
             [passport],
             pkey_cache=PKEY_CACHE,
             db_session=current_app.scoped_session(),
+            skip_google_updates=True,
         )
         user_ids_from_passports = list(users_from_passports.keys())
 
diff --git a/fence/resources/ga4gh/passports.py b/fence/resources/ga4gh/passports.py
index 9b8a84179..c971d3634 100644
--- a/fence/resources/ga4gh/passports.py
+++ b/fence/resources/ga4gh/passports.py
@@ -34,9 +34,7 @@
 
 
 def sync_gen3_users_authz_from_ga4gh_passports(
-    passports,
-    pkey_cache=None,
-    db_session=None,
+    passports, pkey_cache=None, db_session=None, skip_google_updates=False
 ):
     """
     Validate passports and embedded visas, using each valid visa's identity
@@ -49,6 +47,7 @@ def sync_gen3_users_authz_from_ga4gh_passports(
     Args:
         passports (list): a list of raw encoded passport strings, each
            including header, payload, and signature
+        skip_google_updates (bool): True if Google group updates should be skipped, False otherwise.
 
     Return:
         list: a list of users, each corresponding to a valid visa identity
@@ -151,6 +150,7 @@ def sync_gen3_users_authz_from_ga4gh_passports(
                     ga4gh_visas=ga4gh_visas,
                     expiration=min_visa_expiration,
                     db_session=db_session,
+                    skip_google_updates=skip_google_updates,
                 )
                 users_from_current_passport.append(gen3_user)
 
@@ -366,7 +366,7 @@ def get_or_create_gen3_user_from_iss_sub(issuer, subject_id, db_session=None):
 
 
 def _sync_validated_visa_authorization(
-    gen3_user, ga4gh_visas, expiration, db_session=None
+    gen3_user, ga4gh_visas, expiration, db_session=None, skip_google_updates=False
 ):
     """
     Wrapper around UserSyncer.sync_single_user_visas method, which parses
@@ -383,7 +383,7 @@ def _sync_validated_visa_authorization(
             that are parsed
         expiration (int): time at which synced Arborist policies and
             inclusion in any GBAG are set to expire
-
+        skip_google_updates (bool): True if Google group updates should be skipped, False otherwise.
     Return:
         None
     """
@@ -398,6 +398,7 @@ def _sync_validated_visa_authorization(
         ga4gh_visas,
         db_session,
         expires=expiration,
+        skip_google_updates=skip_google_updates,
     )
 
     # after syncing authorization, persist the visas that were parsed successfully.
diff --git a/fence/sync/sync_users.py b/fence/sync/sync_users.py
index 5a11c4ac6..69273acf5 100644
--- a/fence/sync/sync_users.py
+++ b/fence/sync/sync_users.py
@@ -659,9 +659,13 @@ def _parse_csv(self, file_dict, sess, dbgap_config={}, encrypted=True):
                     tags["pi"] = row["downloader for names"]
 
                 user_info[username] = {
-                    "email": row.get("email") or user_info[username].get('email') or "",
+                    "email": row.get("email")
+                    or user_info[username].get("email")
+                    or "",
                     "display_name": display_name,
-                    "phone_number": row.get("phone") or user_info[username].get('phone_number') or "",
+                    "phone_number": row.get("phone")
+                    or user_info[username].get("phone_number")
+                    or "",
                     "tags": tags,
                 }
 
@@ -937,7 +941,9 @@ def sync_to_db_and_storage_backend(
 
         sess.commit()
 
-    def sync_to_storage_backend(self, user_project, user_info, sess, expires):
+    def sync_to_storage_backend(
+        self, user_project, user_info, sess, expires, skip_google_updates=False
+    ):
         """
         sync user access control to storage backend with given expiration
 
@@ -953,7 +959,9 @@ def sync_to_storage_backend(self, user_project, user_info, sess, expires):
                 {username: {project: {'read','write'}}
            user_info (dict): a dictionary of attributes for a user.
            sess: a sqlalchemy session
-
+            expires (int): time at which synced Arborist policies and
+                inclusion in any GBAG are set to expire
+            skip_google_updates (bool): True if Google group updates should be skipped, False otherwise.
        Return:
            None
        """
@@ -967,10 +975,10 @@ def sync_to_storage_backend(self, user_project, user_info, sess, expires):
        google_group_user_mapping = {}
        get_or_create_proxy_group_id(
            expires=expires,
-            user_id=user_info['user_id'],
-            username=user_info['username'],
+            user_id=user_info["user_id"],
+            username=user_info["username"],
            session=sess,
-            storage_manager=self.storage_manager
+            storage_manager=self.storage_manager,
        )
 
        # TODO: eventually it'd be nice to remove this step but it's required
@@ -987,14 +995,11 @@ def sync_to_storage_backend(self, user_project, user_info, sess, expires):
 
            for project, _ in projects.items():
                syncing_user_project_list.add((username.lower(), project))
-
        to_add = set(syncing_user_project_list)
 
        # when updating users we want to maintain case sensitivity in the username so
        # pass the original, non-lowered user_info dict
-        self._upsert_userinfo(sess, {
-            user_info['username'].lower(): user_info
-        })
+        self._upsert_userinfo(sess, {user_info["username"].lower(): user_info})
 
        self._grant_from_storage(
            to_add,
@@ -1004,7 +1009,7 @@ def sync_to_storage_backend(self, user_project, user_info, sess, expires):
            expires=expires,
        )
 
-        if config["GOOGLE_BULK_UPDATES"]:
+        if config["GOOGLE_BULK_UPDATES"] and not skip_google_updates:
            self.logger.info("Updating user's google groups ...")
            update_google_groups_for_users(google_group_user_mapping)
            self.logger.info("Google groups update done!!")
@@ -1473,6 +1478,7 @@ def _process_user_projects(
        dbgap_config,
        sess,
    ):
+        user_projects_to_modify = copy.deepcopy(user_projects)
        for username in user_projects.keys():
            for project in user_projects[username].keys():
                phsid = project.split(".")
@@ -1509,7 +1515,7 @@ def _process_user_projects(
                        privileges,
                        username,
                        sess,
-                        user_projects,
+                        user_projects_to_modify,
                        dbgap_config,
                    )
 
@@ -1520,9 +1526,11 @@ def _process_user_projects(
                        privileges,
                        username,
                        sess,
-                        user_projects,
+                        user_projects_to_modify,
                        dbgap_config,
                    )
+        for user in user_projects_to_modify.keys():
+            user_projects[user] = user_projects_to_modify[user]
 
    def sync(self):
        if self.session:
@@ -2421,7 +2429,9 @@ def _pick_sync_type(self, visa):
 
        return sync_client
 
-    def sync_single_user_visas(self, user, ga4gh_visas, sess=None, expires=None):
+    def sync_single_user_visas(
+        self, user, ga4gh_visas, sess=None, expires=None, skip_google_updates=False
+    ):
        """
        Sync a single user's visas during login or DRS/data access
 
@@ -2436,6 +2446,7 @@ def sync_single_user_visas(self, user, ga4gh_visas, sess=None, expires=None):
            sess (sqlalchemy.orm.session.Session): database session
            expires (int): time at which synced Arborist policies and
                inclusion in any GBAG are set to expire
+            skip_google_updates (bool): True if Google group updates should be skipped, False otherwise.
 
        Return:
            list of successfully parsed visas
@@ -2485,8 +2496,8 @@ def sync_single_user_visas(self, user, ga4gh_visas, sess=None, expires=None):
            projects = {**projects, **project}
            parsed_visas.append(visa)
 
-        info['user_id'] = user.id
-        info['username'] = user.username
+        info["user_id"] = user.id
+        info["username"] = user.username
        user_projects[user.username] = projects
        user_projects = self.parse_projects(user_projects)
 
@@ -2512,7 +2523,11 @@ def sync_single_user_visas(self, user, ga4gh_visas, sess=None, expires=None):
        if user_projects:
            self.logger.info("Sync to storage backend [sync_single_user_visas]")
            self.sync_to_storage_backend(
-                user_projects, info, sess, expires=expires
+                user_projects,
+                info,
+                sess,
+                expires=expires,
+                skip_google_updates=skip_google_updates,
            )
        else:
            self.logger.info("No users for syncing")
diff --git a/tests/dbgap_sync/test_user_sync.py b/tests/dbgap_sync/test_user_sync.py
index f85cc28e5..13b07d45b 100644
--- a/tests/dbgap_sync/test_user_sync.py
+++ b/tests/dbgap_sync/test_user_sync.py
@@ -143,6 +143,7 @@ def test_sync(
                "phs000178.c999": ["read", "read-storage"],
                "phs000179.c1": ["read", "read-storage"],
                "PROJECT-12345": ["read", "read-storage"],
+                "test_common_exchange_area": ["read-storage", "read"],
            },
        )
    else:
@@ -154,6 +155,7 @@ def test_sync(
                "phs000178.c2": ["read", "read-storage"],
                "phs000178.c999": ["read", "read-storage"],
                "phs000179.c1": ["read", "read-storage"],
+                "test_common_exchange_area": ["read", "read-storage"],
            },
        )
 
@@ -490,7 +492,9 @@ def test_sync_with_google_errors(syncer, monkeypatch):
    syncer._update_arborist = MagicMock()
    syncer._update_authz_in_arborist = MagicMock()
 
-    with patch("fence.sync.sync_users.update_google_groups_for_users") as mock_bulk_update:
+    with patch(
+        "fence.sync.sync_users.update_google_groups_for_users"
+    ) as mock_bulk_update:
        mock_bulk_update.side_effect = GoogleUpdateException("Something's Wrong!")
        with pytest.raises(GoogleUpdateException):
            syncer.sync()
@@ -498,21 +502,30 @@ def test_sync_with_google_errors(syncer, monkeypatch):
    syncer._update_arborist.assert_called()
    syncer._update_authz_in_arborist.assert_called()
 
+
 @patch("fence.sync.sync_users.paramiko.SSHClient")
 @patch("os.makedirs")
 @patch("os.path.exists", return_value=False)
 @pytest.mark.parametrize("syncer", ["google", "cleversafe"], indirect=True)
-def test_sync_with_sftp_connection_errors(mock_path, mock_makedir, mock_ssh_client, syncer, monkeypatch):
+def test_sync_with_sftp_connection_errors(
+    mock_path, mock_makedir, mock_ssh_client, syncer, monkeypatch
+):
    """
    Verifies that when there is an sftp connection error connection, that the connection
    is retried the max amount of tries as configured by DEFAULT_BACKOFF_SETTINGS
    """
    monkeypatch.setattr(syncer, "is_sync_from_dbgap_server", True)
-    mock_ssh_client.return_value.__enter__.return_value.connect.side_effect = Exception("Authentication timed out")
+    mock_ssh_client.return_value.__enter__.return_value.connect.side_effect = Exception(
+        "Authentication timed out"
+    )
    # usersync System Exits if any exception is raised during download.
    with pytest.raises(SystemExit):
        syncer.sync()
-    assert mock_ssh_client.return_value.__enter__.return_value.connect.call_count == DEFAULT_BACKOFF_SETTINGS['max_tries']
+    assert (
+        mock_ssh_client.return_value.__enter__.return_value.connect.call_count
+        == DEFAULT_BACKOFF_SETTINGS["max_tries"]
+    )
+
 
 @pytest.mark.parametrize("syncer", ["google", "cleversafe"], indirect=True)
 def test_sync_from_files(syncer, db_session, storage_client):
@@ -1063,6 +1076,7 @@ def test_revoke_all_policies_no_user(db_session, syncer):
    # we only care that this doesn't error
    assert True
 
+
 @pytest.mark.parametrize("syncer", ["cleversafe", "google"], indirect=True)
 def test_revoke_all_policies_preserve_mfa(monkeypatch, db_session, syncer):
    """
diff --git a/tests/test-fence-config.yaml b/tests/test-fence-config.yaml
index 38ccbd147..6fcb80ee2 100755
--- a/tests/test-fence-config.yaml
+++ b/tests/test-fence-config.yaml
@@ -345,13 +345,14 @@ dbGaP:
   # have access to the parent study's "common exchange area data" that is not study
   # specific. The following config is whether or not to parse/handle "c999" codes
   # for exchange area data
-  enable_common_exchange_area_access: false
+  enable_common_exchange_area_access: true
   # The below configuration is a mapping from studies to their "common exchange area data"
   # Fence project name a user gets access to when parsing c999 exchange area codes (and
   # subsequently gives access to an arborist resource representing this common area
   # as well)
   study_common_exchange_areas:
     'phs000178': 'test_common_exchange_area'
+    'phs000298': 'test_common_exchange_area_2'
   # A mapping from the dbgap study / Fence project to which authorization namespaces the
   # actual data lives in. For example, `studyX` data may exist in multiple organizations, so
   # we need to know to map authorization to all orgs resources
diff --git a/tests/test_drs.py b/tests/test_drs.py
index 1b1e5cda5..9e95aaab6 100644
--- a/tests/test_drs.py
+++ b/tests/test_drs.py
@@ -361,7 +361,7 @@ def test_passport_use_disabled(
                "phs_id": "phs000298",
                "version": "v4",
                "participant_set": "p3",
-                "consent_group": "c1",
+                "consent_group": "c999",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
@@ -520,7 +520,7 @@ def test_get_presigned_url_for_non_public_data_with_passport(
                "phs_id": "phs000298",
                "version": "v4",
                "participant_set": "p3",
-                "consent_group": "c1",
+                "consent_group": "c999",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
@@ -678,7 +678,7 @@ def test_get_presigned_url_with_passport_with_incorrect_authz(
                "phs_id": "phs000298",
                "version": "v4",
                "participant_set": "p3",
-                "consent_group": "c1",
+                "consent_group": "c999",
                "role": "designated user",
                "expiration": int(time.time()) + 1001,
            },
@@ -974,7 +974,7 @@ def test_passport_cache_valid_passport(
                "phs_id": "phs000298",
                "version": "v4",
                "participant_set": "p3",
-                "consent_group": "c1",
+                "consent_group": "c999",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
@@ -1187,7 +1187,7 @@ def test_passport_cache_invalid_passport(
                "phs_id": "phs000298",
                "version": "v4",
                "participant_set": "p3",
-                "consent_group": "c1",
+                "consent_group": "c999",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
@@ -1404,7 +1404,7 @@ def test_passport_cache_expired_in_memory_valid_in_db(
                "phs_id": "phs000298",
                "version": "v4",
                "participant_set": "p3",
-                "consent_group": "c1",
+                "consent_group": "c999",
                "role": "designated user",
                "expiration": current_time + 1000,
            },
@@ -1635,7 +1635,7 @@ def test_passport_cache_expired(
                "phs_id": "phs000298",
                "version": "v4",
                "participant_set": "p3",
-                "consent_group": "c1",
+                "consent_group": "c999",
                "role": "designated user",
                "expiration": current_time + 2,
            },