diff --git a/.secrets.baseline b/.secrets.baseline index 9aefa253..3686d6e3 100644 --- a/.secrets.baseline +++ b/.secrets.baseline @@ -7,6 +7,9 @@ { "name": "AWSKeyDetector" }, + { + "name": "AzureStorageKeyDetector" + }, { "name": "Base64HighEntropyString", "limit": 4.5 @@ -17,9 +20,15 @@ { "name": "CloudantDetector" }, + { + "name": "DiscordBotTokenDetector" + }, + { + "name": "GitHubTokenDetector" + }, { "name": "HexHighEntropyString", - "limit": 3 + "limit": 3.0 }, { "name": "IbmCloudIamDetector" @@ -37,15 +46,24 @@ { "name": "MailchimpDetector" }, + { + "name": "NpmDetector" + }, { "name": "PrivateKeyDetector" }, + { + "name": "SendGridDetector" + }, { "name": "SlackDetector" }, { "name": "SoftlayerDetector" }, + { + "name": "SquareOAuthDetector" + }, { "name": "StripeDetector" }, @@ -57,10 +75,6 @@ { "path": "detect_secrets.filters.allowlist.is_line_allowlisted" }, - { - "path": "detect_secrets.filters.common.is_baseline_file", - "filename": ".secrets.baseline" - }, { "path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies", "min_level": 2 @@ -215,6 +229,22 @@ "line_number": 125 } ], + "docs/migration_to_single_table_indexd.md": [ + { + "type": "Secret Keyword", + "filename": "docs/migration_to_single_table_indexd.md", + "hashed_secret": "ed8883d3d8dc2e3c7fc5d14cc2e8830dd27e8f96", + "is_verified": false, + "line_number": 26 + }, + { + "type": "Basic Auth Credentials", + "filename": "docs/migration_to_single_table_indexd.md", + "hashed_secret": "afc848c316af1a89d49826c5ae9d00ed769415f3", + "is_verified": false, + "line_number": 57 + } + ], "migrations/versions/15f2e9345ade_create_tables.py": [ { "type": "Hex High Entropy String", @@ -239,7 +269,7 @@ "filename": "tests/default_test_settings.py", "hashed_secret": "afc848c316af1a89d49826c5ae9d00ed769415f3", "is_verified": false, - "line_number": 40 + "line_number": 39 } ], "tests/postgres/migrations/test_15f2e9345ade_create_tables.py": [ @@ -266,7 +296,7 @@ "filename": "tests/postgres/migrations/test_legacy_schema_migration.py", "hashed_secret": "5666c088b494f26cd8f63ace013992f5fc391ce0", "is_verified": false, - "line_number": 88 + "line_number": 91 } ], "tests/test_aliases_endpoints.py": [ @@ -291,28 +321,28 @@ "filename": "tests/test_bundles.py", "hashed_secret": "fd66f51cba49640055a05a6173764b5f0241c63e", "is_verified": false, - "line_number": 137 + "line_number": 143 }, { "type": "Hex High Entropy String", "filename": "tests/test_bundles.py", "hashed_secret": "168762db39e35d49d630689f2ff453b5813a9255", "is_verified": false, - "line_number": 152 + "line_number": 160 }, { "type": "Hex High Entropy String", "filename": "tests/test_bundles.py", "hashed_secret": "c5f0378cf93d896ecc394150943f13afa16ba766", "is_verified": false, - "line_number": 174 + "line_number": 184 }, { "type": "Hex High Entropy String", "filename": "tests/test_bundles.py", "hashed_secret": "a2ca8b84f631b40d866b8e376d077da3527b1fe4", "is_verified": false, - "line_number": 177 + "line_number": 187 } ], "tests/test_client.py": [ @@ -328,70 +358,70 @@ "filename": "tests/test_client.py", "hashed_secret": "15a6d8daad1278efcaadc0d6e3d1dd2d9ebbc262", "is_verified": false, - "line_number": 1084 + "line_number": 1121 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "1b0d1a618b5c213dd792bbc3aa96ffa6bc370ef3", "is_verified": false, - "line_number": 1300 + "line_number": 1345 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "1170ace44158ff189902ff44597efef121623353", 
"is_verified": false, - "line_number": 1731 + "line_number": 1792 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "ff9c79b737b3ea7386618cc9437d3fb0a772182b", "is_verified": false, - "line_number": 2408 + "line_number": 2507 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "c8176f1e75e62e15dabaa4087fb7194451c8f6d2", "is_verified": false, - "line_number": 2411 + "line_number": 2510 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "d5198f8eddb1cbeb437899cd99e5ee97ab8531b4", "is_verified": false, - "line_number": 2411 + "line_number": 2510 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "02dc196562514eaa3e2feac1f441ccf6ad81e09d", "is_verified": false, - "line_number": 2415 + "line_number": 2514 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "f1cb2d91a95165a2ab909eadd9f7b65f312c7e2d", "is_verified": false, - "line_number": 2416 + "line_number": 2515 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "58db546de03270b55a4c889a5c5e6296b29fef25", "is_verified": false, - "line_number": 2417 + "line_number": 2516 }, { "type": "Hex High Entropy String", "filename": "tests/test_client.py", "hashed_secret": "b6c0bd08fde409c18760f32bef8705191840c402", "is_verified": false, - "line_number": 2418 + "line_number": 2517 } ], "tests/test_deprecated_aliases_endpoints.py": [ @@ -409,9 +439,9 @@ "filename": "tests/test_drs.py", "hashed_secret": "5666c088b494f26cd8f63ace013992f5fc391ce0", "is_verified": false, - "line_number": 38 + "line_number": 39 } ] }, - "generated_at": "2023-09-27T23:03:38Z" + "generated_at": "2024-09-09T17:22:44Z" } diff --git a/bin/indexd_settings.py b/bin/indexd_settings.py index bcc67985..ce0b7476 100644 --- a/bin/indexd_settings.py +++ b/bin/indexd_settings.py @@ -4,6 +4,7 @@ from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver from indexd.alias.drivers.alchemy import SQLAlchemyAliasDriver from indexd.auth.drivers.alchemy import SQLAlchemyAuthDriver +from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver APP_NAME = "indexd" @@ -24,6 +25,8 @@ def load_json(file_name): CONFIG["JSONIFY_PRETTYPRINT_REGULAR"] = False +USE_SINGLE_TABLE = False + dist = environ.get("DIST", None) if dist: CONFIG["DIST"] = json.loads(dist) @@ -32,18 +35,33 @@ def load_json(file_name): if drs_service_info: CONFIG["DRS_SERVICE_INFO"] = json.loads(drs_service_info) -CONFIG["INDEX"] = { - "driver": SQLAlchemyIndexDriver( - "postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}".format( - usr=usr, - psw=psw, - pghost=pghost, - pgport=pgport, - db=db, +if USE_SINGLE_TABLE is True: + CONFIG["INDEX"] = { + "driver": SingleTableSQLAlchemyIndexDriver( + "postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}".format( + usr=usr, + psw=psw, + pghost=pghost, + pgport=pgport, + db=db, + ), + index_config=index_config, ), - index_config=index_config, - ), -} + } +else: + CONFIG["INDEX"] = { + "driver": SQLAlchemyIndexDriver( + "postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}".format( + usr=usr, + psw=psw, + pghost=pghost, + pgport=pgport, + db=db, + ), + index_config=index_config, + ), + } + CONFIG["ALIAS"] = { "driver": SQLAlchemyAliasDriver( @@ -68,4 +86,4 @@ def load_json(file_name): arborist="http://localhost/", ) -settings = {"config": CONFIG, "auth": AUTH} +settings = {"config": CONFIG, "auth": AUTH, 
"use_single_table": USE_SINGLE_TABLE} diff --git a/bin/migrate_to_single_table.py b/bin/migrate_to_single_table.py new file mode 100644 index 00000000..7a1d30d0 --- /dev/null +++ b/bin/migrate_to_single_table.py @@ -0,0 +1,371 @@ +""" +to run: python migrate_to_single_table.py --creds-path /dir/containing/db_creds --start-did +""" +import argparse +import backoff +import json +import bin.config_helper as config_helper +from cdislogging import get_logger +from sqlalchemy import create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm import sessionmaker +import re + +from indexd.index.drivers.alchemy import ( + IndexRecord, + IndexRecordAuthz, + IndexRecordAlias, + IndexRecordUrl, + IndexRecordACE, + IndexRecordMetadata, + IndexRecordUrlMetadata, + IndexRecordHash, +) +from indexd.index.drivers.single_table_alchemy import Record +from indexd.index.errors import MultipleRecordsFound + +APP_NAME = "indexd" + +logger = get_logger("migrate_single_table", log_level="debug") + + +def load_json(file_name): + return config_helper.load_json(file_name, APP_NAME) + + +def main(): + args = parse_args() + migrator = IndexRecordMigrator(creds_file=args.creds_file) + migrator.index_record_to_new_table( + offset=args.start_offset, last_seen_guid=args.start_did + ) + return + + +def parse_args(): + parser = argparse.ArgumentParser( + description="Migrate data from old indexd database to new single table database" + ) + parser.add_argument( + "--creds-file", + dest="creds_file", + help="file to the creds file for the database you're trying to copy data from multi-table to single records table. Defaults to original indexd database creds from the indexd block in the creds.json file.", + ) + parser.add_argument( + "--start-did", + dest="start_did", + help="did to start at", + default=None, + ) + parser.add_argument( + "--start-offset", + dest="start_offset", + help="offset to start at", + default=None, + ) + return parser.parse_args() + + +class IndexRecordMigrator: + def __init__(self, creds_file=None): + self.logger = get_logger("migrate_single_table", log_level="debug") + + conf_data = load_json(creds_file) if creds_file else load_json("creds.json") + + usr = conf_data.get("db_username", "{{db_username}}") + db = conf_data.get("db_database", "{{db_database}}") + psw = conf_data.get("db_password", "{{db_password}}") + pghost = conf_data.get("db_host", "{{db_host}}") + pgport = 5432 + + try: + engine = create_engine( + f"postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}" + ) + except Exception as e: + self.logger.error(f"Failed to connect to postgres: {e}") + Base = declarative_base() + Base.metadata.create_all(engine) + Session = sessionmaker(bind=engine) + + self.session = Session() + + def index_record_to_new_table( + self, batch_size=1000, offset=None, last_seen_guid=None + ): + """ + Collect records from index_record table, collect additional info from multiple tables and bulk insert to new record table. 
+    def index_record_to_new_table(
+        self, batch_size=1000, offset=None, last_seen_guid=None
+    ):
+        """
+        Collect records from the index_record table, collect additional info
+        from the other multi-table tables, and bulk insert into the new record
+        table.
+        """
+        try:
+            self.total_records = self.session.query(IndexRecord).count()
+            self.count = 0
+
+            while True:
+                # Resume point takes precedence once set; the offset is only
+                # used for the first batch when no start did is known.
+                if last_seen_guid is not None:
+                    records = (
+                        self.session.query(IndexRecord)
+                        .order_by(IndexRecord.did)
+                        .filter(IndexRecord.did > last_seen_guid)
+                        .limit(batch_size)
+                        .all()
+                    )
+                elif offset is not None:
+                    records = (
+                        self.session.query(IndexRecord)
+                        .order_by(IndexRecord.did)
+                        .offset(offset - 1)
+                        .limit(batch_size)
+                        .all()
+                    )
+                else:
+                    records = (
+                        self.session.query(IndexRecord)
+                        .order_by(IndexRecord.did)
+                        .limit(batch_size)
+                        .all()
+                    )
+
+                if not records:
+                    break
+
+                try:
+                    records_to_insert = self.get_info_from_mult_tables(records)
+                    self.bulk_insert_records(records_to_insert)
+                except Exception as e:
+                    raise Exception(
+                        f"Could not insert records with {e} at offset {offset} with the last seen guid {last_seen_guid}. Please re-run the job with --start-did {last_seen_guid}"
+                    )
+
+                last_seen_guid = records[-1].did
+
+        except Exception as e:
+            self.session.rollback()
+            self.logger.error(
+                f"Error in migration: {e}. Last seen guid: {last_seen_guid} at position: {self.count}."
+            )
+        finally:
+            self.session.close()
+            new_total_records = self.session.query(Record).count()
+            self.logger.info(f"Number of records in old table: {self.total_records}")
+            self.logger.info(f"Number of records in new table: {new_total_records}")
+            if self.total_records == new_total_records:
+                self.logger.info(
+                    "Number of records in the new table matches the number of records in the old table"
+                )
+            else:
+                self.logger.info(
+                    "Number of records in the new table DOES NOT MATCH the number of records in the old table. Check the logs to see if any records were not migrated"
+                )
+            self.logger.info("Finished migrating :D")
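+    # Note: the backoff decorator below only retries exceptions that escape
+    # the method body. IntegrityError and the generic handler inside
+    # bulk_insert_records roll back and log without re-raising, so those
+    # paths are not retried.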
+    @backoff.on_exception(
+        backoff.expo, Exception, max_tries=5, max_time=60, jitter=backoff.full_jitter
+    )
+    def bulk_insert_records(self, records_to_insert):
+        """
+        Bulk insert records into the new Record table
+
+        Args:
+            records_to_insert (list): List of Record objects
+        """
+        try:
+            self.session.bulk_save_objects(records_to_insert)
+            self.session.commit()
+            self.count += len(records_to_insert)
+            self.logger.info(
+                f"Done processing {self.count}/{self.total_records} records. {(self.count * 100)/self.total_records}%"
+            )
+        except IntegrityError as e:
+            self.session.rollback()
+            self.logger.error(f"Duplicate record found for records {e}")
+        except Exception as e:
+            self.session.rollback()
+            self.logger.error(
+                f"Error bulk inserting records at {self.count} records: {e}"
+            )
+
+    def get_info_from_mult_tables(self, records):
+        """
+        Collect records from the multiple tables of the old multi-table
+        infrastructure and create a list of records to insert into the new
+        single-table infrastructure
+
+        Args:
+            records (list): list of IndexRecord objects
+
+        Returns:
+            records_to_insert (list): List of Record objects
+        """
+        records_to_insert = []
+        for record in records:
+            hashes = self.get_index_record_hash(record.did)
+            urls = self.get_urls_record(record.did)
+            url_metadata = self.get_urls_metadata(record.did)
+            acl = self.get_index_record_ace(record.did)
+            authz = self.get_index_record_authz(record.did)
+            alias = self.get_index_record_alias(record.did)
+            metadata = self.get_index_record_metadata(record.did)
+            records_to_insert.append(
+                Record(
+                    guid=record.did,
+                    baseid=record.baseid,
+                    rev=record.rev,
+                    form=record.form,
+                    size=record.size,
+                    created_date=record.created_date,
+                    updated_date=record.updated_date,
+                    content_created_date=record.content_created_date,
+                    content_updated_date=record.content_updated_date,
+                    file_name=record.file_name,
+                    version=record.version,
+                    uploader=record.uploader,
+                    # carry the description over so it is not lost in migration
+                    description=record.description,
+                    hashes=hashes,
+                    urls=urls,
+                    url_metadata=url_metadata,
+                    acl=acl,
+                    authz=authz,
+                    alias=alias,
+                    record_metadata=metadata,
+                )
+            )
+        return records_to_insert
+
+    @backoff.on_exception(
+        backoff.expo, Exception, max_tries=5, max_time=10, jitter=backoff.full_jitter
+    )
+    def get_index_record_hash(self, did):
+        """
+        Get the index record hashes for the given did and return a correctly formatted value
+        """
+        try:
+            stmt = (
+                self.session.query(
+                    IndexRecordHash.hash_type,
+                    IndexRecordHash.hash_value,
+                )
+                .filter(IndexRecordHash.did == did)
+                .all()
+            )
+            res = {hash_type: hash_value for hash_type, hash_value in stmt}
+            return res
+        except Exception as e:
+            raise Exception(f"Error with hash for {did}: {e}")
+
+    @backoff.on_exception(
+        backoff.expo, Exception, max_tries=5, max_time=10, jitter=backoff.full_jitter
+    )
+    def get_urls_record(self, did):
+        """
+        Get the url records for the given did and return a correctly formatted value
+        """
+        try:
+            stmt = (
+                self.session.query(IndexRecordUrl.url)
+                .filter(IndexRecordUrl.did == did)
+                .all()
+            )
+            res = [u.url for u in stmt]
+            return res
+        except Exception as e:
+            raise Exception(f"Error with urls for {did}: {e}")
+
+    @backoff.on_exception(
+        backoff.expo, Exception, max_tries=5, max_time=10, jitter=backoff.full_jitter
+    )
+    def get_urls_metadata(self, did):
+        """
+        Get the url metadata for the given did and return a correctly formatted value
+        """
+        try:
+            stmt = (
+                self.session.query(
+                    IndexRecordUrlMetadata.url,
+                    IndexRecordUrlMetadata.key,
+                    IndexRecordUrlMetadata.value,
+                )
+                .filter(IndexRecordUrlMetadata.did == did)
+                .all()
+            )
+            # Accumulate per-url key/value pairs instead of overwriting, so
+            # urls with more than one metadata key keep all of them.
+            res = {}
+            for url, key, value in stmt:
+                res.setdefault(url, {})[key] = value
+            return res
+        except Exception as e:
+            raise Exception(f"Error with url metadata for {did}: {e}")
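+    # Illustrative shape returned by get_urls_metadata (urls and keys are
+    # examples only):
+    #   {"s3://bucket/key": {"state": "validated", "type": "object"}}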
+    @backoff.on_exception(
+        backoff.expo, Exception, max_tries=5, max_time=10, jitter=backoff.full_jitter
+    )
+    def get_index_record_ace(self, did):
+        """
+        Get the index record aces for the given did and return a correctly formatted value
+        """
+        try:
+            stmt = (
+                self.session.query(IndexRecordACE.ace)
+                .filter(IndexRecordACE.did == did)
+                .all()
+            )
+            res = [a.ace for a in stmt]
+            return res
+        except Exception as e:
+            raise Exception(f"Error with ace for did {did}: {e}")
+
+    @backoff.on_exception(
+        backoff.expo, Exception, max_tries=5, max_time=10, jitter=backoff.full_jitter
+    )
+    def get_index_record_authz(self, did):
+        """
+        Get the index record authz resources for the given did and return a correctly formatted value
+        """
+        try:
+            stmt = (
+                self.session.query(IndexRecordAuthz.resource)
+                .filter(IndexRecordAuthz.did == did)
+                .all()
+            )
+            res = [r.resource for r in stmt]
+            return res
+        except Exception as e:
+            raise Exception(f"Error with authz for did {did}: {e}")
+
+    @backoff.on_exception(
+        backoff.expo, Exception, max_tries=5, max_time=10, jitter=backoff.full_jitter
+    )
+    def get_index_record_alias(self, did):
+        """
+        Get the index record aliases for the given did and return a correctly formatted value
+        """
+        try:
+            stmt = (
+                self.session.query(IndexRecordAlias.name)
+                .filter(IndexRecordAlias.did == did)
+                .all()
+            )
+            # The query only selects the alias name, so each row is a
+            # one-element tuple; Record.alias expects a flat list of names.
+            res = [name for (name,) in stmt]
+            return res
+        except Exception as e:
+            raise Exception(f"Error with alias for did {did}: {e}")
+
+    @backoff.on_exception(
+        backoff.expo, Exception, max_tries=5, max_time=10, jitter=backoff.full_jitter
+    )
+    def get_index_record_metadata(self, did):
+        """
+        Get the index record metadata for the given did and return a correctly formatted value
+        """
+        try:
+            stmt = (
+                self.session.query(
+                    IndexRecordMetadata.key,
+                    IndexRecordMetadata.value,
+                )
+                .filter(IndexRecordMetadata.did == did)
+                .all()
+            )
+            res = {key: value for key, value in stmt}
+            return res
+        except Exception as e:
+            raise Exception(f"Error with metadata for did {did}: {e}")
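+
+# Example (illustrative): resuming a partial migration from the shell,
+# assuming a previous run logged "Last seen guid: <guid>":
+#
+#   python bin/migrate_to_single_table.py --creds-file creds.json --start-did <guid>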
+
+if __name__ == "__main__":
+    main()
diff --git a/deployment/Secrets/indexd_settings.py b/deployment/Secrets/indexd_settings.py
index e7572d4f..5baab416 100644
--- a/deployment/Secrets/indexd_settings.py
+++ b/deployment/Secrets/indexd_settings.py
@@ -1,10 +1,10 @@
 from os import environ
 import json
 import config_helper
-from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver
 from indexd.alias.drivers.alchemy import SQLAlchemyAliasDriver
 from indexd.auth.drivers.alchemy import SQLAlchemyAuthDriver
-
+from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver
+from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver
 
 APP_NAME = "indexd"
 
@@ -23,6 +23,8 @@ def load_json(file_name):
 index_config = conf_data.get("index_config")
 
 CONFIG = {}
+USE_SINGLE_TABLE = False
+
 CONFIG["JSONIFY_PRETTYPRINT_REGULAR"] = False
 
 dist = environ.get("DIST", None)
@@ -33,18 +35,33 @@ def load_json(file_name):
 if drs_service_info:
     CONFIG["DRS_SERVICE_INFO"] = json.loads(drs_service_info)
 
-CONFIG["INDEX"] = {
-    "driver": SQLAlchemyIndexDriver(
-        "postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}".format(
-            usr=usr,
-            psw=psw,
-            pghost=pghost,
-            pgport=pgport,
-            db=db,
+if USE_SINGLE_TABLE is True:
+    CONFIG["INDEX"] = {
+        "driver": SingleTableSQLAlchemyIndexDriver(
+            "postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}".format(
+                usr=usr,
+                psw=psw,
+                pghost=pghost,
+                pgport=pgport,
+                db=db,
+            ),
+            index_config=index_config,
         ),
-        index_config=index_config,
-    ),
-}
+    }
+else:
+    CONFIG["INDEX"] = {
+        "driver": SQLAlchemyIndexDriver(
+            "postgresql+psycopg2://{usr}:{psw}@{pghost}:{pgport}/{db}".format(
+                usr=usr,
+                psw=psw,
+                pghost=pghost,
+                pgport=pgport,
+                db=db,
+            ),
+            index_config=index_config,
+        ),
+    }
+
 
 CONFIG["ALIAS"] = {
     "driver": SQLAlchemyAliasDriver(
@@ -69,4 +86,4 @@ def load_json(file_name):
         arborist="http://arborist-service/",
     )
 
-settings = {"config": CONFIG, "auth": AUTH}
+settings = {"config": CONFIG, "auth": AUTH, "use_single_table": USE_SINGLE_TABLE}
diff --git a/docs/local_dev_environment.md b/docs/local_dev_environment.md
index ac7b79ce..b6dc47ea 100644
--- a/docs/local_dev_environment.md
+++ b/docs/local_dev_environment.md
@@ -193,10 +193,10 @@ poetry run pytest -vv --cov=indexd --cov-report xml tests
 You may also need to update the [test settings](./tests/default_test_settings.py) with the appropriate database connection information prior to running the tests.
 
 ```python
-settings["config"]["TEST_DB"] = "postgres://{username}:{password}@localhost:{port}/indexd_tests"
+settings["config"]["TEST_DB"] = "postgresql://{username}:{password}@localhost:{port}/indexd_tests"
 ```
 
-> If you are using Azure Postgresql, you will need to include the `username@hostname` for the `username` in the connection string. You may also need to include support for SSL in the connection string, e.g. `postgres://{username@hostname}:{password}@serverfqdn:{port}/{dbname}?sslmode=require`.
+> If you are using Azure Postgresql, you will need to include the `username@hostname` for the `username` in the connection string. You may also need to include support for SSL in the connection string, e.g. `postgresql://{username@hostname}:{password}@serverfqdn:{port}/{dbname}?sslmode=require`.
 > Further, you may run into `sqlite` errors; it may be helpful to rename existing local `*.sq3` files before running `pytest`.
 
 ## Administration
diff --git a/docs/migration_to_single_table_indexd.md b/docs/migration_to_single_table_indexd.md
new file mode 100644
index 00000000..b8f0320b
--- /dev/null
+++ b/docs/migration_to_single_table_indexd.md
@@ -0,0 +1,103 @@
+# Running Data Migration for Single Table Indexd
+
+## A. Prepare Database and Configuration
+1. **Deploy the version of IndexD** that contains Single Table Indexd. Alembic, used for database migrations, should create a new table named `records` in the IndexD database. Note that this is a database migration and NOT a data migration.
+2. **Create a clone database:**
+```
+    # create a new database
+    gen3 psql indexd -c 'create database indexd_new'
+    # dump the old db and restore it on the new one
+    gen3 db backup indexd | psql -U $indexd_user -h -d indexd_new
+```
+
+**If you don't have permissions:**
+
+a. Run `gen3 db creds indexd`
+
+b. Using the information from above, run `gen3 psql $g3Farmserver -c "alter user $username createdb;"`
+
+3. **Update credentials:** After creating the backup database, update `Gen3Secrets/creds.json` to include the credentials for the new database: add a new block named `indexd_new` with the new database's credentials, copying the configuration from the `indexd` block. Run `gen3 kube-setup-secrets` to create the secrets in kube secrets. The new database can then be accessed with `gen3 psql indexd_new`.
+
+4. **Update the cloud-automation script** `~/cloud-automation/kube/services/indexd/indexd-deploy.yaml`:
+```
+    volumes:
+      - name: creds-volume-new
+        secret:
+          secretName: "indexd_new"
+    volumeMounts:
+      - name: "creds-volume-new"
+        readOnly: True
+        mountPath: "var/www/indexd/new_creds.json"
+        subPath: creds.json
+```
+After updating the cloud-automation script, run `gen3 roll indexd`.
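+
+Before kicking off the data migration, it is worth a quick sanity check that the clone matches the source. A minimal sketch (assuming both databases are reachable through `gen3 psql`; `index_record` is the main table of the multi-table schema):
+```
+    gen3 psql indexd -c 'select count(*) from index_record'
+    gen3 psql indexd_new -c 'select count(*) from index_record'
+```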
+
+## B. Run Database Migration
+Run the cloud-automation migration job, `indexd-single-table-migration-job.yaml`.
+
+To run:
+```
+    gen3 job run indexd-single-table-migration-job
+```
+
+**If a job stops in the middle of migration** for any reason, it should log the last seen guid; re-run the job with the `START_DID` parameter set to that guid:
+
+```
+    gen3 job run indexd-single-table-migration-job START_DID <last-seen-guid>
+```
+
+## C. Swap IndexD to use the clone database
+Go to the indexd settings under `cloud-automation/apis_config/indexd_settings.py` and change the config `CONFIG["INDEX"]`.
+
+**From:**
+```
+    CONFIG["INDEX"] = {
+        "driver": SQLAlchemyIndexDriver(
+            "postgresql://postgres:postgres@localhost:5432/indexd_tests",
+            echo=True,
+            index_config={
+                "DEFAULT_PREFIX": "testprefix:",
+                "PREPEND_PREFIX": True,
+                "ADD_PREFIX_ALIAS": False,
+            },
+        )
+    }
+```
+
+**To:**
+```
+    from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver
+
+    USE_SINGLE_TABLE = True
+
+    if USE_SINGLE_TABLE is True:
+        CONFIG["INDEX"] = {
+            "driver": SingleTableSQLAlchemyIndexDriver(
+                "postgresql://postgres:postgres@localhost:5432/indexd_tests",
+                echo=True,
+                index_config={
+                    "DEFAULT_PREFIX": "testprefix:",
+                    "PREPEND_PREFIX": True,
+                    "ADD_PREFIX_ALIAS": False,
+                },
+            )
+        }
+    else:
+        CONFIG["INDEX"] = {
+            "driver": SQLAlchemyIndexDriver(
+                "postgresql://postgres:postgres@localhost:5432/indexd_tests",
+                echo=True,
+                index_config={
+                    "DEFAULT_PREFIX": "testprefix:",
+                    "PREPEND_PREFIX": True,
+                    "ADD_PREFIX_ALIAS": False,
+                },
+            )
+        }
+```
+
+Import `from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver` and add the driver `SingleTableSQLAlchemyIndexDriver` alongside `SQLAlchemyIndexDriver`. Wrap both in an `if` statement as shown above, and add a new configuration flag `USE_SINGLE_TABLE` to make it easy to swap between the two drivers.
+
+## D. Swap the current running database with the snapshot
+In `creds.json`, you should have an `indexd` block and an `indexd_new` block. Swap them, then run `gen3 kube-setup-secrets` and `gen3 roll indexd`.
diff --git a/indexd/default_settings.py b/indexd/default_settings.py
index f57f1054..75d26271 100644
--- a/indexd/default_settings.py
+++ b/indexd/default_settings.py
@@ -1,6 +1,8 @@
 from .index.drivers.alchemy import SQLAlchemyIndexDriver
 from .alias.drivers.alchemy import SQLAlchemyAliasDriver
 from .auth.drivers.alchemy import SQLAlchemyAuthDriver
+from .index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver
+
 
 CONFIG = {}
 
@@ -9,26 +11,46 @@
 # Key to lock the database during migrations
 CONFIG["DB_MIGRATION_POSTGRES_LOCK_KEY"] = 100
 
+USE_SINGLE_TABLE = False
+
 # - DEFAULT_PREFIX: prefix to be prepended.
 # - PREPEND_PREFIX: the prefix is prepended to the generated GUID when a
 #   new record is created WITHOUT a provided GUID.
 # - ADD_PREFIX_ALIAS: aliases are created for new records - "<PREFIX><GUID>".
 # Do NOT set both ADD_PREFIX_ALIAS and PREPEND_PREFIX to True, or aliases
 # will be created as "<PREFIX><PREFIX><GUID>".
-CONFIG["INDEX"] = { - "driver": SQLAlchemyIndexDriver( - "sqlite:///index.sq3", +if USE_SINGLE_TABLE is True: + CONFIG["INDEX"] = { + "driver": SingleTableSQLAlchemyIndexDriver( + "postgresql://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret + echo=True, + index_config={ + "DEFAULT_PREFIX": "testprefix:", + "PREPEND_PREFIX": True, + "ADD_PREFIX_ALIAS": False, + }, + ) + } +else: + CONFIG["INDEX"] = { + "driver": SQLAlchemyIndexDriver( + "postgresql://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret + echo=True, + index_config={ + "DEFAULT_PREFIX": "testprefix:", + "PREPEND_PREFIX": True, + "ADD_PREFIX_ALIAS": False, + }, + ) + } + +CONFIG["ALIAS"] = { + "driver": SQLAlchemyAliasDriver( + "postgresql://postgres:postgres@localhost:5432/indexd_tests", # pragma: allowlist secret echo=True, - index_config={ - "DEFAULT_PREFIX": "testprefix:", - "PREPEND_PREFIX": True, - "ADD_PREFIX_ALIAS": False, - }, ) } -CONFIG["ALIAS"] = {"driver": SQLAlchemyAliasDriver("sqlite:///alias.sq3", echo=True)} - CONFIG["DIST"] = [ { @@ -61,6 +83,8 @@ }, } -AUTH = SQLAlchemyAuthDriver("sqlite:///auth.sq3") +AUTH = SQLAlchemyAuthDriver( + "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret +) -settings = {"config": CONFIG, "auth": AUTH} +settings = {"config": CONFIG, "auth": AUTH, "use_single_table": USE_SINGLE_TABLE} diff --git a/indexd/index/blueprint.py b/indexd/index/blueprint.py index c7fb3c32..7c79513f 100644 --- a/indexd/index/blueprint.py +++ b/indexd/index/blueprint.py @@ -104,10 +104,8 @@ def get_index(form=None): validate_hashes(**hashes) hashes = hashes if hashes else None - metadata = flask.request.args.getlist("metadata") metadata = {k: v for k, v in (x.split(":", 1) for x in metadata)} - acl = flask.request.args.get("acl") if acl is not None: acl = [] if acl == "null" else acl.split(",") @@ -186,8 +184,8 @@ def get_index(form=None): "authz": authz, "hashes": hashes, "metadata": metadata, + "urls_metadata": urls_metadata, } - return flask.jsonify(base), 200 @@ -375,7 +373,6 @@ def get_index_record(record): """ Returns a record. 
""" - ret = blueprint.index_driver.get_with_nonstrict_prefix(record) return flask.jsonify(ret), 200 @@ -487,7 +484,6 @@ def add_index_blank_record_version(record): did, baseid, rev = blueprint.index_driver.add_blank_version( record, new_did=new_did, uploader=uploader, file_name=file_name, authz=authz ) - ret = {"did": did, "baseid": baseid, "rev": rev} return flask.jsonify(ret), 201 @@ -538,7 +534,6 @@ def put_index_record(record): # authorize done in update did, baseid, rev = blueprint.index_driver.update(record, rev, json) - ret = {"did": did, "baseid": baseid, "rev": rev} return flask.jsonify(ret), 200 @@ -737,7 +732,6 @@ def post_bundle(): for checksum in flask.request.json.get("checksums") } validate_hashes(**hashes) - # get bundles/records that already exists and add it to bundle_data for bundle in bundles: data = get_index_record(bundle)[0] diff --git a/indexd/index/drivers/alchemy.py b/indexd/index/drivers/alchemy.py index 54bd865e..d9f36a46 100644 --- a/indexd/index/drivers/alchemy.py +++ b/indexd/index/drivers/alchemy.py @@ -676,7 +676,7 @@ def get_urls(self, size=None, hashes=None, ids=None, start=0, limit=100): for r in query ] - def _validate_and_format_content_dates( + def _validate_and_set_content_dates( self, record, content_created_date, content_updated_date ): if content_created_date is not None: @@ -769,7 +769,7 @@ def add( record.description = description - self._validate_and_format_content_dates( + self._validate_and_set_content_dates( record=record, content_created_date=content_created_date, content_updated_date=content_updated_date, @@ -1371,8 +1371,6 @@ def add_version( record.file_name = file_name record.version = version record.description = description - record.content_created_date = content_created_date - record.content_updated_date = content_updated_date record.urls = [IndexRecordUrl(did=record.did, url=url) for url in urls] @@ -1393,7 +1391,7 @@ def add_version( for m_key, m_value in metadata.items() ] - self._validate_and_format_content_dates( + self._validate_and_set_content_dates( record=record, content_created_date=content_created_date, content_updated_date=content_updated_date, diff --git a/indexd/index/drivers/query/urls.py b/indexd/index/drivers/query/urls.py index 18eea35b..fc56f342 100644 --- a/indexd/index/drivers/query/urls.py +++ b/indexd/index/drivers/query/urls.py @@ -80,7 +80,6 @@ def query_urls( query = query.having( ~q_func["string_agg"](IndexRecordUrl.url, ",").contains(exclude) ) - print(query) # [('did', 'urls')] record_list = ( query.order_by(IndexRecordUrl.did.asc()) diff --git a/indexd/index/drivers/single_table_alchemy.py b/indexd/index/drivers/single_table_alchemy.py new file mode 100644 index 00000000..a57fb68a --- /dev/null +++ b/indexd/index/drivers/single_table_alchemy.py @@ -0,0 +1,1537 @@ +import datetime +import uuid + +from cdislogging import get_logger +from sqlalchemy import ( + Column, + String, + ForeignKey, + BigInteger, + DateTime, + ARRAY, + func, + or_, + text, + not_, + and_, + cast, + TEXT, + select, +) +from sqlalchemy.dialects.postgresql import JSONB, ARRAY +from sqlalchemy.exc import IntegrityError, ProgrammingError +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker +from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound +from contextlib import contextmanager + +from indexd import auth +from indexd.errors import UserError, AuthError +from indexd.index.driver import IndexDriverABC +from indexd.index.drivers.alchemy import IndexSchemaVersion, DrsBundleRecord 
+from indexd.index.errors import ( + MultipleRecordsFound, + NoRecordFound, + RevisionMismatch, + UnhealthyCheck, +) + +Base = declarative_base() + + +class Record(Base): + """ + Base index record representation. + """ + + __tablename__ = "record" + + guid = Column(String, primary_key=True) + + baseid = Column(String, index=True) + rev = Column(String) + form = Column(String) + size = Column(BigInteger) + created_date = Column(DateTime, default=datetime.datetime.utcnow) + updated_date = Column(DateTime, default=datetime.datetime.utcnow) + file_name = Column(String) + version = Column(String) + uploader = Column(String) + description = Column(String) + content_created_date = Column(DateTime) + content_updated_date = Column(DateTime) + hashes = Column(JSONB) + acl = Column(ARRAY(String)) + authz = Column(ARRAY(String)) + urls = Column(ARRAY(String)) + record_metadata = Column(JSONB) + url_metadata = Column(JSONB) + alias = Column(ARRAY(String)) + + def to_document_dict(self): + """ + Get the full index document + """ + acl = self.acl or [] + authz = self.authz or [] + content_created_date = ( + self.content_created_date.isoformat() + if self.content_created_date is not None + else None + ) + content_updated_date = ( + self.content_updated_date.isoformat() + if self.content_updated_date is not None + else None + ) + + return { + "did": self.guid, + "baseid": self.baseid, + "rev": self.rev, + "size": self.size, + "file_name": self.file_name, + "version": self.version, + "uploader": self.uploader, + "urls": self.urls, + "urls_metadata": self.url_metadata, + "acl": acl, + "authz": authz, + "hashes": self.hashes, + "metadata": self.record_metadata, + "form": self.form, + "created_date": self.created_date.isoformat(), + "updated_date": self.updated_date.isoformat(), + "description": self.description, + "content_created_date": content_created_date, + "content_updated_date": content_updated_date, + } + + +class SingleTableSQLAlchemyIndexDriver(IndexDriverABC): + def __init__(self, conn, logger=None, index_config=None, **config): + super().__init__(conn, **config) + self.logger = logger or get_logger("SQLAlchemyIndexDriver") + self.config = index_config or {} + Base.metadata.bind = self.engine + self.Session = sessionmaker(bind=self.engine) + + @property + @contextmanager + def session(self): + """ + Provide a transactional scope around a series of operations. + """ + session = self.Session() + session.begin() + try: + yield session + session.commit() + except Exception: + session.rollback() + raise + finally: + session.close() + + def ids( + self, + limit=100, + start=None, + size=None, + urls=None, + acl=None, + authz=None, + hashes=None, + file_name=None, + version=None, + uploader=None, + metadata=None, + ids=None, + urls_metadata=None, + negate_params=None, + page=None, + ): + """ + Returns list of records stored by the backend. 
+ """ + with self.session as session: + query = session.query(Record) + + if start is not None: + query = query.filter(Record.guid > start) + + if size is not None: + query = query.filter(Record.size == size) + + if file_name is not None: + query = query.filter(Record.file_name == file_name) + + if version is not None: + query = query.filter(Record.version == version) + + if uploader is not None: + query = query.filter(Record.uploader == uploader) + + if urls: + for u in urls: + query = query.filter(Record.urls.any(u)) + + if acl: + for u in acl: + query = query.filter(Record.acl.any(u)) + elif acl == []: + query = query.filter(Record.acl == None) + + if authz: + for u in authz: + query = query.filter(Record.authz.any(u)) + elif authz == []: + query = query.filter(Record.authz == None) + + if hashes: + for h, v in hashes.items(): + query = query.filter(Record.hashes == {h: v}) + + if metadata: + for k, v in metadata.items(): + query = query.filter(Record.record_metadata[k].astext == v) + + if urls_metadata: + for url_key, url_dict in urls_metadata.items(): + matches = "" + for k, v in url_dict.items(): + matches += '@.{} == "{}" && '.format(k, v) + if matches: + matches = matches.rstrip("&& ") + match_string = "$.* ? ({})".format(matches) + query = query.filter( + func.jsonb_path_exists(Record.url_metadata, match_string) + ) + + if negate_params: + query = self._negate_filter(session, query, **negate_params) + + if page is not None: + # order by updated date so newly added stuff is + # at the end (reduce risk that a new records ends up in a page + # earlier on) and allows for some logic to check for newly added records + # (e.g. parallelly processing from beginning -> middle and ending -> middle + # and as a final step, checking the "ending"+1 to see if there are + # new records). + query = query.order_by(Record.updated_date) + else: + query = query.order_by(Record.guid) + + if ids: + DEFAULT_PREFIX = self.config.get("DEFAULT_PREFIX") + found_ids = [] + new_ids = [] + + if not DEFAULT_PREFIX: + self.logger.info("NO DEFAULT_PREFIX") + else: + subquery = query.filter(Record.guid.in_(ids)) + found_ids = [i.guid for i in subquery] + + for i in ids: + if i not in found_ids: + if not i.startswith(DEFAULT_PREFIX): + new_ids.append(DEFAULT_PREFIX + i) + else: + stripped = i.split(DEFAULT_PREFIX, 1)[1] + new_ids.append(stripped) + + query = query.filter(Record.guid.in_(found_ids + new_ids)) + else: + query = query.limit(limit) + + if page is not None: + query = query.offset(limit * page) + + return [i.to_document_dict() for i in query] + + @staticmethod + def _negate_filter( + session, + query, + urls=None, + acl=None, + authz=None, + file_name=None, + version=None, + metadata=None, + urls_metadata=None, + ): + """ + param_values passed in here will be negated + + for string (version, file_name), filter with value != + for list (urls, acl), filter with doc that don't HAS + for dict (metadata, urls_metadata). 
+    @staticmethod
+    def _negate_filter(
+        session,
+        query,
+        urls=None,
+        acl=None,
+        authz=None,
+        file_name=None,
+        version=None,
+        metadata=None,
+        urls_metadata=None,
+    ):
+        """
+        Filter values passed in here will be negated:
+
+        - for strings (version, file_name), filter for value !=
+        - for lists (urls, acl, authz), filter for docs that do NOT contain any
+          of the given values
+        - for dicts (metadata, urls_metadata), in each (key, value) pair:
+          - if value is None or empty, filter for docs where the key doesn't exist
+          - if value is provided, filter for value != OR the key doesn't exist
+
+        Args:
+            session: db session
+            query: sqlalchemy query
+            urls (list): doc.urls don't have any url in the urls list
+            acl (list): doc.acl don't have any ace in the acl list
+            authz (list): doc.authz don't have any resource in the authz list
+            file_name (str): doc.file_name !=
+            version (str): doc.version !=
+            metadata (dict): see above
+            urls_metadata (dict): see above
+
+        Returns:
+            Database query
+        """
+        if file_name is not None:
+            query = query.filter(Record.file_name != file_name)
+
+        if version is not None:
+            query = query.filter(Record.version != version)
+
+        if urls is not None and urls:
+            for u in urls:
+                query = query.filter(not_(Record.urls.any(u)))
+
+        if acl is not None and acl:
+            for u in acl:
+                query = query.filter(
+                    Record.acl.isnot(None),
+                    func.array_length(Record.acl, 1) > 0,
+                    not_(Record.acl.any(u)),
+                )
+
+        if authz is not None and authz:
+            for u in authz:
+                query = query.filter(
+                    Record.authz.isnot(None),
+                    func.array_length(Record.authz, 1) > 0,
+                    not_(Record.authz.any(u)),
+                )
+
+        if metadata is not None and metadata:
+            for k, v in metadata.items():
+                if not v:
+                    query = query.filter(~text("record_metadata ? :key")).params(key=k)
+                else:
+                    query = query.filter(Record.record_metadata[k].astext != v)
+
+        if urls_metadata is not None and urls_metadata:
+            for url_key, url_dict in urls_metadata.items():
+                if not url_dict:
+                    query = query.filter(
+                        ~text(
+                            f"EXISTS (SELECT 1 FROM UNNEST(urls) AS element WHERE element LIKE '%{url_key}%')"
+                        )
+                    )
+                    query = query.filter(
+                        ~text(
+                            f"EXISTS (SELECT 1 FROM jsonb_object_keys(url_metadata) AS key WHERE key LIKE '%{url_key}%')"
+                        )
+                    )
+                else:
+                    for k, v in url_dict.items():
+                        if not v:
+                            # Extend the existing query rather than starting a
+                            # new one, so earlier filters are preserved.
+                            query = query.filter(
+                                text(
+                                    f"EXISTS (SELECT 1 FROM jsonb_each_text(url_metadata) AS x WHERE x.value LIKE '%{k}%')"
+                                )
+                            )
+                        else:
+                            query = query.filter(
+                                text(
+                                    "url_metadata IS NOT NULL AND url_metadata != '{}'"
+                                ),
+                                ~func.jsonb_path_match(
+                                    Record.url_metadata, '$.*.{} == "{}"'.format(k, v)
+                                ),
+                            )
+
+        return query
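+    # Example (illustrative) return shape of get_urls below:
+    #   [{"url": "s3://bucket/key", "metadata": {"state": "validated"}}, ...]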
+    def get_urls(self, size=None, hashes=None, ids=None, start=0, limit=100):
+        """
+        Returns a list of urls matching supplied size/hashes/guids.
+        """
+        if size is None and hashes is None and ids is None:
+            raise UserError("Please provide size/hashes/ids to filter")
+
+        with self.session as session:
+            query = session.query(Record)
+
+            if size:
+                query = query.filter(Record.size == size)
+            if hashes:
+                for h, v in hashes.items():
+                    query = query.filter(Record.hashes.contains({h: v}))
+            if ids:
+                query = query.filter(Record.guid.in_(ids))
+            # Remove duplicates.
+            query = query.distinct()
+
+            # Return only the specified window.
+            query = query.offset(start)
+            query = query.limit(limit)
+
+            return_urls = []
+            for r in query:
+                # url_metadata may be NULL for records without urls
+                for url, values in (r.url_metadata or {}).items():
+                    return_urls.append(
+                        {
+                            "url": url,
+                            "metadata": values,
+                        }
+                    )
+
+            return return_urls
+
+    def _validate_and_set_content_dates(
+        self, record, content_created_date, content_updated_date
+    ):
+        if content_created_date is not None:
+            record.content_created_date = datetime.datetime.fromisoformat(
+                content_created_date
+            )
+            # Users cannot set content_updated_date without a content_created_date
+            record.content_updated_date = (
+                datetime.datetime.fromisoformat(content_updated_date)
+                if content_updated_date is not None
+                else record.content_created_date  # Set updated to created if no updated is provided
+            )
+
+    def add(
+        self,
+        form,
+        guid=None,
+        size=None,
+        file_name=None,
+        metadata=None,
+        urls_metadata=None,
+        version=None,
+        urls=None,
+        acl=None,
+        authz=None,
+        hashes=None,
+        baseid=None,
+        uploader=None,
+        description=None,
+        content_created_date=None,
+        content_updated_date=None,
+    ):
+        """
+        Creates a new record given size, urls, acl, authz, hashes, metadata,
+        url_metadata, file name and version.
+        If guid is provided, create the new record with that guid; otherwise
+        generate one.
+        """
+
+        urls = urls or []
+        acl = acl or []
+        authz = authz or []
+        hashes = hashes or {}
+        metadata = metadata or {}
+        url_metadata = urls_metadata or {}
+
+        with self.session as session:
+            record = Record()
+
+            if not baseid:
+                baseid = str(uuid.uuid4())
+
+            record.baseid = baseid
+            record.file_name = file_name
+            record.version = version
+
+            if guid:
+                record.guid = guid
+            else:
+                new_guid = str(uuid.uuid4())
+                if self.config.get("PREPEND_PREFIX"):
+                    new_guid = self.config["DEFAULT_PREFIX"] + new_guid
+                record.guid = new_guid
+
+            record.rev = str(uuid.uuid4())[:8]
+            record.form, record.size = form, size
+            record.uploader = uploader
+            record.urls = list(set(urls))
+            record.acl = list(set(acl))
+            record.authz = list(set(authz))
+            record.hashes = hashes
+            record.record_metadata = metadata
+            record.description = description
+
+            self._validate_and_set_content_dates(
+                record=record,
+                content_created_date=content_created_date,
+                content_updated_date=content_updated_date,
+            )
+            try:
+                check_url_metadata(url_metadata, record)
+                record.url_metadata = url_metadata
+                if self.config.get("ADD_PREFIX_ALIAS"):
+                    prefix = self.config["DEFAULT_PREFIX"]
+                    record.alias = list(set([prefix + record.guid]))
+                session.add(record)
+                session.commit()
+            except IntegrityError:
+                raise MultipleRecordsFound(
+                    'guid "{guid}" already exists'.format(guid=record.guid)
+                )
+            except Exception as e:
+                # Log and re-raise instead of swallowing the error, so callers
+                # don't get a guid back for a record that was never committed.
+                self.logger.error(f"Failed to create record {record.guid}: {e}")
+                raise
+
+            return record.guid, record.rev, record.baseid
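+    # Example (illustrative) call:
+    #   guid, rev, baseid = driver.add(
+    #       "object", size=1024, hashes={"md5": "..."}, urls=["s3://bucket/key"]
+    #   )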
+    def add_blank_record(self, uploader, file_name=None, authz=None):
+        """
+        Create a new blank record with only uploader and optionally
+        file_name and authz fields filled
+        """
+        # if an authz is provided, ensure that the user can actually create for that resource
+        authorized = False
+        authz_err_msg = "Auth error when attempting to create a blank record. User must have '{}' access on '{}' for service 'indexd'."
+        if authz:
+            try:
+                auth.authorize("create", authz)
+                authorized = True
+            except AuthError as err:
+                self.logger.error(
+                    authz_err_msg.format("create", authz)
+                    + " Falling back to 'file_upload' on '/data_file'."
+                )
+
+        if not authorized:
+            # either no 'authz' was provided, or the user doesn't have the
+            # right CRUD access. Fall back on the 'file_upload' logic
+            try:
+                auth.authorize("file_upload", ["/data_file"])
+            except AuthError as err:
+                self.logger.error(authz_err_msg.format("file_upload", "/data_file"))
+                raise
+
+        with self.session as session:
+            record = Record()
+
+            did = str(uuid.uuid4())
+            baseid = str(uuid.uuid4())
+            if self.config.get("PREPEND_PREFIX"):
+                did = self.config["DEFAULT_PREFIX"] + did
+
+            record.guid = did
+            record.baseid = baseid
+            record.rev = str(uuid.uuid4())[:8]
+            record.uploader = uploader
+            record.file_name = file_name
+            record.authz = authz
+
+            session.add(record)
+            session.commit()
+
+            return record.guid, record.rev, record.baseid
+
+    def update_blank_record(self, did, rev, size, hashes, urls, authz=None):
+        """
+        Update a blank record with size, hashes, urls and authz; raise an
+        exception if the record is non-empty or the revision does not match
+        """
+        hashes = hashes or {}
+        urls = urls or []
+
+        with self.session as session:
+            query = session.query(Record).filter(Record.guid == did)
+
+            try:
+                record = query.one()
+            except NoResultFound:
+                raise NoRecordFound("no record found")
+            except MultipleResultsFound:
+                raise MultipleRecordsFound("multiple records found")
+
+            if record.size or record.hashes:
+                raise UserError("update api is not supported for non-empty record!")
+
+            if rev != record.rev:
+                raise RevisionMismatch("revision mismatch")
+
+            record.size = size
+            record.hashes = hashes
+            record.urls = list(set(urls))
+
+            authorized = False
+            authz_err_msg = "Auth error when attempting to update a blank record. User must have '{}' access on '{}' for service 'indexd'."
+
+            if authz:
+                # if an authz is provided, ensure that the user can actually
+                # create/update for that resource (old authz and new authz)
+                old_authz = [u for u in record.authz] if record.authz else []
+                all_authz = old_authz + authz
+                try:
+                    auth.authorize("update", all_authz)
+                    authorized = True
+                except AuthError as err:
+                    self.logger.error(
+                        authz_err_msg.format("update", all_authz)
+                        + " Falling back to 'file_upload' on '/data_file'."
+                    )
+
+                # ARRAY columns expect a list, not a set
+                record.authz = list(set(authz))
+
+            if not authorized:
+                # either no 'authz' was provided, or the user doesn't have the
+                # right CRUD access. Fall back on the 'file_upload' logic
+                try:
+                    auth.authorize("file_upload", ["/data_file"])
+                except AuthError as err:
+                    self.logger.error(authz_err_msg.format("file_upload", "/data_file"))
+                    raise
+
+            record.rev = str(uuid.uuid4())[:8]
+
+            record.updated_date = datetime.datetime.utcnow()
+
+            session.add(record)
+            session.commit()
+
+            return record.guid, record.rev, record.baseid
+
+    def get_by_alias(self, alias):
+        """
+        Gets a record given a record alias
+        """
+        with self.session as session:
+            try:
+                record = session.query(Record).filter(Record.alias.any(alias)).one()
+            except NoResultFound:
+                raise NoRecordFound("no record found")
+            except MultipleResultsFound:
+                raise MultipleRecordsFound("multiple records found")
+            return record.to_document_dict()
+
+    def get_aliases_for_did(self, did):
+        """
+        Gets the aliases for a did
+        """
+        with self.session as session:
+            self.logger.info(f"Trying to get all aliases for did {did}...")
+
+            index_record = get_record_if_exists(did, session)
+            if index_record is None:
+                self.logger.warning(f"No record found for did {did}")
+                raise NoRecordFound(did)
+
+            # Record.alias is a flat array of alias names on the single record
+            return index_record.alias or []
+    def append_aliases_for_did(self, aliases, did):
+        """
+        Append one or more aliases to the aliases already associated with this
+        DID / GUID.
+        """
+        with self.session as session:
+            self.logger.info(
+                f"Trying to append new aliases {aliases} to aliases for did {did}..."
+            )
+
+            index_record = get_record_if_exists(did, session)
+            if index_record is None:
+                self.logger.warning(f"No record found for did {did}")
+                raise NoRecordFound(did)
+
+            # authorization; Record.authz is already an array of resource strings
+            try:
+                resources = index_record.authz or []
+                auth.authorize("update", resources)
+            except AuthError as err:
+                self.logger.warning(
+                    f"Auth error while appending aliases to did {did}: User not authorized to update one or more of these resources: {resources}"
+                )
+                raise err
+
+            # add new aliases
+            query = session.query(Record).filter(Record.guid == did)
+            record = query.one()
+
+            try:
+                record.alias = (record.alias or []) + aliases
+                session.commit()
+            except IntegrityError as err:
+                # One or more aliases in the request were non-unique
+                self.logger.warning(
+                    f"One or more aliases in request already associated with this or another GUID: {aliases}",
+                    exc_info=True,
+                )
+                raise MultipleRecordsFound(
+                    f"One or more aliases in request already associated with this or another GUID: {aliases}"
+                )
+
+    def replace_aliases_for_did(self, aliases, did):
+        """
+        Replace all aliases for one DID / GUID with new aliases.
+        """
+        with self.session as session:
+            self.logger.info(
+                f"Trying to replace aliases for did {did} with new aliases {aliases}..."
+            )
+
+            index_record = get_record_if_exists(did, session)
+            if index_record is None:
+                self.logger.warning(f"No record found for did {did}")
+                raise NoRecordFound(did)
+
+            # authorization; Record.authz is already an array of resource strings
+            try:
+                resources = index_record.authz or []
+                auth.authorize("update", resources)
+            except AuthError as err:
+                self.logger.warning(
+                    f"Auth error while replacing aliases for did {did}: User not authorized to update one or more of these resources: {resources}"
+                )
+                raise err
+
+            try:
+                query = session.query(Record).filter(Record.guid == did)
+                record = query.one()
+                # replace this GUID's aliases
+                record.alias = aliases
+                session.commit()
+                self.logger.info(
+                    f"Replaced aliases for did {did} with new aliases {aliases}"
+                )
+            except IntegrityError:
+                # One or more aliases in the request were non-unique
+                self.logger.warning(
+                    f"One or more aliases in request already associated with another GUID: {aliases}"
+                )
+                raise MultipleRecordsFound(
+                    f"One or more aliases in request already associated with another GUID: {aliases}"
+                )
+
+    def delete_all_aliases_for_did(self, did):
+        """
+        Delete all of this DID / GUID's aliases.
+        """
+        with self.session as session:
+            self.logger.info(f"Trying to delete all aliases for did {did}...")
+
+            index_record = get_record_if_exists(did, session)
+            if index_record is None:
+                self.logger.warning(f"No record found for did {did}")
+                raise NoRecordFound(did)
+
+            # authorization; Record.authz is already an array of resource strings
+            try:
+                resources = index_record.authz or []
+                auth.authorize("delete", resources)
+            except AuthError as err:
+                self.logger.warning(
+                    f"Auth error while deleting all aliases for did {did}: User not authorized to delete one or more of these resources: {resources}"
+                )
+                raise err
+
+            query = session.query(Record).filter(Record.guid == did)
+            record = query.one()
+            # delete this GUID's aliases
+            record.alias = []
+            session.commit()
+
+            self.logger.info(f"Deleted all aliases for did {did}.")
+    def delete_one_alias_for_did(self, alias, did):
+        """
+        Delete one of this DID / GUID's aliases.
+        """
+        with self.session as session:
+            self.logger.info(f"Trying to delete alias {alias} for did {did}...")
+
+            index_record = get_record_if_exists(did, session)
+            if index_record is None:
+                self.logger.warning(f"No record found for did {did}")
+                raise NoRecordFound(did)
+
+            # authorization; Record.authz is already an array of resource strings
+            try:
+                resources = index_record.authz or []
+                auth.authorize("delete", resources)
+            except AuthError as err:
+                self.logger.warning(
+                    f"Auth error deleting alias {alias} for did {did}: User not authorized to delete one or more of these resources: {resources}"
+                )
+                raise err
+
+            query = session.query(Record).filter(Record.guid == did)
+            record = query.one()
+            # delete just this alias; reassign the array (rather than mutating
+            # it in place) so SQLAlchemy registers the change
+            if alias in (record.alias or []):
+                record.alias = [a for a in record.alias if a != alias]
+                session.commit()
+            else:
+                self.logger.warning(f"No alias {alias} found for did {did}")
+                raise NoRecordFound(alias)
+
+            self.logger.info(f"Deleted alias {alias} for did {did}.")
+
+    def get(self, guid, expand=True):
+        """
+        Gets a record given the record id or baseid.
+        If the given id is a baseid, it will return the latest version
+        """
+        with self.session as session:
+            query = session.query(Record)
+            query = query.filter(
+                or_(Record.guid == guid, Record.baseid == guid)
+            ).order_by(Record.created_date.desc())
+
+            record = query.first()
+            if record is None:
+                try:
+                    record = self.get_bundle(bundle_id=guid, expand=expand)
+                    return record
+                except NoRecordFound:
+                    raise NoRecordFound("no record found")
+
+            return record.to_document_dict()
+
+    def get_with_nonstrict_prefix(self, guid, expand=True):
+        """
+        Attempt to retrieve a record both with and without a prefix.
+        Proxies 'get' with the provided id.
+        If not found but the prefix matches the default, attempt with the prefix stripped.
+        If not found and the id has no prefix, attempt with the default prefix prepended.
+        """
+        try:
+            record = self.get(guid, expand=expand)
+        except NoRecordFound as e:
+            DEFAULT_PREFIX = self.config.get("DEFAULT_PREFIX")
+            if not DEFAULT_PREFIX:
+                raise e
+
+            if not guid.startswith(DEFAULT_PREFIX):
+                record = self.get(DEFAULT_PREFIX + guid, expand=expand)
+            else:
+                stripped = guid.split(DEFAULT_PREFIX, 1)[1]
+                record = self.get(stripped, expand=expand)
+
+        return record
+
+    def update(self, did, rev, changing_fields):
+        """
+        Updates an existing record with new values.
+        """
+        authz_err_msg = "Auth error when attempting to update a record. User must have '{}' access on '{}' for service 'indexd'."
+
+        # Field names as they appear in the request payload; anything not in
+        # this list is set on the record directly via setattr below.
+        composite_fields = [
+            "urls",
+            "acl",
+            "authz",
+            "metadata",
+            "urls_metadata",
+            "content_created_date",
+            "content_updated_date",
+        ]
+
+        with self.session as session:
+            query = session.query(Record).filter(Record.guid == did)
+
+            try:
+                record = query.one()
+            except NoResultFound:
+                raise NoRecordFound("no record found")
+            except MultipleResultsFound:
+                raise MultipleRecordsFound("multiple records found")
+
+            if rev != record.rev:
+                raise RevisionMismatch("Revision mismatch")
+
+            # Some operations depend on others. For example, urls has to be
+            # updated before url_metadata because of schema constraints.
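+            # Example (illustrative) payload:
+            #   changing_fields = {
+            #       "file_name": "renamed.bam",
+            #       "acl": ["phs000123"],
+            #       "metadata": {"state": "validated"},
+            #   }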
+            if "urls" in changing_fields:
+                record.urls = list(set(changing_fields["urls"]))
+
+            if "acl" in changing_fields:
+                record.acl = list(set(changing_fields["acl"]))
+
+            all_authz = list(set(record.authz)) if record.authz else []
+            if "authz" in changing_fields:
+                new_authz = list(set(changing_fields["authz"]))
+                all_authz += new_authz
+                record.authz = new_authz
+
+            # authorization check: `update` access on old AND new resources
+            try:
+                auth.authorize("update", all_authz)
+            except AuthError:
+                self.logger.error(authz_err_msg.format("update", all_authz))
+                raise
+
+            if "metadata" in changing_fields:
+                record.record_metadata = changing_fields["metadata"]
+
+            if "urls_metadata" in changing_fields:
+                check_url_metadata(changing_fields["urls_metadata"], record)
+                record.url_metadata = changing_fields["urls_metadata"]
+
+            if changing_fields.get("content_created_date") is not None:
+                record.content_created_date = datetime.datetime.fromisoformat(
+                    changing_fields["content_created_date"]
+                )
+            if changing_fields.get("content_updated_date") is not None:
+                if record.content_created_date is None:
+                    raise UserError(
+                        "Cannot set content_updated_date on a record that does not have a content_created_date"
+                    )
+                if record.content_created_date > datetime.datetime.fromisoformat(
+                    changing_fields["content_updated_date"]
+                ):
+                    raise UserError(
+                        "Cannot set content_updated_date before the content_created_date"
+                    )
+
+                record.content_updated_date = datetime.datetime.fromisoformat(
+                    changing_fields["content_updated_date"]
+                )
+
+            for key, value in changing_fields.items():
+                if key not in composite_fields:
+                    # No special logic needed for other updates,
+                    # i.e. file_name, version, etc.
+                    setattr(record, key, value)
+
+            record.rev = str(uuid.uuid4())[:8]
+
+            record.updated_date = datetime.datetime.utcnow()
+
+            session.add(record)
+
+            return record.guid, record.baseid, record.rev
+ """ + with self.session as session: + query = session.query(Record) + query = query.filter(Record.guid == guid) + + try: + record = query.one() + except NoResultFound: + raise NoRecordFound("no record found") + except MultipleResultsFound: + raise MultipleRecordsFound("multiple records found") + + if rev != record.rev: + raise RevisionMismatch("revision mismatch") + + auth.authorize("delete", [u.resource for u in record.authz]) + + session.delete(record) + + def add_version( + self, + current_guid, + form, + new_did=None, + size=None, + file_name=None, + metadata=None, + urls_metadata=None, + version=None, + urls=None, + acl=None, + authz=None, + hashes=None, + description=None, + content_created_date=None, + content_updated_date=None, + ): + """ + Add a record version given guid + """ + urls = urls or [] + acl = acl or [] + authz = authz or [] + hashes = hashes or {} + metadata = metadata or {} + urls_metadata = urls_metadata or {} + + with self.session as session: + query = session.query(Record).filter_by(guid=current_guid) + + try: + record = query.one() + except NoResultFound: + raise NoRecordFound("no record found") + except MultipleResultsFound: + raise MultipleRecordsFound("multiple records found") + + auth.authorize("update", [u for u in record.authz] + authz) + + baseid = record.baseid + record = Record() + guid = new_did + if not guid: + guid = str(uuid.uuid4()) + if self.config.get("PREPEND_PREFIX"): + guid = self.config["DEFAULT_PREFIX"] + guid + + record.guid = guid + record.baseid = baseid + record.rev = str(uuid.uuid4())[:8] + record.form = form + record.size = size + record.file_name = file_name + record.version = version + record.description = description + record.urls = urls + record.acl = acl + record.authz = authz + record.hashes = hashes + record.record_metadata = metadata + + self._validate_and_set_content_dates( + record=record, + content_created_date=content_created_date, + content_updated_date=content_updated_date, + ) + + check_url_metadata(urls_metadata, record) + record.url_metadata = urls_metadata + + try: + session.add(record) + session.commit() + except IntegrityError: + raise MultipleRecordsFound("{guid} already exists".format(guid=guid)) + + return record.guid, record.baseid, record.rev + + def add_blank_version( + self, current_guid, new_did=None, file_name=None, uploader=None, authz=None + ): + """ + Add a blank record version given did. + If authz is not specified, acl/authz fields carry over from previous version. + """ + # if an authz is provided, ensure that user can actually create for that resource + authz_err_msg = "Auth error when attempting to update a record. User must have '{}' access on '{}' for service 'indexd'." 
+        if authz:
+            try:
+                auth.authorize("create", authz)
+            except AuthError:
+                self.logger.error(authz_err_msg.format("create", authz))
+                raise
+
+        with self.session as session:
+            query = session.query(Record).filter_by(guid=current_guid)
+
+            try:
+                old_record = query.one()
+            except NoResultFound:
+                raise NoRecordFound("no record found")
+            except MultipleResultsFound:
+                raise MultipleRecordsFound("multiple records found")
+
+            old_authz = old_record.authz
+            try:
+                auth.authorize("update", old_authz)
+            except AuthError:
+                self.logger.error(authz_err_msg.format("update", old_authz))
+                raise
+
+            # handle the edge case where new_did matches the original doc's
+            # guid, to prevent a sqlalchemy FlushError
+            if new_did == old_record.guid:
+                raise MultipleRecordsFound("{guid} already exists".format(guid=new_did))
+
+            new_record = Record()
+            guid = new_did
+            if not guid:
+                guid = str(uuid.uuid4())
+                if self.config.get("PREPEND_PREFIX"):
+                    guid = self.config["DEFAULT_PREFIX"] + guid
+
+            new_record.guid = guid
+            new_record.baseid = old_record.baseid
+            new_record.rev = str(uuid.uuid4())[:8]
+            new_record.file_name = old_record.file_name
+            new_record.uploader = old_record.uploader
+
+            new_record.acl = []
+            if not authz:
+                authz = old_authz
+                old_acl = old_record.acl
+                new_record.acl = old_acl
+            new_record.authz = authz
+
+            try:
+                session.add(new_record)
+                session.commit()
+            except IntegrityError:
+                raise MultipleRecordsFound("{guid} already exists".format(guid=guid))
+
+            return new_record.guid, new_record.baseid, new_record.rev
+
+    def get_all_versions(self, guid):
+        """
+        Get all record versions (in order of creation) given a DID.
+        """
+        ret = dict()
+        with self.session as session:
+            query = session.query(Record)
+            query = query.filter(Record.guid == guid)
+
+            try:
+                record = query.one()
+                baseid = record.baseid
+            except NoResultFound:
+                # the guid may itself be a baseid
+                record = session.query(Record).filter_by(baseid=guid).first()
+                if not record:
+                    raise NoRecordFound("no record found")
+                else:
+                    baseid = record.baseid
+            except MultipleResultsFound:
+                raise MultipleRecordsFound("multiple records found")
+
+            # find all versions of this record
+            query = session.query(Record)
+            records = (
+                query.filter(Record.baseid == baseid)
+                .order_by(Record.created_date.asc())
+                .all()
+            )
+
+            for idx, record in enumerate(records):
+                ret[idx] = record.to_document_dict()
+
+            return ret
+
+    def update_all_versions(self, guid, acl=None, authz=None):
+        """
+        Update all record versions with new acl and authz.
+        """
+        with self.session as session:
+            query = session.query(Record)
+            query = query.filter(Record.guid == guid)
+
+            try:
+                record = query.one()
+                baseid = record.baseid
+            except NoResultFound:
+                record = session.query(Record).filter_by(baseid=guid).first()
+                if not record:
+                    raise NoRecordFound("no record found")
+                else:
+                    baseid = record.baseid
+            except MultipleResultsFound:
+                raise MultipleRecordsFound("multiple records found")
+
+            # find all versions of this record
+            query = session.query(Record)
+            records = (
+                query.filter(Record.baseid == baseid)
+                .order_by(Record.created_date.asc())
+                .all()
+            )
+
+            # the user requires update permissions on all versions of the record
+            all_resources = []
+            for rec in records:
+                all_resources += rec.authz or []
+            auth.authorize("update", list(set(all_resources)))
+
+            ret = []
+            # update fields for all versions
+            for record in records:
+                # arrays are stored as lists; dedupe before writing
+                record.acl = list(set(acl)) if acl else None
+                record.authz = list(set(authz)) if authz else None
+
+                record.rev = str(uuid.uuid4())[:8]
+                ret.append(
+                    {"did": record.guid, "baseid": record.baseid, "rev": record.rev}
+                )
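+            # commit once so the change lands atomically across all versions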
+            session.commit()
+            return ret
+
+    def get_latest_version(self, guid, has_version=None):
+        """
+        Get the latest record version given a did.
+        """
+        with self.session as session:
+            query = session.query(Record)
+            query = query.filter(Record.guid == guid)
+
+            try:
+                record = query.one()
+                baseid = record.baseid
+            except NoResultFound:
+                # the guid may itself be a baseid
+                baseid = guid
+            except MultipleResultsFound:
+                raise MultipleRecordsFound("multiple records found")
+
+            query = session.query(Record)
+            query = query.filter(Record.baseid == baseid).order_by(
+                Record.created_date.desc()
+            )
+
+            if has_version:
+                query = query.filter(Record.version.isnot(None))
+            record = query.first()
+            if not record:
+                raise NoRecordFound("no record found")
+
+            return record.to_document_dict()
+
+    def health_check(self):
+        """
+        Does a health check of the backend.
+        """
+        with self.session as session:
+            try:
+                session.execute("SELECT 1")
+            except Exception:
+                raise UnhealthyCheck()
+
+            return True
+
+    def __contains__(self, record):
+        """
+        Returns True if the record is stored by the backend,
+        False otherwise.
+        """
+        with self.session as session:
+            query = session.query(Record)
+            query = query.filter(Record.guid == record)
+
+            # evaluate the EXISTS clause instead of returning the raw construct
+            return session.query(query.exists()).scalar()
+
+    def __iter__(self):
+        """
+        Iterator over unique records stored by the backend.
+        """
+        with self.session as session:
+            for i in session.query(Record):
+                # the single-table model stores the identifier as `guid`
+                yield i.guid
+
+    def totalbytes(self):
+        """
+        Total number of bytes of data represented in the index.
+        """
+        with self.session as session:
+            result = session.execute(select([func.sum(Record.size)])).scalar()
+            if result is None:
+                return 0
+            return int(result)
+
+    def len(self):
+        """
+        Number of unique records stored by the backend.
+        """
+        with self.session as session:
+            return session.execute(select([func.count()]).select_from(Record)).scalar()
+
+    def add_bundle(
+        self,
+        bundle_id=None,
+        name=None,
+        checksum=None,
+        size=None,
+        bundle_data=None,
+        description=None,
+        version=None,
+        aliases=None,
+    ):
+        """
+        Add a bundle record.
+        """
+        with self.session as session:
+            record = DrsBundleRecord()
+            if not bundle_id:
+                bundle_id = str(uuid.uuid4())
+                if self.config.get("PREPEND_PREFIX"):
+                    bundle_id = self.config["DEFAULT_PREFIX"] + bundle_id
+            if not name:
+                name = bundle_id
+
+            record.bundle_id = bundle_id
+            record.name = name
+            record.checksum = checksum
+            record.size = size
+            record.bundle_data = bundle_data
+            record.description = description
+            record.version = version
+            record.aliases = aliases
+
+            try:
+                session.add(record)
+                session.commit()
+            except IntegrityError:
+                raise MultipleRecordsFound(
+                    'bundle id "{bundle_id}" already exists'.format(
+                        bundle_id=record.bundle_id
+                    )
+                )
+
+            return record.bundle_id, record.name, record.bundle_data
+
+    def get_bundle_list(self, start=None, limit=100, page=None):
+        """
+        Returns a list of all bundles.
+        """
+        with self.session as session:
+            query = session.query(DrsBundleRecord)
+
+            # apply filters before LIMIT; sqlalchemy raises if filter() is
+            # called on a query that already has LIMIT/OFFSET applied
+            if start is not None:
+                query = query.filter(DrsBundleRecord.bundle_id > start)
+
+            if page is not None:
+                query = query.offset(limit * page)
+
+            query = query.limit(limit)
+
+            return [i.to_document_dict() for i in query]
+
+    def get_bundle(self, bundle_id, expand=False):
+        """
+        Gets a bundle record given the bundle_id.
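+        If multiple bundles share the id, the most recently created one is returned.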
+ """ + with self.session as session: + query = session.query(DrsBundleRecord) + + query = query.filter(or_(DrsBundleRecord.bundle_id == bundle_id)).order_by( + DrsBundleRecord.created_time.desc() + ) + + record = query.first() + if record is None: + raise NoRecordFound("No bundle found") + + doc = record.to_document_dict(expand) + + return doc + + def get_bundle_and_object_list( + self, + limit=100, + page=None, + start=None, + size=None, + urls=None, + acl=None, + authz=None, + hashes=None, + file_name=None, + version=None, + uploader=None, + metadata=None, + ids=None, + urls_metadata=None, + negate_params=None, + ): + """ + Gets bundles and objects and orders them by created time. + """ + limit = int((limit / 2) + 1) + bundle = self.get_bundle_list(start=start, limit=limit, page=page) + objects = self.ids( + limit=limit, + page=page, + start=start, + size=size, + urls=urls, + acl=acl, + authz=authz, + hashes=hashes, + file_name=file_name, + version=version, + uploader=uploader, + metadata=metadata, + ids=ids, + urls_metadata=urls_metadata, + negate_params=negate_params, + ) + + ret = [] + i = 0 + j = 0 + + while i + j < len(bundle) + len(objects): + if i != len(bundle) and ( + j == len(objects) + or bundle[i]["created_time"] > objects[j]["created_date"] + ): + ret.append(bundle[i]) + i += 1 + else: + ret.append(objects[j]) + j += 1 + return ret + + def delete_bundle(self, bundle_id): + with self.session as session: + query = session.query(DrsBundleRecord) + query = query.filter(DrsBundleRecord.bundle_id == bundle_id) + + try: + record = query.one() + except NoResultFound: + raise NoRecordFound("No bundle found") + except MultipleResultsFound: + raise MultipleRecordsFound("Multiple bundles found") + + session.delete(record) + + def query_urls( + self, + exclude=None, + include=None, + versioned=None, + offset=0, + limit=1000, + fields="did,urls", + **kwargs, + ): + if kwargs: + raise UserError( + "Unexpected query parameter(s) {}".format(list(kwargs.keys())) + ) + + versioned = ( + versioned.lower() in ["true", "t", "yes", "y"] if versioned else None + ) + + with self.session as session: + query = session.query(Record.guid, Record.urls) + + # add version filter if versioned is not None + if versioned is True: # retrieve only those with a version number + query = query.filter(Record.version.isnot(None)) + elif versioned is False: # retrieve only those without a version number + query = query.filter(~Record.version.isnot(None)) + + query = query.group_by(Record.guid) + + # add url filters + if include and exclude: + query = query.having( + and_( + ~func.array_to_string(Record.urls, ",").contains(exclude), + func.array_to_string(Record.urls, ",").contains(include), + ) + ) + elif include: + query = query.having( + func.array_to_string(Record.urls, ",").contains(include) + ) + elif exclude: + query = query.having( + ~func.array_to_string(Record.urls, ",").contains(exclude) + ) + record_list = ( + query.order_by(Record.guid.asc()).offset(offset).limit(limit).all() + ) + return self._format_response(fields, record_list) + + def query_metadata_by_key( + self, + key, + value, + url=None, + versioned=None, + offset=0, + limit=1000, + fields="did,urls,rev", + **kwargs, + ): + if kwargs: + raise UserError( + "Unexpected query parameter(s) {}".format(list(kwargs.keys())) + ) + + versioned = ( + versioned.lower() in ["true", "t", "yes", "y"] if versioned else None + ) + with self.session as session: + query = session.query(Record.guid, Record.urls, Record.rev) + + query = query.filter( + 
+                func.jsonb_path_exists(
+                    Record.url_metadata, f'$.* ? (@.{key} == "{value}")'
+                )
+            )
+
+            # add version filter if versioned is not None
+            if versioned is True:  # retrieve only those with a version number
+                query = query.filter(Record.version.isnot(None))
+            elif versioned is False:  # retrieve only those without a version number
+                query = query.filter(~Record.version.isnot(None))
+
+            if url:
+                query = query.filter(
+                    func.array_to_string(Record.urls, ",").contains(url)
+                )
+            # [('did', 'url', 'rev')]
+            record_list = (
+                query.order_by(Record.guid.asc()).offset(offset).limit(limit).all()
+            )
+        return self._format_response(fields, record_list)
+
+    @staticmethod
+    def _format_response(requested_fields, record_list):
+        """
+        Loops through the query result, drops undesired columns, and converts
+        the aggregated urls string into a list.
+
+        Args:
+            requested_fields (str): comma-separated list of fields to return;
+                if not specified, all fields are returned
+            record_list (list[tuple]): must be of the form [(did, urls, rev)];
+                rev is not required for the urls query
+
+        Returns:
+            list[dict]: list of response dicts
+        """
+        result = []
+        provided_fields_dict = {k: 1 for k in requested_fields.split(",")}
+        for record in record_list:
+            resp_dict = {}
+            if provided_fields_dict.get("did"):
+                resp_dict["did"] = record[0]
+            if provided_fields_dict.get("urls"):
+                resp_dict["urls"] = record[1] if record[1] else []
+
+            # rev is only present when the query returned a 3-tuple
+            if provided_fields_dict.get("rev") and len(record) == 3:
+                resp_dict["rev"] = record[2]
+            result.append(resp_dict)
+        return result
+
+
+def check_url_metadata(url_metadata, record):
+    """
+    Validate that every url in url_metadata exists in the record's urls.
+    """
+    urls = {u for u in record.urls}
+    for url in url_metadata:
+        if url not in urls:
+            raise UserError("url {} in url_metadata does not exist".format(url))
+
+
+def get_record_if_exists(did, session):
+    """
+    Searches for a record with this did and returns it.
+    If no record is found, returns None.
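+    Unlike the driver methods, this helper does not raise NoRecordFound.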
+ """ + return session.query(Record).filter(Record.guid == did).first() diff --git a/indexd/urls/blueprint.py b/indexd/urls/blueprint.py index 7d34d75a..03208bc1 100644 --- a/indexd/urls/blueprint.py +++ b/indexd/urls/blueprint.py @@ -5,6 +5,7 @@ from indexd.errors import UserError from indexd.index.drivers.query.urls import AlchemyURLsQueryDriver +from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver blueprint = Blueprint("urls", __name__) @@ -71,7 +72,11 @@ def query_metadata(): def pre_config(state): driver = state.app.config["INDEX"]["driver"] blueprint.logger = state.app.logger - blueprint.driver = AlchemyURLsQueryDriver(driver) + blueprint.driver = ( + driver + if type(driver) == SingleTableSQLAlchemyIndexDriver + else AlchemyURLsQueryDriver(driver) + ) @blueprint.errorhandler(UserError) diff --git a/indexd/utils.py b/indexd/utils.py index 29a467ab..4c184e0f 100644 --- a/indexd/utils.py +++ b/indexd/utils.py @@ -18,7 +18,7 @@ def try_drop_test_data( user, database, root_user="postgres", host="" ): # pragma: no cover engine = create_engine( - "postgres://{user}@{host}/postgres".format(user=root_user, host=host) + "postgresql://{user}@{host}/postgres".format(user=root_user, host=host) ) conn = engine.connect() @@ -50,7 +50,7 @@ def setup_database( try_drop_test_data(user, database) engine = create_engine( - "postgres://{user}@{host}/postgres".format(user=root_user, host=host) + "postgresql://{user}@{host}/postgres".format(user=root_user, host=host) ) conn = engine.connect() conn.execute("commit") @@ -84,7 +84,7 @@ def create_tables(host, user, password, database): # pragma: no cover create tables """ engine = create_engine( - "postgres://{user}:{pwd}@{host}/{db}".format( + "postgresql://{user}:{pwd}@{host}/{db}".format( user=user, host=host, pwd=password, db=database ) ) diff --git a/migrations/versions/bb3d7586a096_createsingletable.py b/migrations/versions/bb3d7586a096_createsingletable.py new file mode 100644 index 00000000..b1798aa4 --- /dev/null +++ b/migrations/versions/bb3d7586a096_createsingletable.py @@ -0,0 +1,48 @@ +"""CreateSingleTable + +Revision ID: bb3d7586a096 +Revises: a72f117515c5 +Create Date: 2023-10-24 14:46:03.868952 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import JSONB + + +# revision identifiers, used by Alembic. 
+revision = "bb3d7586a096"  # pragma: allowlist secret
+down_revision = "a72f117515c5"
+branch_labels = None
+depends_on = None
+
+
+# TODO: We need another migration that clears up old tables
+def upgrade() -> None:
+    op.create_table(
+        "record",
+        sa.Column("guid", sa.VARCHAR(), primary_key=True),
+        sa.Column("baseid", sa.VARCHAR(), index=True),
+        sa.Column("rev", sa.VARCHAR()),
+        sa.Column("form", sa.VARCHAR()),
+        sa.Column("size", sa.BIGINT()),
+        sa.Column("created_date", sa.DateTime, nullable=True),
+        sa.Column("updated_date", sa.DateTime, nullable=True),
+        sa.Column("file_name", sa.VARCHAR()),
+        sa.Column("version", sa.VARCHAR()),
+        sa.Column("uploader", sa.VARCHAR()),
+        sa.Column("description", sa.VARCHAR()),
+        sa.Column("content_created_date", sa.DateTime),
+        sa.Column("content_updated_date", sa.DateTime),
+        sa.Column("hashes", JSONB()),
+        sa.Column("acl", sa.ARRAY(sa.VARCHAR())),
+        sa.Column("authz", sa.ARRAY(sa.VARCHAR())),
+        sa.Column("urls", sa.ARRAY(sa.VARCHAR())),
+        sa.Column("record_metadata", JSONB()),
+        sa.Column("url_metadata", JSONB()),
+        sa.Column("alias", sa.ARRAY(sa.VARCHAR())),
+    )
+
+
+def downgrade() -> None:
+    op.drop_table("record")
diff --git a/poetry.lock b/poetry.lock
index 1f6445de..8603d2e3 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,14 +1,14 @@
-# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
 
 [[package]]
 name = "alembic"
-version = "1.13.1"
+version = "1.13.2"
 description = "A database migration tool for SQLAlchemy."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"},
-    {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"},
+    {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"},
+    {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"},
 ]
 
 [package.dependencies]
@@ -21,13 +21,13 @@ tz = ["backports.zoneinfo"]
 
 [[package]]
 name = "anyio"
-version = "4.3.0"
+version = "4.4.0"
 description = "High level compatibility layer for multiple asynchronous event loop implementations"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"},
-    {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"},
+    {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
+    {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"},
 ]
 
 [package.dependencies]
@@ -53,32 +53,32 @@ files = [
 
 [[package]]
 name = "attrs"
-version = "23.2.0"
+version = "24.2.0"
 description = "Classes Without Boilerplate"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
-    {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
+    {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"},
+    {file = "attrs-24.2.0.tar.gz", hash
= "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "authlib" -version = "1.3.0" +version = "1.3.2" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." optional = false python-versions = ">=3.8" files = [ - {file = "Authlib-1.3.0-py2.py3-none-any.whl", hash = "sha256:9637e4de1fb498310a56900b3e2043a206b03cb11c05422014b0302cbc814be3"}, - {file = "Authlib-1.3.0.tar.gz", hash = "sha256:959ea62a5b7b5123c5059758296122b57cd2585ae2ed1c0622c21b371ffdae06"}, + {file = "Authlib-1.3.2-py2.py3-none-any.whl", hash = "sha256:ede026a95e9f5cdc2d4364a52103f5405e75aa156357e831ef2bfd0bc5094dfc"}, + {file = "authlib-1.3.2.tar.gz", hash = "sha256:4b16130117f9eb82aa6eec97f6dd4673c3f960ac0283ccdae2897ee4bc030ba2"}, ] [package.dependencies] @@ -86,19 +86,20 @@ cryptography = "*" [[package]] name = "authutils" -version = "6.2.3" +version = "6.2.5" description = "Gen3 auth utility functions" optional = false -python-versions = ">=3.9,<4.0" +python-versions = "<4.0,>=3.9" files = [ - {file = "authutils-6.2.3-py3-none-any.whl", hash = "sha256:8c4e7e24183cbc1055ab91eaf9d2966f0da5b739be0fabfe380749674ba3057e"}, - {file = "authutils-6.2.3.tar.gz", hash = "sha256:c44e1f309b2b5b6db1276f69e1e18ec5c6be1ae33bfe52608a30049b8669ff7b"}, + {file = "authutils-6.2.5-py3-none-any.whl", hash = "sha256:ef91c9c7c750123c28b7376be9ca00b4e89b2d52fa183dec9bfe681d8eac6227"}, + {file = "authutils-6.2.5.tar.gz", hash = "sha256:0d496721e9f0d8c69b34aff8f6fccdc7768ca4f104504d68e70fd647d4c23b19"}, ] [package.dependencies] authlib = ">=1.1.0" cached-property = ">=1.4,<2.0" cdiserrors = "<2.0.0" +cryptography = ">=41.0.6" httpx = ">=0.23.0,<1.0.0" pyjwt = {version = ">=2.4.0,<3.0", extras = ["crypto"]} xmltodict = ">=0.9,<1.0" @@ -120,13 +121,13 @@ files = [ [[package]] name = "blinker" -version = "1.7.0" +version = "1.8.2" description = "Fast, simple object-to-object and broadcast signaling" optional = false python-versions = ">=3.8" files = [ - {file = "blinker-1.7.0-py3-none-any.whl", hash = "sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9"}, - {file = "blinker-1.7.0.tar.gz", hash = "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"}, + {file = "blinker-1.8.2-py3-none-any.whl", hash = 
"sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, ] [[package]] @@ -167,91 +168,91 @@ files = [ {file = "cdislogging-1.1.1.tar.gz", hash = "sha256:77e11648244cda3a8094b8ae6081435a2303f259612846c49ef8825c7be141e3"}, ] -[[package]] -name = "cdisutilstest" -version = "0.2.4" -description = "Collection of test data and tools" -optional = false -python-versions = "*" -files = [] -develop = false - -[package.source] -type = "git" -url = "https://github.com/uc-cdis/cdisutils-test" -reference = "1.0.0" -resolved_reference = "bdfdeb05e45407e839fd954ce6d195d847cd8024" - [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = 
"cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -464,43 +465,38 @@ yaml = ["PyYAML (>=3.10)"] [[package]] name = "cryptography" -version = "42.0.5" +version = "43.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, - {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, - {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, - {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, - {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, - {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, - {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, - {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, - {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, - {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, - {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = 
"sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, - {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, - {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, - {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, + {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, + {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, + {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, + {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, + {file = 
"cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, + {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, + {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, + {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, ] [package.dependencies] @@ -513,7 +509,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -564,13 +560,13 @@ resolved_reference = "38c0f1ab42edf3efb1ad6348d7dbdff81b131360" [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -616,6 +612,77 @@ cdiserrors = "<2.0.0" httpx = ">=0.20.0,<1.0.0" six = ">=1.16.0,<2.0.0" +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + 
{file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + [[package]] name = "h11" version = "0.14.0" @@ -647,13 +714,13 @@ resolved_reference = "f122072ee245216da5e4260f718d6f886db81773" [[package]] name = "httpcore" -version = "1.0.4" +version = "1.0.5" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.4-py3-none-any.whl", hash = "sha256:ac418c1db41bade2ad53ae2f3834a3a0f5ae76b56cf5aa497d2d033384fc7d73"}, - {file = "httpcore-1.0.4.tar.gz", hash = "sha256:cb2839ccfcba0d2d3c1131d3c3e26dfc327326fbe7a5dc0dbfe9f6c9151bb022"}, + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, ] [package.dependencies] @@ -664,17 +731,17 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.25.0)"] +trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.27.0" +version = "0.27.2" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, ] [package.dependencies] @@ -689,45 +756,46 @@ brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "idna" -version = "3.6" +version = "3.8" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] [[package]] name = "importlib-metadata" -version = "7.1.0" +version = "8.4.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "indexclient" -version = "2.2.1" +version = "2.3.1" description = "" optional = false python-versions = "*" files = [ - {file = "indexclient-2.2.1.tar.gz", hash = "sha256:d0374128a340b42042fc2d7c2484a8b1f36b90e05199e6f8357ad764dd6673fd"}, + {file = "indexclient-2.3.1.tar.gz", hash = "sha256:0beaf865aab58112961092aa58d06e31ca1cc8da26e9cd5cf84430d2f6567a0d"}, ] [package.dependencies] @@ -746,24 +814,24 @@ files = [ [[package]] name = "itsdangerous" -version = "2.1.2" +version = "2.2.0" description = "Safely pass data to 
untrusted environments and back." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, - {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, ] [[package]] name = "jinja2" -version = "3.1.3" +version = "3.1.4" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] @@ -795,13 +863,13 @@ format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-va [[package]] name = "mako" -version = "1.3.2" +version = "1.3.5" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false python-versions = ">=3.8" files = [ - {file = "Mako-1.3.2-py3-none-any.whl", hash = "sha256:32a99d70754dfce237019d17ffe4a282d2d3351b9c476e90d8a60e63f133b80c"}, - {file = "Mako-1.3.2.tar.gz", hash = "sha256:2a0c8ad7f6274271b3bb7467dd37cf9cc6dab4bc19cb69a4ef10669402de698e"}, + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, ] [package.dependencies] @@ -899,24 +967,24 @@ test = ["pytest (<5.4)", "pytest-cov"] [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = 
"sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -958,24 +1026,24 @@ files = [ [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] name = "pyjwt" -version = "2.8.0" +version = "2.9.0" description = "JSON Web Token implementation in Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, ] [package.dependencies] @@ -983,8 +1051,8 @@ cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryp [package.extras] crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] @@ -1092,72 +1160,75 @@ docs = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = 
"sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = 
"PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = 
"sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -1191,19 +1262,23 @@ tests = ["coverage (>=3.7.1,<6.0.0)", "flake8", "mypy", "pytest (>=4.6)", "pytes [[package]] name = "setuptools" -version = "69.2.0" +version = "74.1.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, - {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, + {file = "setuptools-74.1.2-py3-none-any.whl", hash = "sha256:5f4c08aa4d3ebcb57a50c33b1b07e94315d7fc7230f7115e47fc99776c8ce308"}, + {file = "setuptools-74.1.2.tar.gz", hash = "sha256:95b40ed940a1c67eb70fc099094bd6e99c6ee7c23aa2306f4d2697ba7916f9c6"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", 
"towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] [[package]] name = "six" @@ -1229,58 +1304,80 @@ files = [ [[package]] name = "sqlalchemy" -version = "1.3.24" +version = "1.4.54" description = "Database Abstraction Library" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "SQLAlchemy-1.3.24-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:87a2725ad7d41cd7376373c15fd8bf674e9c33ca56d0b8036add2d634dba372e"}, - {file = "SQLAlchemy-1.3.24-cp27-cp27m-win32.whl", hash = "sha256:f597a243b8550a3a0b15122b14e49d8a7e622ba1c9d29776af741f1845478d79"}, - {file = "SQLAlchemy-1.3.24-cp27-cp27m-win_amd64.whl", hash = "sha256:fc4cddb0b474b12ed7bdce6be1b9edc65352e8ce66bc10ff8cbbfb3d4047dbf4"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:f1149d6e5c49d069163e58a3196865e4321bad1803d7886e07d8710de392c548"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:14f0eb5db872c231b20c18b1e5806352723a3a89fb4254af3b3e14f22eaaec75"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:e98d09f487267f1e8d1179bf3b9d7709b30a916491997137dd24d6ae44d18d79"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:fc1f2a5a5963e2e73bac4926bdaf7790c4d7d77e8fc0590817880e22dd9d0b8b"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-win32.whl", hash = "sha256:f3c5c52f7cb8b84bfaaf22d82cb9e6e9a8297f7c2ed14d806a0f5e4d22e83fb7"}, - {file = "SQLAlchemy-1.3.24-cp35-cp35m-win_amd64.whl", hash = "sha256:0352db1befcbed2f9282e72843f1963860bf0e0472a4fa5cf8ee084318e0e6ab"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:2ed6343b625b16bcb63c5b10523fd15ed8934e1ed0f772c534985e9f5e73d894"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:34fcec18f6e4b24b4a5f6185205a04f1eab1e56f8f1d028a2a03694ebcc2ddd4"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:e47e257ba5934550d7235665eee6c911dc7178419b614ba9e1fbb1ce6325b14f"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:816de75418ea0953b5eb7b8a74933ee5a46719491cd2b16f718afc4b291a9658"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-win32.whl", hash = "sha256:26155ea7a243cbf23287f390dba13d7927ffa1586d3208e0e8d615d0c506f996"}, - {file = "SQLAlchemy-1.3.24-cp36-cp36m-win_amd64.whl", hash = "sha256:f03bd97650d2e42710fbe4cf8a59fae657f191df851fc9fc683ecef10746a375"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a006d05d9aa052657ee3e4dc92544faae5fcbaafc6128217310945610d862d39"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1e2f89d2e5e3c7a88e25a3b0e43626dba8db2aa700253023b82e630d12b37109"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:0d5d862b1cfbec5028ce1ecac06a3b42bc7703eb80e4b53fceb2738724311443"}, - {file 
= "SQLAlchemy-1.3.24-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:0172423a27fbcae3751ef016663b72e1a516777de324a76e30efa170dbd3dd2d"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-win32.whl", hash = "sha256:d37843fb8df90376e9e91336724d78a32b988d3d20ab6656da4eb8ee3a45b63c"}, - {file = "SQLAlchemy-1.3.24-cp37-cp37m-win_amd64.whl", hash = "sha256:c10ff6112d119f82b1618b6dc28126798481b9355d8748b64b9b55051eb4f01b"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:861e459b0e97673af6cc5e7f597035c2e3acdfb2608132665406cded25ba64c7"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5de2464c254380d8a6c20a2746614d5a436260be1507491442cf1088e59430d2"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d375d8ccd3cebae8d90270f7aa8532fe05908f79e78ae489068f3b4eee5994e8"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:014ea143572fee1c18322b7908140ad23b3994036ef4c0d630110faf942652f8"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-win32.whl", hash = "sha256:6607ae6cd3a07f8a4c3198ffbf256c261661965742e2b5265a77cd5c679c9bba"}, - {file = "SQLAlchemy-1.3.24-cp38-cp38-win_amd64.whl", hash = "sha256:fcb251305fa24a490b6a9ee2180e5f8252915fb778d3dafc70f9cc3f863827b9"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01aa5f803db724447c1d423ed583e42bf5264c597fd55e4add4301f163b0be48"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4d0e3515ef98aa4f0dc289ff2eebb0ece6260bbf37c2ea2022aad63797eacf60"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:bce28277f308db43a6b4965734366f533b3ff009571ec7ffa583cb77539b84d6"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:8110e6c414d3efc574543109ee618fe2c1f96fa31833a1ff36cc34e968c4f233"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-win32.whl", hash = "sha256:ee5f5188edb20a29c1cc4a039b074fdc5575337c9a68f3063449ab47757bb064"}, - {file = "SQLAlchemy-1.3.24-cp39-cp39-win_amd64.whl", hash = "sha256:09083c2487ca3c0865dc588e07aeaa25416da3d95f7482c07e92f47e080aa17b"}, - {file = "SQLAlchemy-1.3.24.tar.gz", hash = "sha256:ebbb777cbf9312359b897bf81ba00dae0f5cb69fba2a18265dcc18a6f5ef7519"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:af00236fe21c4d4f4c227b6ccc19b44c594160cc3ff28d104cdce85855369277"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1183599e25fa38a1a322294b949da02b4f0da13dbc2688ef9dbe746df573f8a6"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1990d5a6a5dc358a0894c8ca02043fb9a5ad9538422001fb2826e91c50f1d539"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:14b3f4783275339170984cadda66e3ec011cce87b405968dc8d51cf0f9997b0d"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b24364150738ce488333b3fb48bfa14c189a66de41cd632796fbcacb26b4585"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-win32.whl", hash = "sha256:a8a72259a1652f192c68377be7011eac3c463e9892ef2948828c7d58e4829988"}, + {file = "SQLAlchemy-1.4.54-cp310-cp310-win_amd64.whl", hash = "sha256:b67589f7955924865344e6eacfdcf70675e64f36800a576aa5e961f0008cde2a"}, + {file 
= "SQLAlchemy-1.4.54-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b05e0626ec1c391432eabb47a8abd3bf199fb74bfde7cc44a26d2b1b352c2c6e"}, + {file = "SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13e91d6892b5fcb94a36ba061fb7a1f03d0185ed9d8a77c84ba389e5bb05e936"}, + {file = "SQLAlchemy-1.4.54-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb59a11689ff3c58e7652260127f9e34f7f45478a2f3ef831ab6db7bcd72108f"}, + {file = "SQLAlchemy-1.4.54-cp311-cp311-win32.whl", hash = "sha256:1390ca2d301a2708fd4425c6d75528d22f26b8f5cbc9faba1ddca136671432bc"}, + {file = "SQLAlchemy-1.4.54-cp311-cp311-win_amd64.whl", hash = "sha256:2b37931eac4b837c45e2522066bda221ac6d80e78922fb77c75eb12e4dbcdee5"}, + {file = "SQLAlchemy-1.4.54-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3f01c2629a7d6b30d8afe0326b8c649b74825a0e1ebdcb01e8ffd1c920deb07d"}, + {file = "SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c24dd161c06992ed16c5e528a75878edbaeced5660c3db88c820f1f0d3fe1f4"}, + {file = "SQLAlchemy-1.4.54-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5e0d47d619c739bdc636bbe007da4519fc953393304a5943e0b5aec96c9877c"}, + {file = "SQLAlchemy-1.4.54-cp312-cp312-win32.whl", hash = "sha256:12bc0141b245918b80d9d17eca94663dbd3f5266ac77a0be60750f36102bbb0f"}, + {file = "SQLAlchemy-1.4.54-cp312-cp312-win_amd64.whl", hash = "sha256:f941aaf15f47f316123e1933f9ea91a6efda73a161a6ab6046d1cde37be62c88"}, + {file = "SQLAlchemy-1.4.54-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:a41611835010ed4ea4c7aed1da5b58aac78ee7e70932a91ed2705a7b38e40f52"}, + {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8c1b9ecaf9f2590337d5622189aeb2f0dbc54ba0232fa0856cf390957584a9"}, + {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0de620f978ca273ce027769dc8db7e6ee72631796187adc8471b3c76091b809e"}, + {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c5a2530400a6e7e68fd1552a55515de6a4559122e495f73554a51cedafc11669"}, + {file = "SQLAlchemy-1.4.54-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cf7076c8578b3de4e43a046cc7a1af8466e1c3f5e64167189fe8958a4f9c02"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:f1e1b92ee4ee9ffc68624ace218b89ca5ca667607ccee4541a90cc44999b9aea"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41cffc63c7c83dfc30c4cab5b4308ba74440a9633c4509c51a0c52431fb0f8ab"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5933c45d11cbd9694b1540aa9076816cc7406964c7b16a380fd84d3a5fe3241"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cafe0ba3a96d0845121433cffa2b9232844a2609fce694fcc02f3f31214ece28"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a19f816f4702d7b1951d7576026c7124b9bfb64a9543e571774cf517b7a50b29"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-win32.whl", hash = "sha256:76c2ba7b5a09863d0a8166fbc753af96d561818c572dbaf697c52095938e7be4"}, + {file = "SQLAlchemy-1.4.54-cp37-cp37m-win_amd64.whl", hash = "sha256:a86b0e4be775902a5496af4fb1b60d8a2a457d78f531458d294360b8637bb014"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:a49730afb716f3f675755afec109895cab95bc9875db7ffe2e42c1b1c6279482"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26e78444bc77d089e62874dc74df05a5c71f01ac598010a327881a48408d0064"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02d2ecb9508f16ab9c5af466dfe5a88e26adf2e1a8d1c56eb616396ccae2c186"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:394b0135900b62dbf63e4809cdc8ac923182af2816d06ea61cd6763943c2cc05"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed3576675c187e3baa80b02c4c9d0edfab78eff4e89dd9da736b921333a2432"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-win32.whl", hash = "sha256:fc9ffd9a38e21fad3e8c5a88926d57f94a32546e937e0be46142b2702003eba7"}, + {file = "SQLAlchemy-1.4.54-cp38-cp38-win_amd64.whl", hash = "sha256:a01bc25eb7a5688656c8770f931d5cb4a44c7de1b3cec69b84cc9745d1e4cc10"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0b76bbb1cbae618d10679be8966f6d66c94f301cfc15cb49e2f2382563fb6efb"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdb2886c0be2c6c54d0651d5a61c29ef347e8eec81fd83afebbf7b59b80b7393"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:954816850777ac234a4e32b8c88ac1f7847088a6e90cfb8f0e127a1bf3feddff"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1d83cd1cc03c22d922ec94d0d5f7b7c96b1332f5e122e81b1a61fb22da77879a"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1576fba3616f79496e2f067262200dbf4aab1bb727cd7e4e006076686413c80c"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-win32.whl", hash = "sha256:3112de9e11ff1957148c6de1df2bc5cc1440ee36783412e5eedc6f53638a577d"}, + {file = "SQLAlchemy-1.4.54-cp39-cp39-win_amd64.whl", hash = "sha256:6da60fb24577f989535b8fc8b2ddc4212204aaf02e53c4c7ac94ac364150ed08"}, + {file = "sqlalchemy-1.4.54.tar.gz", hash = "sha256:4470fbed088c35dc20b78a39aaf4ae54fe81790c783b3264872a0224f437c31a"}, ] +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} + [package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy 
(>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)", "mariadb (>=1.0.1,!=1.1.2)"] mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mysql = ["mysqlclient"] -oracle = ["cx-oracle"] -postgresql = ["psycopg2"] -postgresql-pg8000 = ["pg8000 (<1.16.6)"] +mssql-pymssql = ["pymssql", "pymssql"] +mssql-pyodbc = ["pyodbc", "pyodbc"] +mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] +mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mysql-connector = ["mysql-connector-python", "mysql-connector-python"] +oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "asyncpg", "greenlet (!=0.4.17)", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)", "pg8000 (>=1.16.6,!=1.29.0)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "sqlalchemy-utils" @@ -1340,24 +1437,24 @@ files = [ [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -1368,13 +1465,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "werkzeug" -version = "3.0.1" +version = "3.0.4" description = "The comprehensive WSGI web application library." 
optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-3.0.1-py3-none-any.whl", hash = "sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10"}, - {file = "werkzeug-3.0.1.tar.gz", hash = "sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc"}, + {file = "werkzeug-3.0.4-py3-none-any.whl", hash = "sha256:02c9eb92b7d6c06f31a782811505d2157837cea66aaede3e217c7c27c039476c"}, + {file = "werkzeug-3.0.4.tar.gz", hash = "sha256:34f2371506b250df4d4f84bfe7b0921e4762525762bbd936614909fe25cd7306"}, ] [package.dependencies] @@ -1396,20 +1493,24 @@ files = [ [[package]] name = "zipp" -version = "3.18.1" +version = "3.20.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, - {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, + {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, + {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "20e22d19f1fdead00ad0641a26fb89f7babcc1a2f8d3e99b0eae2c22953ea0d0" +content-hash = "acd140a2033030d1529c6bd6765df8492ce2e99a18c209b1dac7b79889a384fd" diff --git a/pyproject.toml b/pyproject.toml index 3fde0ae9..50afc1a8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "indexd" -version = "5.0.4" +version = "5.1.0" description = "Gen3 Indexing Service" authors = ["CTDS UChicago "] license = "Apache-2.0" @@ -23,13 +23,12 @@ indexclient = "^2.1.0" jsonschema = "^3.2" flask = "^2.3.3" psycopg2 = "^2.7" -sqlalchemy = "~1.3.3" +sqlalchemy = "^1.4.0" sqlalchemy-utils = "^0.37.3" PyYAML = ">=5.3,<7" [tool.poetry.dev-dependencies] -cdisutilstest = {git = "https://github.com/uc-cdis/cdisutils-test", rev = "1.0.0"} coveralls = "^3.0.1" mock = "^4.0.2" pytest = "^6.2.4" diff --git a/tests/ci_commands_script.sh b/tests/ci_commands_script.sh index e82905b1..2e2c4766 100644 --- a/tests/ci_commands_script.sh +++ b/tests/ci_commands_script.sh @@ -1,3 +1,3 @@ #!/usr/bin/env bash -poetry run pytest -vv --cov=indexd --cov-report xml tests +poetry run pytest -vv --cov=indexd --cov=migrations/versions --cov-append --cov-report xml tests diff --git a/tests/conftest.py b/tests/conftest.py index afe08d99..aec4206a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,22 +2,135 @@ import importlib import pytest import requests +from sqlalchemy 
import create_engine
 import mock
 from unittest.mock import patch
+from cdislogging import get_logger
+
 # indexd_server and indexd_client is needed as fixtures
-from cdisutilstest.code.indexd_fixture import clear_database
 from gen3authz.client.arborist.client import ArboristClient

 from indexd import get_app
 from indexd import auth
 from indexd.auth.errors import AuthError
-from tests import default_test_settings
+from indexd.index.drivers.alchemy import Base as index_base
+from indexd.auth.drivers.alchemy import Base as auth_base
+from indexd.alias.drivers.alchemy import Base as alias_base
+from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver
+from indexd.alias.drivers.alchemy import SQLAlchemyAliasDriver
+from indexd.auth.drivers.alchemy import SQLAlchemyAuthDriver
+from indexd.index.drivers.single_table_alchemy import SingleTableSQLAlchemyIndexDriver
+
+
+POSTGRES_CONNECTION = "postgresql://postgres:postgres@localhost:5432/indexd_tests"  # pragma: allowlist secret
+
+logger = get_logger(__name__, log_level="info")
+
+
+def clear_database():
+    """
+    Clean up test data from unit tests
+    """
+    engine = create_engine(POSTGRES_CONNECTION)
+
+    with engine.connect() as conn:
+        index_driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
+        # IndexD tables need to be deleted in this order to avoid foreign key constraint errors
+        table_delete_order = [
+            "index_record_url_metadata",
+            "index_record_url",
+            "index_record_hash",
+            "index_record_authz",
+            "index_record_ace",
+            "index_record_alias",
+            "index_record_metadata",
+            "alias_record_hash",
+            "alias_record_host_authority",
+            "alias_record",
+            "index_record",
+            "drs_bundle_record",
+            "base_version",
+            "record",
+        ]
+
+        for table_name in table_delete_order:
+            delete_statement = f"DELETE FROM {table_name}"
+            conn.execute(delete_statement)
+
+        # Clear the Alias records
+        alias_driver = SQLAlchemyAliasDriver(POSTGRES_CONNECTION)
+        for model in alias_base.__subclasses__():
+            table = model.__table__
+            delete_statement = table.delete()
+            conn.execute(delete_statement)
+
+        # Clear the Auth records
+        auth_driver = SQLAlchemyAuthDriver(POSTGRES_CONNECTION)
+        for model in auth_base.__subclasses__():
+            table = model.__table__
+            delete_statement = table.delete()
+            conn.execute(delete_statement)
+
+
+@pytest.fixture(scope="function", params=["default_settings", "single_table_settings"])
+def combined_default_and_single_table_settings(request):
+    """
+    Fixture to run a unit test with both the multi-table and the single-table drivers
+    """
+
+    # Load the default settings
+    from indexd import default_settings
+    from tests import default_test_settings
+
+    importlib.reload(default_settings)
+    importlib.reload(default_test_settings)
+
+    if request.param == "default_settings":
+        default_settings.settings["use_single_table"] = False
+        default_settings.settings["config"]["INDEX"] = {
+            "driver": SQLAlchemyIndexDriver(
+                "postgresql://postgres:postgres@localhost:5432/indexd_tests",  # pragma: allowlist secret
+                echo=True,
+                index_config={
+                    "DEFAULT_PREFIX": "testprefix:",
+                    "PREPEND_PREFIX": True,
+                    "ADD_PREFIX_ALIAS": False,
+                },
+            )
+        }
+
+    # Load the single-table settings
+    elif request.param == "single_table_settings":
+        default_settings.settings["use_single_table"] = True
+        default_settings.settings["config"]["INDEX"] = {
+            "driver": SingleTableSQLAlchemyIndexDriver(
+                "postgresql://postgres:postgres@localhost:5432/indexd_tests",  # pragma: allowlist secret
+                echo=True,
+                index_config={
+                    "DEFAULT_PREFIX": "testprefix:",
+                    "PREPEND_PREFIX": True,
+                    "ADD_PREFIX_ALIAS": False,
+                },
+            )
+        }
+
+    default_settings.settings = {
+        **default_settings.settings,
+        **default_test_settings.settings,
+    }
+    yield get_app(default_settings.settings)
+
+    try:
+        clear_database()
+    except Exception as e:
+        logger.error(f"Failed to clear database with error {e}")


 @pytest.fixture(scope="function", autouse=True)
 def app():
     from indexd import default_settings
+    from tests import default_test_settings

     importlib.reload(default_settings)
     default_settings.settings = {
@@ -29,18 +142,30 @@ def app():

     try:
         clear_database()
-    except Exception:
-        pass
+    except Exception as e:
+        logger.error(f"Failed to clear database with error {e}")


 @pytest.fixture
 def user(app):
-    app.auth.add("test", "test")
+    engine = create_engine(POSTGRES_CONNECTION)
+    driver = SQLAlchemyAuthDriver(POSTGRES_CONNECTION)
+    try:
+        driver.add("test", "test")
+    except Exception as e:
+        logger.error(f"Failed to add test user with error {e}")
+
     yield {
         "Authorization": ("Basic " + base64.b64encode(b"test:test").decode("ascii")),
         "Content-Type": "application/json",
     }
-    app.auth.delete("test")
+
+    try:
+        driver.delete("test")
+    except Exception as e:
+        logger.error(f"Failed to delete test user with error {e}")
+
+    engine.dispose()


 @pytest.fixture
diff --git a/tests/default_test_settings.py b/tests/default_test_settings.py
index 8dee48ea..13b57770 100644
--- a/tests/default_test_settings.py
+++ b/tests/default_test_settings.py
@@ -1,7 +1,6 @@
 import os

 from indexd.default_settings import *
-from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver

 CONFIG["DIST"] = [
     {
@@ -37,4 +36,4 @@
 # database used by the `/tests/postgres` tests
 settings["config"][
     "TEST_DB"
-] = "postgres://postgres:postgres@localhost:{0}/indexd_tests".format(psql_port)
+] = "postgresql://postgres:postgres@localhost:{0}/indexd_tests".format(psql_port)
diff --git a/tests/postgres/migrations/test_bb3d7586a096_createsingletable.py b/tests/postgres/migrations/test_bb3d7586a096_createsingletable.py
new file mode 100644
index 00000000..7035ee28
--- /dev/null
+++ b/tests/postgres/migrations/test_bb3d7586a096_createsingletable.py
@@ -0,0 +1,58 @@
+from alembic.config import main as alembic_main
+
+
+def test_upgrade(postgres_driver):
+    """
+    Make sure the single-table migration creates the record table with the correct schema.
+ """ + conn = postgres_driver.engine.connect() + + # state before migration + alembic_main(["--raiseerr", "downgrade", "a72f117515c5"]) + + # state after migration + alembic_main(["--raiseerr", "upgrade", "bb3d7586a096"]) # pragma: allowlist secret + + get_columns = "SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = 'public' AND table_name = 'record'" + + expected_schema = [ + ("guid", "character varying"), + ("baseid", "character varying"), + ("rev", "character varying"), + ("form", "character varying"), + ("size", "bigint"), + ("created_date", "timestamp without time zone"), + ("updated_date", "timestamp without time zone"), + ("file_name", "character varying"), + ("version", "character varying"), + ("uploader", "character varying"), + ("description", "character varying"), + ("content_created_date", "timestamp without time zone"), + ("content_updated_date", "timestamp without time zone"), + ("hashes", "jsonb"), + ("acl", "ARRAY"), + ("authz", "ARRAY"), + ("urls", "ARRAY"), + ("record_metadata", "jsonb"), + ("url_metadata", "jsonb"), + ("alias", "ARRAY"), + ] + + table_res = conn.execute(get_columns) + actual_schema = sorted([i for i in table_res]) + assert sorted(expected_schema) == actual_schema + + +def test_downgrade(postgres_driver): + """ + Test downgrade to before single table. record table should not exist before this upgrade + """ + conn = postgres_driver.engine.connect() + alembic_main(["--raiseerr", "downgrade", "a72f117515c5"]) + + # the database should not contain the 'record' table + tables_res = conn.execute( + "SELECT * FROM pg_catalog.pg_tables WHERE schemaname != 'pg_catalog' AND schemaname != 'information_schema';" + ) + tables = [i[1] for i in tables_res] + assert "record" not in tables diff --git a/tests/postgres/migrations/test_legacy_schema_migration.py b/tests/postgres/migrations/test_legacy_schema_migration.py index a4460d7c..76e20ff9 100644 --- a/tests/postgres/migrations/test_legacy_schema_migration.py +++ b/tests/postgres/migrations/test_legacy_schema_migration.py @@ -5,6 +5,7 @@ from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy import create_engine import sqlite3 import tests.util as util from indexd.index.drivers.alchemy import ( @@ -52,9 +53,12 @@ ], } +POSTGRES_CONNECTION = "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret -def update_version_table_for_testing(db, tb_name, val): - with sqlite3.connect(db) as conn: + +def update_version_table_for_testing(tb_name, val): + engine = create_engine(POSTGRES_CONNECTION) + with engine.connect() as conn: conn.execute( """\ CREATE TABLE IF NOT EXISTS {} (version INT)\ @@ -76,7 +80,6 @@ def update_version_table_for_testing(db, tb_name, val): tb_name, val ) ) - conn.commit() def test_migrate_acls(client, user, postgres_driver): @@ -105,7 +108,6 @@ def test_migrate_acls(client, user, postgres_driver): assert rec["metadata"] == {} -@util.removes("index.sq3") def test_migrate_index(): def test_migrate_index_internal(monkeypatch): called = [] @@ -121,8 +123,8 @@ def mock_migrate(**kwargs): [mock_migrate, mock_migrate], ) - update_version_table_for_testing("index.sq3", "index_schema_version", 0) - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + update_version_table_for_testing("index_schema_version", 0) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) assert len(called) == 2 with driver.session as s: @@ -133,7 +135,6 @@ def mock_migrate(**kwargs): return test_migrate_index_internal -@util.removes("index.sq3") def 
test_migrate_index_only_diff(): def test_migrate_index_only_diff_internal(monkeypatch): called = [] @@ -153,9 +154,9 @@ def mock_migrate_2(**kwargs): [mock_migrate, mock_migrate_2], ) - update_version_table_for_testing("index.sq3", "index_schema_version", 0) + update_version_table_for_testing("index_schema_version", 0) - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) assert len(called) == 1 assert len(called_2) == 0 @@ -163,8 +164,8 @@ def mock_migrate_2(**kwargs): called_2 = [] monkeypatch.setattr("indexd.index.drivers.alchemy.CURRENT_SCHEMA_VERSION", 2) - update_version_table_for_testing("index.sq3", "index_schema_version", 1) - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + update_version_table_for_testing("index_schema_version", 1) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) assert len(called) == 0 assert len(called_2) == 1 @@ -175,7 +176,6 @@ def mock_migrate_2(**kwargs): return test_migrate_index_only_diff_internal -@util.removes("alias.sq3") def test_migrate_alias(): def test_migrate_alias_internal(monkeypatch): called = [] @@ -190,9 +190,9 @@ def mock_migrate(**kwargs): monkeypatch.setattr("indexd.utils.check_engine_for_migrate", lambda _: True) - update_version_table_for_testing("alias.sq3", "alias_schema_version", 0) + update_version_table_for_testing("alias_schema_version", 0) - driver = SQLAlchemyAliasDriver("sqlite:///alias.sq3") + driver = SQLAlchemyAliasDriver(POSTGRES_CONNECTION) assert len(called) == 1 with driver.session as s: v = s.query(AliasSchemaVersion).first() diff --git a/tests/test_blueprint.py b/tests/test_blueprint.py index cfb85662..f5ffd557 100644 --- a/tests/test_blueprint.py +++ b/tests/test_blueprint.py @@ -13,9 +13,17 @@ DIST_CONFIG = [] -INDEX_CONFIG = {"driver": SQLAlchemyIndexDriver("sqlite:///index.sq3")} - -ALIAS_CONFIG = {"driver": SQLAlchemyAliasDriver("sqlite:///alias.sq3")} +INDEX_CONFIG = { + "driver": SQLAlchemyIndexDriver( + "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret + ) +} + +ALIAS_CONFIG = { + "driver": SQLAlchemyAliasDriver( + "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret + ) +} @util.removes("index.sq3") diff --git a/tests/test_bundles.py b/tests/test_bundles.py index 84ef3f72..57d25738 100644 --- a/tests/test_bundles.py +++ b/tests/test_bundles.py @@ -47,7 +47,7 @@ def create_index(client, user, add_bundle=False): return did_list, rec1 -def test_bundle_post(client, user): +def test_bundle_post(client, user, combined_default_and_single_table_settings): """ Bundle 1 +-object1 @@ -59,7 +59,9 @@ def test_bundle_post(client, user): assert res2.status_code == 200 -def test_bundle_get_post_with_optional_fields(client, user): +def test_bundle_get_post_with_optional_fields( + client, user, combined_default_and_single_table_settings +): """ Bundle 1 +-object1 @@ -109,7 +111,9 @@ def test_bundle_get_post_with_optional_fields(client, user): assert "aliases" not in content -def test_bundle_post_self_reference(client, user): +def test_bundle_post_self_reference( + client, user, combined_default_and_single_table_settings +): """ Make sure this doesnt exist Bundle 1 @@ -127,7 +131,9 @@ def test_bundle_post_self_reference(client, user): assert res2.status_code == 400 -def test_bundle_post_defined_size_checksum(client, user): +def test_bundle_post_defined_size_checksum( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = 
str(uuid.uuid4) data = { @@ -141,7 +147,9 @@ def test_bundle_post_defined_size_checksum(client, user): assert res2.status_code == 200 -def test_bundle_post_different_checksum_types(client, user): +def test_bundle_post_different_checksum_types( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = str(uuid.uuid4) data = { @@ -162,7 +170,9 @@ def test_bundle_post_different_checksum_types(client, user): } -def test_bundle_post_multiple_checksum_types(client, user): +def test_bundle_post_multiple_checksum_types( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = str(uuid.uuid4) data = { @@ -193,7 +203,9 @@ def test_bundle_post_multiple_checksum_types(client, user): ] -def test_bundle_post_checksum_with_incorrect_schema(client, user): +def test_bundle_post_checksum_with_incorrect_schema( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = str(uuid.uuid4) @@ -222,7 +234,9 @@ def test_bundle_post_checksum_with_incorrect_schema(client, user): assert res.status_code == 404 -def test_bundle_bundle_data_not_found(client, user): +def test_bundle_bundle_data_not_found( + client, user, combined_default_and_single_table_settings +): bundle_id = str(uuid.uuid4) data = { "name": "test_bundle", @@ -235,7 +249,9 @@ def test_bundle_bundle_data_not_found(client, user): assert res2.status_code == 404 -def test_post_drs_no_duplicate_bundles(client, user): +def test_post_drs_no_duplicate_bundles( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) data = get_bundle_doc(bundles=[did_list[0], did_list[0], did_list[0]]) @@ -243,13 +259,17 @@ def test_post_drs_no_duplicate_bundles(client, user): assert res2.status_code == 400 -def test_bundle_post_invalid_input(client, user): +def test_bundle_post_invalid_input( + client, user, combined_default_and_single_table_settings +): data = {} res2 = client.post("/bundle/", json=data, headers=user) assert res2.status_code == 400 -def test_bundle_post_no_bundle_data(client, user): +def test_bundle_post_no_bundle_data( + client, user, combined_default_and_single_table_settings +): data = { "name": "test_bundle", "bundles": [], @@ -259,7 +279,7 @@ def test_bundle_post_no_bundle_data(client, user): assert res2.json["error"] == "Bundle data required." -def test_bundle_get(client, user): +def test_bundle_get(client, user, combined_default_and_single_table_settings): """ Post with bundle_id and get. 
Bundle1 @@ -286,7 +306,7 @@ def test_bundle_get(client, user): assert rec2["size"] == 123 -def test_bundle_get_form_type(client, user): +def test_bundle_get_form_type(client, user, combined_default_and_single_table_settings): """ form = object when object form = bundle when bundle @@ -308,7 +328,9 @@ def test_bundle_get_form_type(client, user): assert rec2["form"] == "bundle" -def test_bundle_get_no_bundle_id(client, user): +def test_bundle_get_no_bundle_id( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = str(uuid.uuid4()) data = get_bundle_doc(did_list, bundle_id=bundle_id) @@ -320,7 +342,9 @@ def test_bundle_get_no_bundle_id(client, user): assert res2.status_code == 404 -def test_bundle_get_expand_false(client, user): +def test_bundle_get_expand_false( + client, user, combined_default_and_single_table_settings +): did_list, rec = create_index(client, user) res1 = client.get("/ga4gh/drs/v1/objects/" + rec["did"]) @@ -338,7 +362,9 @@ def test_bundle_get_expand_false(client, user): assert "bundle_data" not in rec2 -def test_redirect_to_bundle_from_index(client, user): +def test_redirect_to_bundle_from_index( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = str(uuid.uuid4()) data = get_bundle_doc(did_list, bundle_id=bundle_id) @@ -353,7 +379,9 @@ def test_redirect_to_bundle_from_index(client, user): assert res3.status_code == 200 -def test_bundle_from_drs_endpoint(client, user): +def test_bundle_from_drs_endpoint( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = str(uuid.uuid4()) data = get_bundle_doc(did_list, bundle_id=bundle_id) @@ -368,7 +396,7 @@ def test_bundle_from_drs_endpoint(client, user): assert res3.status_code == 200 -def test_get_bundle_list(client, user): +def test_get_bundle_list(client, user, combined_default_and_single_table_settings): """ bundle1 +-object1 @@ -411,7 +439,7 @@ def test_get_bundle_list(client, user): assert len(rec5["records"]) == n_records + n_bundles -def test_multiple_bundle_data(client, user): +def test_multiple_bundle_data(client, user, combined_default_and_single_table_settings): """ bundle1 +-object1 @@ -441,7 +469,7 @@ def test_multiple_bundle_data(client, user): assert data["id"] in did_list -def test_bundle_delete(client, user): +def test_bundle_delete(client, user, combined_default_and_single_table_settings): n_records = 6 n_delete = 2 bundle_ids = [] @@ -471,18 +499,24 @@ def test_bundle_delete(client, user): assert len(rec3["records"]) == n_records - n_delete -def test_bundle_delete_invalid_bundle_id(client, user): +def test_bundle_delete_invalid_bundle_id( + client, user, combined_default_and_single_table_settings +): bundle_id = "12938hd981h123hd18hd80h028" res = client.delete("/bundle/" + bundle_id, headers=user) assert res.status_code == 404 -def test_bundle_delete_no_bundle_id(client, user): +def test_bundle_delete_no_bundle_id( + client, user, combined_default_and_single_table_settings +): res = client.delete("/bundle/", headers=user) assert res.status_code == 405 -def test_bundle_data_bundle_and_index(client, user): +def test_bundle_data_bundle_and_index( + client, user, combined_default_and_single_table_settings +): """ bundle_main +-bundle1 @@ -521,7 +555,7 @@ def test_bundle_data_bundle_and_index(client, user): assert rec3["size"] == len(rec3["contents"]) * 123 -def test_nested_bundle_data(client, user): +def 
test_nested_bundle_data(client, user, combined_default_and_single_table_settings): """ bundle1 +-bundle2 @@ -557,7 +591,9 @@ def test_nested_bundle_data(client, user): rec3 = rec3[key][0] -def test_bundle_no_bundle_name(client, user): +def test_bundle_no_bundle_name( + client, user, combined_default_and_single_table_settings +): did_list, _ = create_index(client, user) bundle_id = str(uuid.uuid4()) @@ -632,7 +668,9 @@ def content_validation(contents): return True -def test_get_drs_expand_contents_default(client, user): +def test_get_drs_expand_contents_default( + client, user, combined_default_and_single_table_settings +): bundle_id = build_bundle(client, user) res = client.get("/bundle/" + bundle_id) assert res.status_code == 200 @@ -645,7 +683,9 @@ def test_get_drs_expand_contents_default(client, user): assert len(contents) == 3 -def test_get_drs_expand_contents_false(client, user): +def test_get_drs_expand_contents_false( + client, user, combined_default_and_single_table_settings +): bundle_id = build_bundle(client, user) res = client.get("/bundle/" + bundle_id) assert res.status_code == 200 @@ -658,7 +698,9 @@ def test_get_drs_expand_contents_false(client, user): assert len(contents) == 0 -def test_get_drs_expand_contents_true(client, user): +def test_get_drs_expand_contents_true( + client, user, combined_default_and_single_table_settings +): bundle_id = build_bundle(client, user) res = client.get("/bundle/" + bundle_id) assert res.status_code == 200 diff --git a/tests/test_client.py b/tests/test_client.py index 93cf119c..0b900328 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -29,14 +29,16 @@ def get_doc( return doc -def test_index_list(client): +def test_index_list(client, combined_default_and_single_table_settings): res = client.get("/index/") assert res.status_code == 200 rec = res.json assert rec["records"] == [] -def test_index_list_with_params(client, user): +def test_index_list_with_params( + client, user, combined_default_and_single_table_settings +): data1 = get_doc() data1["urls"] = [ "s3://endpointurl/bucket_2/key_2", @@ -119,7 +121,7 @@ def test_index_list_with_params(client, user): assert data_list["records"][0]["urls_metadata"] == data1["urls_metadata"] -def test_get_list_form_param(client, user): +def test_get_list_form_param(client, user, combined_default_and_single_table_settings): """ bundle1 +-object1 @@ -155,7 +157,9 @@ def test_get_list_form_param(client, user): assert len(rec3["records"]) == 2 * n_records -def test_get_list_form_with_params(client, user): +def test_get_list_form_with_params( + client, user, combined_default_and_single_table_settings +): n_records = 6 for _ in range(n_records): did_list, _ = create_index(client, user) @@ -232,7 +236,7 @@ def test_get_list_form_with_params(client, user): assert rec_1["did"] in ids -def test_index_list_by_size(client, user): +def test_index_list_by_size(client, user, combined_default_and_single_table_settings): # post two records of different size data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -248,7 +252,9 @@ def test_index_list_by_size(client, user): assert rec["records"][0]["size"] == 100 -def test_index_list_by_filename(client, user): +def test_index_list_by_filename( + client, user, combined_default_and_single_table_settings +): # post three records of different name data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -267,7 +273,7 @@ def test_index_list_by_filename(client, user): assert rec["records"][0]["file_name"] == data["file_name"] -def 
test_index_list_by_authz(client, user): +def test_index_list_by_authz(client, user, combined_default_and_single_table_settings): # post three records of different authz data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -286,7 +292,9 @@ def test_index_list_by_authz(client, user): assert sorted(rec["records"][0]["authz"]) == sorted(data["authz"]) -def test_index_list_by_multiple_authz(client, user): +def test_index_list_by_multiple_authz( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["authz"] = ["abc"] @@ -307,7 +315,9 @@ def test_index_list_by_multiple_authz(client, user): assert sorted(rec["records"][0]["authz"]) == sorted(data["authz"]) -def test_index_list_by_multiple_acl(client, user): +def test_index_list_by_multiple_acl( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["acl"] = ["abc"] @@ -328,7 +338,7 @@ def test_index_list_by_multiple_acl(client, user): assert sorted(rec["records"][0]["acl"]) == sorted(data["acl"]) -def test_index_list_by_urls(client, user): +def test_index_list_by_urls(client, user, combined_default_and_single_table_settings): data = get_doc() data["urls"] = ["s3://bucket1"] @@ -361,7 +371,9 @@ def test_index_list_by_urls(client, user): assert sorted(rec["records"][0]["urls"]) == sorted(data["urls"]) -def test_index_list_by_version(client, user): +def test_index_list_by_version( + client, user, combined_default_and_single_table_settings +): # post three records of different version data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -380,7 +392,9 @@ def test_index_list_by_version(client, user): assert rec["records"][0]["version"] == data["version"] -def test_index_list_with_params_negate(client, user): +def test_index_list_with_params_negate( + client, user, combined_default_and_single_table_settings +): data = get_doc() res_1 = client.post("/index/", json=data, headers=user) assert res_1.status_code == 200 @@ -481,7 +495,7 @@ def test_index_list_with_params_negate(client, user): assert rec_5["did"] in ids -def test_index_list_invalid_param(client): +def test_index_list_invalid_param(client, combined_default_and_single_table_settings): # test 400 when limit > 1024 res = client.get("/index/?limit=1025") assert res.status_code == 400 @@ -503,7 +517,9 @@ def test_index_list_invalid_param(client): assert res.status_code == 400 -def test_negate_filter_file_name(client, user): +def test_negate_filter_file_name( + client, user, combined_default_and_single_table_settings +): # post two records of different file name data1 = get_doc() data1["file_name"] = "test_file_name_1" @@ -523,13 +539,12 @@ def test_negate_filter_file_name(client, user): res = client.get("/index/?negate_params=" + json.dumps(negate_param)) assert res.status_code == 200 rec = res.json - print(rec) # assert record returned with proper non-negated file name assert len(rec["records"]) == 1 assert rec["records"][0]["file_name"] == data1["file_name"] -def test_negate_filter_acl(client, user): +def test_negate_filter_acl(client, user, combined_default_and_single_table_settings): # post two records of different acl data1 = get_doc() data1["acl"] = ["read"] @@ -554,7 +569,7 @@ def test_negate_filter_acl(client, user): assert sorted(rec["records"][0]["acl"]) == sorted(data1["acl"]) -def test_negate_filter_authz(client, user): +def test_negate_filter_authz(client, user, combined_default_and_single_table_settings): # post two records of different authz data1 = get_doc() data1["authz"] = 
["admin"] @@ -579,7 +594,9 @@ def test_negate_filter_authz(client, user): assert sorted(rec["records"][0]["authz"]) == sorted(data1["authz"]) -def test_negate_filter_version(client, user): +def test_negate_filter_version( + client, user, combined_default_and_single_table_settings +): # post two records of different version data1 = get_doc() data1["version"] = "3" @@ -604,7 +621,9 @@ def test_negate_filter_version(client, user): assert rec["records"][0]["version"] == data1["version"] -def test_list_entries_with_uploader(client, user): +def test_list_entries_with_uploader( + client, user, combined_default_and_single_table_settings +): """ Test that return a list of record given uploader """ @@ -637,7 +656,9 @@ def test_list_entries_with_uploader(client, user): assert data_list["records"][1]["uploader"] == "uploader_123" -def test_list_entries_with_uploader_wrong_uploader(client, user): +def test_list_entries_with_uploader_wrong_uploader( + client, user, combined_default_and_single_table_settings +): """ Test that returns no record due to wrong uploader id """ @@ -661,7 +682,7 @@ def test_list_entries_with_uploader_wrong_uploader(client, user): assert len(data_list["records"]) == 0 -def test_create_blank_record(client, user): +def test_create_blank_record(client, user, combined_default_and_single_table_settings): """ Test that new blank records only contain the uploader and optionally file_name fields: test without file name @@ -685,7 +706,9 @@ def test_create_blank_record(client, user): assert_blank(rec) -def test_create_blank_record_with_file_name(client, user): +def test_create_blank_record_with_file_name( + client, user, combined_default_and_single_table_settings +): """ Test that new blank records only contain the uploader and optionally file_name fields: test with file name @@ -709,7 +732,9 @@ def test_create_blank_record_with_file_name(client, user): assert_blank(rec) -def test_create_blank_record_with_authz(client, use_mock_authz): +def test_create_blank_record_with_authz( + client, use_mock_authz, combined_default_and_single_table_settings +): """ Test that a new blank record can be created with a specified authz when the user has the expected access @@ -760,7 +785,7 @@ def test_create_blank_record_with_authz(client, use_mock_authz): assert res.status_code == 403, res.json -def test_create_blank_version(client, user): +def test_create_blank_version(client, user, combined_default_and_single_table_settings): """ Test that we can create a new, blank version of a record with POST /index/blank/{GUID}. The new blank version should @@ -833,7 +858,9 @@ def assert_acl_authz_and_baseid(acl, authz, baseid, guid): assert not new_blank_doc[field] -def test_create_blank_version_with_authz(client, user, use_mock_authz): +def test_create_blank_version_with_authz( + client, user, use_mock_authz, combined_default_and_single_table_settings +): """ Test that a new version of a blank record can be created with a different authz when the user has the expected access @@ -891,7 +918,9 @@ def test_create_blank_version_with_authz(client, user, use_mock_authz): assert not new_version[field] -def test_create_blank_version_specify_did(client, user): +def test_create_blank_version_specify_did( + client, user, combined_default_and_single_table_settings +): """ Test that we can specify the new GUID of a new, blank version of a record with POST /index/blank/{GUID}. 
@@ -940,7 +969,9 @@ def test_create_blank_version_specify_did(client, user): assert blank_doc_guid == specified_guid -def test_create_blank_version_specify_guid_already_exists(client, user): +def test_create_blank_version_specify_guid_already_exists( + client, user, combined_default_and_single_table_settings +): """ Test that if we try to specify the GUID of a new blank version, but the new GUID we specified already exists in the index, the operation fails with 409. @@ -999,7 +1030,9 @@ def test_create_blank_version_specify_guid_already_exists(client, user): ), "Request should have failed with 409 user error: {}".format(res.json) -def test_create_blank_version_no_existing_record(client, user): +def test_create_blank_version_no_existing_record( + client, user, combined_default_and_single_table_settings +): """ Test that attempts to create a blank version of a nonexisting GUID should fail with 404. @@ -1015,7 +1048,9 @@ def test_create_blank_version_no_existing_record(client, user): ), "Expected to fail to create new blank version, instead got {}".format(res.json) -def test_create_blank_version_blank_record(client, user): +def test_create_blank_version_blank_record( + client, user, combined_default_and_single_table_settings +): """ Test that attempts to create a blank version of a blank record should succeed @@ -1068,7 +1103,9 @@ def test_create_blank_version_blank_record(client, user): assert not blank_doc[field] -def test_fill_size_n_hash_for_blank_record(client, user): +def test_fill_size_n_hash_for_blank_record( + client, user, combined_default_and_single_table_settings +): """ Test that can fill size and hashes for empty record """ @@ -1098,7 +1135,9 @@ def test_fill_size_n_hash_for_blank_record(client, user): assert rec["hashes"]["md5"] == "8b9942cf415384b27cadf1f4d2d981f5" -def test_update_blank_record_with_authz(client, user, use_mock_authz): +def test_update_blank_record_with_authz( + client, user, use_mock_authz, combined_default_and_single_table_settings +): """ Test that a blank record (WITHOUT AUTHZ) can be updated with an authz when the user has the expected access @@ -1158,7 +1197,9 @@ def test_update_blank_record_with_authz(client, user, use_mock_authz): assert rec["authz"] == [new_authz2] # authz as provided -def test_update_blank_record_with_authz_new(client, user, use_mock_authz): +def test_update_blank_record_with_authz_new( + client, user, use_mock_authz, combined_default_and_single_table_settings +): """ Test that a blank record (WITH AUTHZ) can be updated with a different authz when the user has the expected access @@ -1222,7 +1263,9 @@ def test_update_blank_record_with_authz_new(client, user, use_mock_authz): assert rec["authz"] == [new_authz2] # authz as provided -def test_get_empty_acl_authz_record(client, user): +def test_get_empty_acl_authz_record( + client, user, combined_default_and_single_table_settings +): """ Test that can get a list of empty acl/authz given uploader """ @@ -1259,7 +1302,9 @@ def test_get_empty_acl_authz_record(client, user): assert data_list["records"][1]["authz"] == [] -def test_get_empty_acl_authz_record_after_fill_size_n_hash(client, user): +def test_get_empty_acl_authz_record_after_fill_size_n_hash( + client, user, combined_default_and_single_table_settings +): """ Test create blank record -> fill hash and size -> get record with empty or none acl/authz @@ -1328,7 +1373,6 @@ def test_get_empty_acl_authz_record_after_fill_size_n_hash(client, user): res = client.get("/index/?uploader=uploader_123") assert res.status_code == 200 rec = 
res.json - print(rec) assert len(rec["records"]) == 3 res = client.get("/index/?uploader=uploader_123&acl=read") @@ -1353,7 +1397,9 @@ assert len(ids) == 2 -def test_cant_update_inexistent_blank_record(client, user): +def test_cant_update_inexistent_blank_record( + client, user, combined_default_and_single_table_settings +): # test that non-existent did throws 400 error data = {"size": 123, "hashes": {"md5": "8b9942cf415384b27cadf1f4d2d682e5"}} fake_did = "testprefix:455ffb35-1b0e-49bd-a4ab-3afe9f3aece9" @@ -1364,7 +1410,7 @@ assert res.status_code == 404 -def test_update_urls_metadata(client, user): +def test_update_urls_metadata(client, user, combined_default_and_single_table_settings): data = get_doc(has_urls_metadata=True) res = client.post("/index/", json=data, headers=user) assert res.status_code == 200 @@ -1435,7 +1481,13 @@ ], ) def test_urls_metadata_partial_match( - client, doc_urls, urls_meta, params, expected, user + client, + doc_urls, + urls_meta, + params, + expected, + user, + combined_default_and_single_table_settings, ): url_doc_mapping = {} for url_group in doc_urls: @@ -1456,10 +1508,10 @@ def test_urls_metadata_partial_match( rec = res.json ids = {r["did"] for r in rec["records"]} assert ids == {url_doc_mapping[url]["did"] for url in expected} -def test_get_urls(client, user): +def test_get_urls(client, user, combined_default_and_single_table_settings): data = get_doc(has_urls_metadata=True) response = client.post("/index/", json=data, headers=user) assert response.status_code == 200 @@ -1480,7 +1535,7 @@ assert record["urls"][0]["metadata"] == data["urls_metadata"][url] -def test_get_urls_size_0(client, user): +def test_get_urls_size_0(client, user, combined_default_and_single_table_settings): data = get_doc(has_urls_metadata=True) data["size"] = 0 response = client.post("/index/", json=data, headers=user) @@ -1495,7 +1550,7 @@ assert record["urls"][0]["metadata"] == data["urls_metadata"][url] -def test_index_create(client, user): +def test_index_create(client, user, combined_default_and_single_table_settings): data = get_doc(has_baseid=True) res = client.post("/index/", json=data, headers=user) @@ -1510,7 +1565,9 @@ assert rec["authz"] == [] -def test_index_list_with_start(client, user): +def test_index_list_with_start( + client, user, combined_default_and_single_table_settings +): data = { "did": "testprefix:11111111-1111-1111-1111-111111111111", "form": "object", @@ -1542,7 +1599,7 @@ assert rec3["did"] in dids -def test_index_list_with_page(client, user): +def test_index_list_with_page(client, user, combined_default_and_single_table_settings): data = { "did": "testprefix:11111111-1111-1111-1111-111111111111", "form": "object", @@ -1582,14 +1639,14 @@ assert rec3["did"] in dids -def test_unauthorized_create(client): +def test_unauthorized_create(client, combined_default_and_single_table_settings): # test that unauthorized post throws 403 error data = get_doc() res = client.post("/index/", json=data) assert res.status_code == 403 -def test_index_get(client, user): +def test_index_get(client, user, combined_default_and_single_table_settings): data =
get_doc(has_baseid=True) res = client.post("/index/", json=data, headers=user) @@ -1605,7 +1662,7 @@ def test_index_get(client, user): assert rec_2["did"] == rec["did"] -def test_get_id(client, user): +def test_get_id(client, user, combined_default_and_single_table_settings): # test getting an existing ID data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -1625,7 +1682,7 @@ def test_get_id(client, user): assert res.status_code == 404 -def test_index_prepend_prefix(client, user): +def test_index_prepend_prefix(client, user, combined_default_and_single_table_settings): """ For index_config = { @@ -1658,7 +1715,9 @@ def test_index_prepend_prefix(client, user): assert rec_3["did"].startswith("testprefix:") -def test_index_get_with_baseid(client, user): +def test_index_get_with_baseid( + client, user, combined_default_and_single_table_settings +): data1 = get_doc(has_baseid=True) res = client.post("/index/", json=data1, headers=user) assert res.status_code == 200 @@ -1674,7 +1733,7 @@ def test_index_get_with_baseid(client, user): assert rec_2["did"] == rec_1["did"] -def test_delete_and_recreate(client, user): +def test_delete_and_recreate(client, user, combined_default_and_single_table_settings): """ Test that you can delete an IndexDocument and be able to recreate it with the same fields. @@ -1724,7 +1783,9 @@ def test_delete_and_recreate(client, user): assert new_data["hashes"]["md5"] == new_record["hashes"]["md5"] -def test_index_create_with_multiple_hashes(client, user): +def test_index_create_with_multiple_hashes( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["hashes"] = { "md5": "8b9942cf415384b27cadf1f4d2d682e5", @@ -1737,7 +1798,9 @@ def test_index_create_with_multiple_hashes(client, user): assert record["did"] -def test_index_create_with_valid_did(client, user): +def test_index_create_with_valid_did( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["did"] = "3d313755-cbb4-4b08-899d-7bbac1f6e67d" @@ -1747,7 +1810,9 @@ def test_index_create_with_valid_did(client, user): assert record["did"] == "3d313755-cbb4-4b08-899d-7bbac1f6e67d" -def test_index_create_with_acl_authz(client, user): +def test_index_create_with_acl_authz( + client, user, combined_default_and_single_table_settings +): data = { "acl": ["a", "b"], "authz": ["x", "y"], @@ -1767,7 +1832,9 @@ def test_index_create_with_acl_authz(client, user): assert sorted(record["authz"]) == ["x", "y"] -def test_index_create_with_duplicate_acl_authz(client, user): +def test_index_create_with_duplicate_acl_authz( + client, user, combined_default_and_single_table_settings +): data = { "acl": ["a", "b", "a"], "authz": ["x", "y", "x"], @@ -1787,7 +1854,9 @@ def test_index_create_with_duplicate_acl_authz(client, user): assert sorted(record["authz"]) == ["x", "y"] -def test_index_create_with_invalid_did(client, user): +def test_index_create_with_invalid_did( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["did"] = "3d313755-cbb4-4b0fdfdfd8-899d-7bbac1f6e67dfdd" @@ -1795,7 +1864,9 @@ def test_index_create_with_invalid_did(client, user): assert response.status_code == 400 -def test_index_create_with_prefix(client, user): +def test_index_create_with_prefix( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["did"] = "cdis:3d313755-cbb4-4b08-899d-7bbac1f6e67d" @@ -1805,7 +1876,9 @@ def test_index_create_with_prefix(client, user): assert record["did"] == 
"cdis:3d313755-cbb4-4b08-899d-7bbac1f6e67d" -def test_index_create_with_duplicate_did(client, user): +def test_index_create_with_duplicate_did( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["did"] = "3d313755-cbb4-4b08-899d-7bbac1f6e67d" @@ -1815,7 +1888,9 @@ def test_index_create_with_duplicate_did(client, user): assert response.status_code == 409 -def test_index_create_with_file_name(client, user): +def test_index_create_with_file_name( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["file_name"] = "abc" @@ -1828,7 +1903,9 @@ def test_index_create_with_file_name(client, user): assert rec["file_name"] == "abc" -def test_index_create_with_version(client, user): +def test_index_create_with_version( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["version"] = "ver_123" @@ -1841,7 +1918,9 @@ def test_index_create_with_version(client, user): assert rec["version"] == data["version"] -def test_create_blank_record_with_baseid(client, user): +def test_create_blank_record_with_baseid( + client, user, combined_default_and_single_table_settings +): doc = {"uploader": "uploader_123", "baseid": "baseid_123"} res = client.post("/index/blank/", json=doc, headers=user) @@ -1854,7 +1933,9 @@ def test_create_blank_record_with_baseid(client, user): assert_blank(rec) -def test_index_create_with_uploader(client, user): +def test_index_create_with_uploader( + client, user, combined_default_and_single_table_settings +): data = get_doc() data["uploader"] = "uploader_123" res = client.post("/index/", json=data, headers=user) @@ -1866,7 +1947,9 @@ def test_index_create_with_uploader(client, user): assert rec["uploader"] == data["uploader"] -def test_index_get_global_endpoint(client, user): +def test_index_get_global_endpoint( + client, user, combined_default_and_single_table_settings +): data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -1883,7 +1966,9 @@ def test_index_get_global_endpoint(client, user): assert rec["hashes"]["md5"] == data["hashes"]["md5"] -def test_index_add_prefix_alias(client, user): +def test_index_add_prefix_alias( + client, user, combined_default_and_single_table_settings +): """ For index_config = { @@ -1914,7 +1999,7 @@ def test_index_add_prefix_alias(client, user): ] = previous_add_alias_cfg -def test_index_update(client, user): +def test_index_update(client, user, combined_default_and_single_table_settings): # create record data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -1971,7 +2056,9 @@ def test_index_update(client, user): assert rec_2["rev"] != rec["rev"] -def test_index_update_with_authz_check(client, user, use_mock_authz): +def test_index_update_with_authz_check( + client, user, use_mock_authz, combined_default_and_single_table_settings +): old_authz = "/programs/A" new_authz = "/programs/B" @@ -2006,7 +2093,9 @@ def test_index_update_with_authz_check(client, user, use_mock_authz): assert rec["authz"] == [new_authz] -def test_index_update_duplicate_acl_authz(client, user): +def test_index_update_duplicate_acl_authz( + client, user, combined_default_and_single_table_settings +): data = get_doc() res = client.post("/index/", json=data, headers=user) @@ -2037,7 +2126,9 @@ def test_index_update_duplicate_acl_authz(client, user): assert sorted(record["authz"]) == ["x", "y"] -def test_update_uploader_field(client, user): +def test_update_uploader_field( + client, user, combined_default_and_single_table_settings +): data = 
get_doc() data["uploader"] = "uploader_123" res = client.post("/index/", json=data, headers=user) @@ -2074,7 +2165,7 @@ def test_update_uploader_field(client, user): assert rec["uploader"] is None -def test_index_delete(client, user): +def test_index_delete(client, user, combined_default_and_single_table_settings): data = get_doc(has_metadata=False, has_baseid=False) res = client.post("/index/", json=data, headers=user) @@ -2098,7 +2189,7 @@ def test_index_delete(client, user): assert res.status_code == 404 -def test_create_index_version(client, user): +def test_create_index_version(client, user, combined_default_and_single_table_settings): data = get_doc(has_metadata=False, has_baseid=False) res = client.post("/index/", json=data, headers=user) @@ -2126,7 +2217,7 @@ def test_create_index_version(client, user): assert rec_2["did"] == dataNew["did"] -def test_get_latest_version(client, user): +def test_get_latest_version(client, user, combined_default_and_single_table_settings): data = get_doc(has_metadata=False, has_baseid=False, has_version=True) res_1 = client.post("/index/", json=data, headers=user) assert res_1.status_code == 200 @@ -2154,7 +2245,7 @@ def test_get_latest_version(client, user): assert rec_5["did"] == rec_1["did"] -def test_get_all_versions(client, user): +def test_get_all_versions(client, user, combined_default_and_single_table_settings): dids = [] # create 1st version @@ -2186,7 +2277,7 @@ def test_get_all_versions(client, user): assert record["did"] == dids[int(i)], "record id does not match" -def test_update_all_versions(client, user): +def test_update_all_versions(client, user, combined_default_and_single_table_settings): dids = [] mock_acl_A = ["mock_acl_A1", "mock_acl_A2"] mock_acl_B = ["mock_acl_B1", "mock_acl_B2"] @@ -2232,7 +2323,9 @@ def test_update_all_versions(client, user): assert sorted(version["authz"]) == sorted(mock_authz_B) -def test_update_all_versions_using_baseid(client, user): +def test_update_all_versions_using_baseid( + client, user, combined_default_and_single_table_settings +): mock_acl_A = ["mock_acl_A1", "mock_acl_A2"] mock_acl_B = ["mock_acl_B1", "mock_acl_B2"] mock_authz_A = ["mock_authz_A1", "mock_authz_A2"] @@ -2273,7 +2366,9 @@ def test_update_all_versions_using_baseid(client, user): assert sorted(version["authz"]) == sorted(mock_authz_B) -def test_update_all_versions_guid_not_found(client, user): +def test_update_all_versions_guid_not_found( + client, user, combined_default_and_single_table_settings +): bad_guid = "00000000-0000-0000-0000-000000000000" update_data = {"acl": ["mock_acl"], "authz": ["mock_authz"]} @@ -2286,7 +2381,9 @@ def test_update_all_versions_guid_not_found(client, user): ), "Expected update operation to fail with 404: {}".format(res.json) -def test_update_all_versions_fail_on_bad_metadata(client, user): +def test_update_all_versions_fail_on_bad_metadata( + client, user, combined_default_and_single_table_settings +): """ When making an update request, endpoint should return 400 (User error) if the metadata to update contains any fields that cannot be updated across all versions. 
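The 400-on-bad-metadata rule described above implies a whitelist: only fields that are safe to change across every version of a GUID are accepted, and the tests exercise exactly acl and authz. A hedged sketch of such a check (the field set is an assumption for illustration, not indexd's code):

    UPDATABLE_ACROSS_VERSIONS = {"acl", "authz"}  # assumed whitelist

    def all_versions_update_is_valid(payload: dict) -> bool:
        # any field outside the whitelist fails the whole request with a 400
        return set(payload) <= UPDATABLE_ACROSS_VERSIONS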
@@ -2335,7 +2432,9 @@ def test_update_all_versions_fail_on_bad_metadata(client, user): assert sorted(version["authz"]) == sorted(mock_authz_A) -def test_update_all_versions_fail_on_missing_permissions(client, user, use_mock_authz): +def test_update_all_versions_fail_on_missing_permissions( + client, user, use_mock_authz, combined_default_and_single_table_settings +): """ If user does not have the 'update' permission on any record, request should fail with 403. @@ -2378,7 +2477,7 @@ def test_update_all_versions_fail_on_missing_permissions(client, user, use_mock_ ), "Expected operation to fail due to lack of user permissions: {}".format(res.json) -def test_index_stats(client, user): +def test_index_stats(client, user, combined_default_and_single_table_settings): # populate the index with three different size records data1 = get_doc() res = client.post("/index/", json=data1, headers=user) @@ -2420,7 +2519,7 @@ def test_index_stats(client, user): ("crc", "997a6f5c"), ], ) -def test_good_hashes(client, user, typ, h): +def test_good_hashes(client, user, typ, h, combined_default_and_single_table_settings): data = { "form": "object", "size": 123, @@ -2453,7 +2552,7 @@ def test_good_hashes(client, user, typ, h): ("sha512", "not valid"), ], ) -def test_bad_hashes(client, user, typ, h): +def test_bad_hashes(client, user, typ, h, combined_default_and_single_table_settings): data = { "form": "object", "size": 123, @@ -2474,7 +2573,7 @@ def test_bad_hashes(client, user, typ, h): assert "does not match" in json_resp["error"] -def test_dos_get(client, user): +def test_dos_get(client, user, combined_default_and_single_table_settings): data = get_doc(has_urls_metadata=True, has_metadata=True, has_baseid=True) res_1 = client.post("/index/", json=data, headers=user) @@ -2502,14 +2601,14 @@ def test_dos_get(client, user): assert rec_3["data_object"]["id"] == rec_1["did"] -def test_get_dos_record_error(client, user): +def test_get_dos_record_error(client, user, combined_default_and_single_table_settings): # test exception raised at nonexistent fake_did = "testprefix:d96bab16-c4e1-44ac-923a-04328b6fe78f" res = client.get("/ga4gh/dos/v1/dataobjects/" + fake_did) assert res.status_code == 404 -def test_dos_list(client, user): +def test_dos_list(client, user, combined_default_and_single_table_settings): data = get_doc(has_urls_metadata=True, has_metadata=True, has_baseid=True) res_1 = client.post("/index/", json=data, headers=user) @@ -2535,7 +2634,9 @@ def test_dos_list(client, user): ) -def test_update_without_changing_fields(client, user): +def test_update_without_changing_fields( + client, user, combined_default_and_single_table_settings +): # setup test data = get_doc(has_urls_metadata=True, has_metadata=True, has_baseid=True) @@ -2580,7 +2681,7 @@ def test_update_without_changing_fields(client, user): assert second_doc["version"] != third_doc["version"] -def test_bulk_get_documents(client, user): +def test_bulk_get_documents(client, user, combined_default_and_single_table_settings): # just make a bunch of entries in indexd dids = [ client.post("/index/", json=get_doc(has_baseid=True), headers=user).json["did"] @@ -2597,7 +2698,9 @@ def test_bulk_get_documents(client, user): @pytest.mark.parametrize("authz", [["/some/path"], []]) -def test_indexd_admin_authz(client, mock_arborist_requests, authz): +def test_indexd_admin_authz( + client, mock_arborist_requests, authz, combined_default_and_single_table_settings +): """ Test that admin users can perform an operation even if they don't have explicit access to do 
it. @@ -2628,17 +2731,17 @@ def test_indexd_admin_authz(client, mock_arborist_requests, authz): assert res.status_code == 200 # authorized -def test_status_check(client): +def test_status_check(client, combined_default_and_single_table_settings): res = client.get("/_status/") assert res.status_code == 200 -def test_version_check(client): +def test_version_check(client, combined_default_and_single_table_settings): res = client.get("/_version") assert res.status_code == 200 -def test_get_dist(client): +def test_get_dist(client, combined_default_and_single_table_settings): res = client.get("/_dist") assert res.status_code == 200 and res.json == [ { @@ -2650,7 +2753,9 @@ def test_get_dist(client): ] -def test_changing_timestamps_updated_not_before_created(client, user): +def test_changing_timestamps_updated_not_before_created( + client, user, combined_default_and_single_table_settings +): """ Checks that records cannot be updated to have a content_updated_date before the provided content_created_date """ @@ -2678,7 +2783,9 @@ def test_changing_timestamps_updated_not_before_created(client, user): assert update_obj_resp.status_code == 400 -def test_changing_none_timestamps(client, user): +def test_changing_none_timestamps( + client, user, combined_default_and_single_table_settings +): """ Checks that updates with null values are handled correctly """ @@ -2697,7 +2804,9 @@ def test_changing_none_timestamps(client, user): assert update_obj_resp.status_code == 200 -def test_changing_timestamps_no_updated_without_created(client, user): +def test_changing_timestamps_no_updated_without_created( + client, user, combined_default_and_single_table_settings +): """ Checks that records cannot be updated to have a content_updated_date when a content_created_date does not exist for the record and one is not provided in the update. 
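Taken together, the timestamp tests pin down one invariant: a record may never carry a content_updated_date earlier than its content_created_date, and an updated date without a created date is rejected. A minimal sketch of that rule (not indexd's actual implementation):

    from datetime import datetime
    from typing import Optional

    def content_dates_valid(
        created: Optional[datetime], updated: Optional[datetime]
    ) -> bool:
        if updated is None:
            return True
        if created is None:
            return False  # content_updated_date without content_created_date -> 400
        return updated >= created  # updated before created -> 400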
@@ -2714,7 +2823,9 @@ def test_changing_timestamps_no_updated_without_created(client, user): assert update_obj_resp.status_code == 400 -def test_timestamps_updated_not_before_created(client, user): +def test_timestamps_updated_not_before_created( + client, user, combined_default_and_single_table_settings +): """ Checks that records cannot be created with a content_update_date that is before the content_created_date """ @@ -2725,7 +2836,9 @@ def test_timestamps_updated_not_before_created(client, user): assert create_obj_resp.status_code == 400 -def test_timestamps_no_updated_without_created(client, user): +def test_timestamps_no_updated_without_created( + client, user, combined_default_and_single_table_settings +): """ Checks that records cannot be created with a content_update_date without providing a content_created_date """ diff --git a/tests/test_creds.json b/tests/test_creds.json new file mode 100644 index 00000000..784abb7d --- /dev/null +++ b/tests/test_creds.json @@ -0,0 +1,6 @@ +{ + "db_host": "localhost", + "db_username": "postgres", + "db_database": "indexd_tests", + "fence_database": "fence_db" +} diff --git a/tests/test_driver_alchemy_auth.py b/tests/test_driver_alchemy_auth.py index ec81f819..1e20543c 100644 --- a/tests/test_driver_alchemy_auth.py +++ b/tests/test_driver_alchemy_auth.py @@ -1,7 +1,7 @@ -import sqlite3 import hashlib import pytest +from sqlalchemy import create_engine import tests.util as util @@ -13,61 +13,53 @@ USERNAME = "abc" PASSWORD = "123" DIGESTED = SQLAlchemyAuthDriver.digest(PASSWORD) +POSTGRES_CONNECTION = "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret # TODO check if pytest has utilities for meta-programming of tests -@util.removes("auth.sq3") def test_driver_init_does_not_create_records(): """ Tests for creation of records after driver init. Tests driver init does not have unexpected side-effects. """ - driver = SQLAlchemyAuthDriver( - "sqlite:///auth.sq3" - ) # pylint: disable=unused-variable - with sqlite3.connect("auth.sq3") as conn: - count = conn.execute( - """ - SELECT COUNT(*) FROM auth_record - """ - ).fetchone()[0] + engine = create_engine(POSTGRES_CONNECTION) + with engine.connect() as conn: + result = conn.execute("SELECT COUNT(*) FROM auth_record") + count = result.scalar() assert count == 0, "driver created records upon initilization" -@util.removes("auth.sq3") def test_driver_auth_accepts_good_creds(): """ Tests driver accepts good creds. """ - driver = SQLAlchemyAuthDriver("sqlite:///auth.sq3") - - with sqlite3.connect("auth.sq3") as conn: - conn.execute( - """ - INSERT INTO auth_record VALUES (?,?) - """, - (USERNAME, DIGESTED), + + driver = SQLAlchemyAuthDriver(POSTGRES_CONNECTION) + engine = create_engine(POSTGRES_CONNECTION) + with engine.connect() as conn: + result = conn.execute( + "INSERT INTO auth_record VALUES ('{}', '{}')".format(USERNAME, DIGESTED) ) driver.auth(USERNAME, PASSWORD) -@util.removes("auth.sq3") def test_driver_auth_rejects_bad_creds(): """ Test driver rejects bad creds. """ - driver = SQLAlchemyAuthDriver("sqlite:///auth.sq3") - - with sqlite3.connect("auth.sq3") as conn: - conn.execute( - """ - INSERT INTO auth_record VALUES (?, ?) 
- """, - (USERNAME, DIGESTED), + driver = SQLAlchemyAuthDriver( + "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret + ) + + engine = create_engine(POSTGRES_CONNECTION) + + with engine.connect() as conn: + result = conn.execute( + "INSERT INTO auth_record VALUES ('{}', '{}')".format(USERNAME, DIGESTED) ) with pytest.raises(AuthError): @@ -77,19 +69,19 @@ def test_driver_auth_rejects_bad_creds(): driver.auth("invalid_" + USERNAME, PASSWORD) -@util.removes("auth.sq3") def test_driver_auth_returns_user_context(): """ Tests driver accepts good creds. """ - driver = SQLAlchemyAuthDriver("sqlite:///auth.sq3") - - with sqlite3.connect("auth.sq3") as conn: - conn.execute( - """ - INSERT INTO auth_record VALUES (?,?) - """, - (USERNAME, DIGESTED), + driver = SQLAlchemyAuthDriver( + "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret + ) + + engine = create_engine(POSTGRES_CONNECTION) + + with engine.connect() as conn: + result = conn.execute( + "INSERT INTO auth_record VALUES ('{}', '{}')".format(USERNAME, DIGESTED) ) user = driver.auth(USERNAME, PASSWORD) diff --git a/tests/test_driver_alchemy_crud.py b/tests/test_driver_alchemy_crud.py index 4355ab15..dde5e8a5 100644 --- a/tests/test_driver_alchemy_crud.py +++ b/tests/test_driver_alchemy_crud.py @@ -1,7 +1,7 @@ import uuid -import sqlite3 import pytest +from sqlalchemy import create_engine import tests.util as util @@ -17,78 +17,67 @@ # TODO check if pytest has utilities for meta-programming of tests +POSTGRES_CONNECTION = "postgresql://postgres:postgres@localhost:5432/indexd_tests" # pragma: allowlist secret + -@util.removes("index.sq3") def test_driver_init_does_not_create_records(): """ Tests for creation of records after driver init. Tests driver init does not have unexpected side-effects. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") - count = conn.execute( - """ - SELECT COUNT(*) FROM index_record - """ - ).fetchone()[0] + engine = create_engine(POSTGRES_CONNECTION) - assert count == 0, "driver created records upon initilization" + with engine.connect() as conn: + result = conn.execute("SELECT COUNT(*) FROM index_record") + count = result.scalar() + + assert count == 0, "driver created records upon initialization" -@util.removes("index.sq3") def test_driver_init_does_not_create_record_urls(): """ Tests for creation of urls after driver init. Tests driver init does not have unexpected side-effects. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") - count = conn.execute( - """ - SELECT COUNT(*) FROM index_record_url - """ - ).fetchone()[0] + engine = create_engine(POSTGRES_CONNECTION) + + with engine.connect() as conn: + result = conn.execute("SELECT COUNT(*) FROM index_record_url") + count = result.scalar() assert count == 0, "driver created records urls upon initilization" -@util.removes("index.sq3") def test_driver_init_does_not_create_record_hashes(): """ Tests for creation of hashes after driver init. Tests driver init does not have unexpected side-effects. 
""" - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver( - "sqlite:///index.sq3" - ) # pylint: disable=unused-variable - count = conn.execute( - """ - SELECT COUNT(*) FROM index_record_hash - """ - ).fetchone()[0] + engine = create_engine(POSTGRES_CONNECTION) + + with engine.connect() as conn: + result = conn.execute("SELECT COUNT(*) FROM index_record_hash") + count = result.scalar() assert count == 0, "driver created records hashes upon initilization" -@util.removes("index.sq3") def test_driver_add_object_record(): """ Tests creation of a record. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: driver.add("object") - count = conn.execute( - """ - SELECT COUNT(*) FROM index_record - """ - ).fetchone()[0] + result = conn.execute("SELECT COUNT(*) FROM index_record") + count = result.scalar() assert count == 1, "driver did not create record" @@ -105,42 +94,37 @@ def test_driver_add_object_record(): assert record[4] is None, "record size non-null" -@util.removes("index.sq3") def test_driver_add_bundle_record(): """ Tests creation of a record. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: driver.add_blank_bundle() - count = conn.execute( - """ - SELECT COUNT(*) FROM drs_bundle_record - """ - ).fetchone()[0] + result = conn.execute("SELECT COUNT(*) FROM drs_bundle_record") + count = result.scalar() assert count == 1, "driver did not create record" - record = conn.execute( - """ - SELECT * FROM drs_bundle_record - """ - ).fetchone() + result = conn.execute("SELECT * FROM drs_bundle_record").fetchone() - assert record != None - assert len(record) == 10 + assert result != None + assert len(result) == 10 -@util.removes("index.sq3") def test_driver_add_container_record(): """ Tests creation of a record. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: driver.add("container") count = conn.execute( @@ -164,14 +148,14 @@ def test_driver_add_container_record(): assert record[4] == None, "record size non-null" -@util.removes("index.sq3") def test_driver_add_bundles_record(): """ Tests creation of a record. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: driver.add_bundle(name="bundle") count = conn.execute( @@ -194,14 +178,15 @@ def test_driver_add_bundles_record(): assert record[3], "record updated date not populated" -@util.removes("index.sq3") def test_driver_add_multipart_record(): """ Tests creation of a record. 
""" - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: driver.add("multipart") count = conn.execute( @@ -225,12 +210,11 @@ def test_driver_add_multipart_record(): assert record[4] == None, "record size non-null" -@util.removes("index.sq3") def test_driver_add_with_valid_did(): """ Tests creation of a record with given valid did. """ - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) form = "object" did = "3d313755-cbb4-4b08-899d-7bbac1f6e67d" @@ -239,12 +223,11 @@ def test_driver_add_with_valid_did(): assert s.query(IndexRecord).first().did == did -@util.removes("index.sq3") def test_driver_add_with_duplicate_did(): """ Tests creation of a record with duplicate did. """ - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) form = "object" did = "3d313755-cbb4-4b08-899d-7bbac1f6e67d" @@ -254,14 +237,14 @@ def test_driver_add_with_duplicate_did(): driver.add(form, did=did) -@util.removes("index.sq3") def test_driver_add_multiple_records(): """ Tests creation of a record. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: driver.add("object") driver.add("object") driver.add("object") @@ -288,14 +271,14 @@ def test_driver_add_multiple_records(): assert record[4] == None, "record size non-null" -@util.removes("index.sq3") def test_driver_add_with_size(): """ Tests creation of a record with size. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: form = "object" size = 512 @@ -319,14 +302,14 @@ def test_driver_add_with_size(): assert size == new_size, "record size mismatch" -@util.removes("index.sq3") def test_driver_add_with_urls(): """ Tests creation of a record with urls. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + with engine.connect() as conn: form = "object" urls = ["a", "b", "c"] @@ -360,12 +343,11 @@ def test_driver_add_with_urls(): assert urls == new_urls, "record urls mismatch" -@util.removes("index.sq3") def test_driver_add_with_filename(): """ Tests creation of a record with filename. """ - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) form = "object" file_name = "abc" @@ -374,12 +356,11 @@ def test_driver_add_with_filename(): assert s.query(IndexRecord).first().file_name == "abc" -@util.removes("index.sq3") def test_driver_add_with_version(): """ Tests creation of a record with version string. """ - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) form = "object" version = "ver_123" @@ -388,14 +369,15 @@ def test_driver_add_with_version(): assert s.query(IndexRecord).first().version == "ver_123" -@util.removes("index.sq3") def test_driver_add_with_hashes(): """ Tests creation of a record with hashes. 
""" - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: form = "object" hashes = {"a": "1", "b": "2", "c": "3"} @@ -429,31 +411,30 @@ def test_driver_add_with_hashes(): assert hashes == new_hashes, "record hashes mismatch" -@util.removes("index.sq3") def test_driver_get_record(): """ Tests retrieval of a record. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) + + with engine.connect() as conn: did = str(uuid.uuid4()) baseid = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] size = 512 form = "object" - baseid = str(uuid.uuid4()) created_date = datetime.now() updated_date = datetime.now() description = "a description" content_created_date = datetime.now() content_updated_date = datetime.now() + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) + conn.execute( - """ - INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) VALUES (?,?,?,?,?,?,?,?,?,?) - """, - ( + "INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) VALUES ('{}','{}','{}','{}','{}','{}','{}','{}','{}','{}')".format( did, baseid, rev, @@ -464,11 +445,9 @@ def test_driver_get_record(): content_created_date, content_updated_date, description, - ), + ) ) - conn.commit() - record = driver.get(did) assert record["did"] == did, "record id does not match" @@ -484,48 +463,49 @@ def test_driver_get_record(): ), "updated date does not match" -@util.removes("index.sq3") def test_driver_get_fails_with_no_records(): """ Tests retrieval of a record fails if there are no records. """ - driver = SQLAlchemyIndexDriver("sqlite:///index.sq3") + driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION) with pytest.raises(NoRecordFound): driver.get("some_record_that_does_not_exist") -@util.removes("index.sq3") def test_driver_nonstrict_get_without_prefix(): """ Tests retrieval of a record when a default prefix is set, but no prefix is supplied by the request. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver( - "sqlite:///index.sq3", - index_config={ - "DEFAULT_PREFIX": "testprefix/", - "PREPEND_PREFIX": True, - "ADD_PREFIX_ALIAS": False, - }, - ) + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver( + POSTGRES_CONNECTION, + index_config={ + "DEFAULT_PREFIX": "testprefix/", + "PREPEND_PREFIX": True, + "ADD_PREFIX_ALIAS": False, + }, + ) + + with engine.connect() as conn: did = str(uuid.uuid4()) baseid = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] size = 512 form = "object" - baseid = str(uuid.uuid4()) created_date = datetime.now() updated_date = datetime.now() content_created_date = datetime.now() content_updated_date = datetime.now() description = "a description" + + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) + conn.execute( """ - INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) VALUES (?,?,?,?,?,?,?,?,?,?) 
- """, - ( + INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) VALUES ('{}','{}','{}','{}','{}','{}','{}','{}','{}','{}') + """.format( "testprefix/" + did, baseid, rev, @@ -539,8 +519,6 @@ def test_driver_nonstrict_get_without_prefix(): ), ) - conn.commit() - record = driver.get_with_nonstrict_prefix(did) assert record["did"] == "testprefix/" + did, "record id does not match" @@ -556,22 +534,22 @@ def test_driver_nonstrict_get_without_prefix(): ), "updated date does not match" -@util.removes("index.sq3") def test_driver_nonstrict_get_with_prefix(): """ Tests retrieval of a record when a default prefix is set and supplied by the request, but records are stored without prefixes. """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver( - "sqlite:///index.sq3", - index_config={ - "DEFAULT_PREFIX": "testprefix/", - "PREPEND_PREFIX": False, - "ADD_PREFIX_ALIAS": True, - }, - ) + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver( + POSTGRES_CONNECTION, + index_config={ + "DEFAULT_PREFIX": "testprefix/", + "PREPEND_PREFIX": False, + "ADD_PREFIX_ALIAS": True, + }, + ) + with engine.connect() as conn: did = str(uuid.uuid4()) baseid = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] @@ -583,11 +561,13 @@ def test_driver_nonstrict_get_with_prefix(): description = "a description" content_created_date = datetime.now() content_updated_date = datetime.now() + + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) + conn.execute( """ - INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) VALUES (?,?,?,?,?,?,?,?,?,?) - """, - ( + INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) VALUES ('{}','{}','{}','{}','{}','{}','{}','{}','{}','{}') + """.format( did, baseid, rev, @@ -601,8 +581,6 @@ def test_driver_nonstrict_get_with_prefix(): ), ) - conn.commit() - record = driver.get_with_nonstrict_prefix("testprefix/" + did) assert record["did"] == did, "record id does not match" @@ -618,21 +596,20 @@ def test_driver_nonstrict_get_with_prefix(): ), "updated date does not match" -@util.removes("index.sq3") def test_driver_nonstrict_get_with_incorrect_prefix(): """ Tests retrieval of a record fails if default prefix is set and request uses a different prefix with same uuid """ - with sqlite3.connect("index.sq3") as conn: - driver = SQLAlchemyIndexDriver( - "sqlite:///index.sq3", - index_config={ - "DEFAULT_PREFIX": "testprefix/", - "PREPEND_PREFIX": True, - "ADD_PREFIX_ALIAS": False, - }, - ) - + engine = create_engine(POSTGRES_CONNECTION) + driver = SQLAlchemyIndexDriver( + POSTGRES_CONNECTION, + index_config={ + "DEFAULT_PREFIX": "testprefix/", + "PREPEND_PREFIX": True, + "ADD_PREFIX_ALIAS": False, + }, + ) + with engine.connect() as conn: did = str(uuid.uuid4()) baseid = str(uuid.uuid4()) rev = str(uuid.uuid4())[:8] @@ -642,26 +619,26 @@ def test_driver_nonstrict_get_with_incorrect_prefix(): created_date = datetime.now() updated_date = datetime.now() + conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)) + conn.execute( """ - INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date) VALUES (?,?,?,?,?,?,?) 
-            """,
-            ("testprefix/" + did, baseid, rev, form, size, created_date, updated_date),
+            INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date) VALUES ('{}','{}','{}','{}','{}','{}','{}')
+            """.format(
+                "testprefix/" + did, baseid, rev, form, size, created_date, updated_date
+            ),
         )

-        conn.commit()
-
         with pytest.raises(NoRecordFound):
             driver.get_with_nonstrict_prefix("wrongprefix/" + did)


-@util.removes("index.sq3")
 def test_driver_nonstrict_get_with_no_default_prefix():
     """
     Tests retrieval of a record fails as expected if no default prefix is set
     """
     driver = SQLAlchemyIndexDriver(
-        "sqlite:///index.sq3",
+        POSTGRES_CONNECTION,
         index_config={
             "DEFAULT_PREFIX": None,
             "PREPEND_PREFIX": False,
@@ -673,13 +650,14 @@ def test_driver_nonstrict_get_with_no_default_prefix():
         driver.get_with_nonstrict_prefix("fake_id_without_prefix")


-@util.removes("index.sq3")
 def test_driver_get_latest_version():
     """
     Tests retrieval of the latest record version
     """
-    with sqlite3.connect("index.sq3") as conn:
-        driver = SQLAlchemyIndexDriver("sqlite:///index.sq3")
+    engine = create_engine(POSTGRES_CONNECTION)
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
+
+    with engine.connect() as conn:
         baseid = str(uuid.uuid4())

         for _ in range(10):
@@ -693,11 +671,15 @@ def test_driver_get_latest_version():
             description = "a description"
             content_created_date = datetime.now()
             content_updated_date = datetime.now()
+
+            conn.execute(
+                "INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)
+            )
+
             conn.execute(
                 """
-                INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) VALUES (?,?,?,?,?,?,?,?,?,?)
-                """,
-                (
+                INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) VALUES ('{}','{}','{}','{}','{}','{}','{}','{}','{}','{}')
+                """.format(
                     did,
                     baseid,
                     rev,
@@ -711,8 +693,6 @@ def test_driver_get_latest_version():
                 ),
             )

-        conn.commit()
-
         record = driver.get_latest_version(did)

         assert record["did"] == did, "record id does not match"
@@ -727,14 +707,14 @@ def test_driver_get_latest_version():
         ), "updated date does not match"


-@util.removes("index.sq3")
 def test_driver_get_latest_version_with_no_record():
     """
     Tests retrieval of the latest record version
     """
-    with sqlite3.connect("index.sq3") as conn:
-        driver = SQLAlchemyIndexDriver("sqlite:///index.sq3")
+    engine = create_engine(POSTGRES_CONNECTION)
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
+    with engine.connect() as conn:
         for _ in range(10):
             did = str(uuid.uuid4())
             rev = str(uuid.uuid4())[:8]
@@ -744,25 +724,29 @@ def test_driver_get_latest_version_with_no_record():
             dt = datetime.now()

             conn.execute(
-                """
-                INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date) VALUES (?,?,?,?,?,?,?)
-                """,
-                (did, baseid, rev, form, size, dt, dt),
+                "INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid)
             )

-        conn.commit()
+            conn.execute(
+                """
+                INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date) VALUES ('{}','{}','{}','{}','{}','{}','{}')
+                """.format(
+                    did, baseid, rev, form, size, dt, dt
+                ),
+            )

         with pytest.raises(NoRecordFound):
             driver.get_latest_version("some base version")


-@util.removes("index.sq3")
 def test_driver_get_all_versions():
     """
     Tests retrieval of all record versions
     """
-    with sqlite3.connect("index.sq3") as conn:
-        driver = SQLAlchemyIndexDriver("sqlite:///index.sq3")
+    engine = create_engine(POSTGRES_CONNECTION)
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
+
+    with engine.connect() as conn:
         baseid = str(uuid.uuid4())

         NUMBER_OF_RECORD = 3
@@ -774,6 +758,9 @@ def test_driver_get_all_versions():
         content_created_dates = []
         content_updated_dates = []
         descriptions = []
+
+        conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid))
+
         for _ in range(NUMBER_OF_RECORD):
             did = str(uuid.uuid4())
             rev = str(uuid.uuid4())[:8]
@@ -790,12 +777,12 @@ def test_driver_get_all_versions():
             updated_dates.append(updated_date)
             content_created_dates.append(content_created_date)
             descriptions.append(description)
+
             conn.execute(
                 """
                 INSERT INTO index_record(did, baseid, rev, form, size, created_date, updated_date, content_created_date, content_updated_date, description) \
-                VALUES (?,?,?,?,?,?,?,?,?,?)
-                """,
-                (
+                VALUES ('{}','{}','{}','{}','{}','{}','{}','{}','{}','{}')
+                """.format(
                     did,
                     baseid,
                     rev,
@@ -809,8 +796,6 @@ def test_driver_get_all_versions():
                 ),
             )

-        conn.commit()
-
         records = driver.get_all_versions(did)

         assert len(records) == NUMBER_OF_RECORD, "the number of records does not match"
@@ -828,15 +813,18 @@ def test_driver_get_all_versions():
         ), "updated date does not match"


-@util.removes("index.sq3")
 def test_driver_get_all_versions_with_no_record():
     """
     Tests retrieval of all record versions fails if the baseid is not found
     """
-    with sqlite3.connect("index.sq3") as conn:
-        driver = SQLAlchemyIndexDriver("sqlite:///index.sq3")
+    engine = create_engine(POSTGRES_CONNECTION)
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
+
+    with engine.connect() as conn:
         baseid = str(uuid.uuid4())

+        conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid))
+
         for _ in range(3):
             did = str(uuid.uuid4())
             rev = str(uuid.uuid4())[:8]
@@ -845,69 +833,74 @@ def test_driver_get_all_versions_with_no_record():
             conn.execute(
                 """
-                INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?)
-                """,
-                (did, baseid, rev, form, size),
+                INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}')
+                """.format(
+                    did, baseid, rev, form, size
+                ),
             )

-        conn.commit()
-
         with pytest.raises(NoRecordFound):
             driver.get_all_versions("some baseid")


-@util.removes("index.sq3")
 def test_driver_get_fails_with_invalid_id():
     """
     Tests retrieval of a record fails if the record id is not found.
     """
-    with sqlite3.connect("index.sq3") as conn:
-        driver = SQLAlchemyIndexDriver("sqlite:///index.sq3")
+    engine = create_engine(POSTGRES_CONNECTION)
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
+    with engine.connect() as conn:
         did = str(uuid.uuid4())
         baseid = str(uuid.uuid4())
         rev = str(uuid.uuid4())[:8]
         form = "object"
+        size = 512
+
+        conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid))

         conn.execute(
             """
-            INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?)
-            """,
-            (did, baseid, rev, form, None),
+            INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}')
+            """.format(
+                did, baseid, rev, form, size
+            ),
         )

-        conn.commit()
-
         with pytest.raises(NoRecordFound):
             driver.get("some_record_that_does_not_exist")


-def test_driver_update_record(skip_authz):
+def test_driver_update_record(
+    skip_authz,
+):
     _test_driver_update_record()


-@util.removes("index.sq3")
 def _test_driver_update_record():
     """
     Tests updating of a record.
     """
-    with sqlite3.connect("index.sq3") as conn:
-        driver = SQLAlchemyIndexDriver("sqlite:///index.sq3")
+    engine = create_engine(POSTGRES_CONNECTION)
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
+    with engine.connect() as conn:
         did = str(uuid.uuid4())
         baseid = str(uuid.uuid4())
         rev = str(uuid.uuid4())[:8]
         form = "object"
+        size = 512
+
+        conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid))

         conn.execute(
             """
-            INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?)
-            """,
-            (did, baseid, rev, form, None),
+            INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}')
+            """.format(
+                did, baseid, rev, form, size
+            ),
         )

-        conn.commit()
-
         # update_size = 256
         update_urls = ["a", "b", "c"]
         # update_hashes = {"a": "1", "b": "2", "c": "3"}
@@ -954,12 +947,11 @@ def _test_driver_update_record():
     assert version == new_version, "version does not match"


-@util.removes("index.sq3")
 def test_driver_update_fails_with_no_records():
     """
     Tests updating a record fails if there are no records.
     """
-    driver = SQLAlchemyIndexDriver("sqlite:///index.sq3")
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)

     with pytest.raises(NoRecordFound):
         driver.update(
@@ -967,84 +959,92 @@ def test_driver_update_fails_with_no_records():
         )


-@util.removes("index.sq3")
 def test_driver_update_fails_with_invalid_id():
     """
     Tests updating a record fails if the record id is not found.
     """
-    with sqlite3.connect("index.sq3") as conn:
-        driver = SQLAlchemyIndexDriver("sqlite:///index.sq3")
+    engine = create_engine(POSTGRES_CONNECTION)
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
+    with engine.connect() as conn:
         did = str(uuid.uuid4())
         baseid = str(uuid.uuid4())
         rev = str(uuid.uuid4())[:8]
         form = "object"
+        size = 512
+
+        conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid))

         conn.execute(
             """
-            INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?)
-            """,
-            (did, baseid, rev, form, None),
+            INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}')
+            """.format(
+                did, baseid, rev, form, size
+            ),
         )

-        conn.commit()
-
         with pytest.raises(NoRecordFound):
             driver.update("some_record_that_does_not_exist", "some_record_version", rev)


-@util.removes("index.sq3")
 def test_driver_update_fails_with_invalid_rev():
     """
     Tests updating a record fails if the record rev is invalid.
     """
-    with sqlite3.connect("index.sq3") as conn:
-        driver = SQLAlchemyIndexDriver("sqlite:///index.sq3")
+    engine = create_engine(POSTGRES_CONNECTION)
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
+    with engine.connect() as conn:
         did = str(uuid.uuid4())
         baseid = str(uuid.uuid4())
         rev = str(uuid.uuid4())[:8]
         form = "object"
+        size = 512
+
+        conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid))

         conn.execute(
             """
-            INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?)
-            """,
-            (did, baseid, rev, form, None),
+            INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}')
+            """.format(
+                did, baseid, rev, form, size
+            ),
         )

-        conn.commit()
-
         with pytest.raises(RevisionMismatch):
             driver.update(did, baseid, "some_revision")


-def test_driver_delete_record(skip_authz):
+def test_driver_delete_record(
+    skip_authz,
+):
     _test_driver_delete_record()


-@util.removes("index.sq3")
 def _test_driver_delete_record():
     """
     Tests deletion of a record.
     """
-    with sqlite3.connect("index.sq3") as conn:
-        driver = SQLAlchemyIndexDriver("sqlite:///index.sq3")
+    engine = create_engine(POSTGRES_CONNECTION)
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
+    with engine.connect() as conn:
         did = str(uuid.uuid4())
         baseid = str(uuid.uuid4())
         rev = str(uuid.uuid4())[:8]
         form = "object"
+        size = 512
+
+        conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid))

         conn.execute(
             """
-            INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?)
-            """,
-            (did, baseid, rev, form, None),
+            INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}')
+            """.format(
+                did, baseid, rev, form, size
+            ),
        )

-        conn.commit()
-
         driver.delete(did, rev)

         count = conn.execute(
@@ -1056,92 +1056,95 @@ def _test_driver_delete_record():
     assert count == 0, "records remain after deletion"


-@util.removes("index.sq3")
 def test_driver_delete_fails_with_no_records():
     """
     Tests deletion of a record fails if there are no records.
     """
-    driver = SQLAlchemyIndexDriver("sqlite:///index.sq3")
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)

     with pytest.raises(NoRecordFound):
         driver.delete("some_record_that_does_not_exist", "some_revision")


-@util.removes("index.sq3")
 def test_driver_delete_fails_with_invalid_id():
     """
     Tests deletion of a record fails if the record id is not found.
     """
-    with sqlite3.connect("index.sq3") as conn:
-        driver = SQLAlchemyIndexDriver("sqlite:///index.sq3")
+    engine = create_engine(POSTGRES_CONNECTION)
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
+    with engine.connect() as conn:
         did = str(uuid.uuid4())
         baseid = str(uuid.uuid4())
         rev = str(uuid.uuid4())[:8]
         form = "object"
+        size = 512
+
+        conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid))

         conn.execute(
             """
-            INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?)
-            """,
-            (did, baseid, rev, form, None),
+            INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}')
+            """.format(
+                did, baseid, rev, form, size
+            ),
         )

-        conn.commit()
-
         with pytest.raises(NoRecordFound):
             driver.delete("some_record_that_does_not_exist", rev)


-@util.removes("index.sq3")
 def test_driver_delete_fails_with_invalid_rev():
     """
     Tests deletion of a record fails if the record rev is invalid.
     """
-    with sqlite3.connect("index.sq3") as conn:
-        driver = SQLAlchemyIndexDriver("sqlite:///index.sq3")
+    engine = create_engine(POSTGRES_CONNECTION)
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
+    with engine.connect() as conn:
         did = str(uuid.uuid4())
         baseid = str(uuid.uuid4())
         rev = str(uuid.uuid4())[:8]
         form = "object"
+        size = 512
+
+        conn.execute("INSERT INTO base_version(baseid) VALUES ('{}')".format(baseid))

         conn.execute(
             """
-            INSERT INTO index_record(did, baseid, rev, form, size) VALUES (?,?,?,?,?)
-            """,
-            (did, baseid, rev, form, None),
+            INSERT INTO index_record(did, baseid, rev, form, size) VALUES ('{}','{}','{}','{}','{}')
+            """.format(
+                did, baseid, rev, form, size
+            ),
         )

-        conn.commit()
-
         with pytest.raises(RevisionMismatch):
             driver.delete(did, "some_revision")


-@util.removes("index.sq3")
 def test_driver_get_bundle():
     """
     Tests retrieval of a bundle record.
     """
-    with sqlite3.connect("index.sq3") as conn:
-        driver = SQLAlchemyIndexDriver("sqlite:///index.sq3")
+    engine = create_engine(POSTGRES_CONNECTION)
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
+    with engine.connect() as conn:
         bundle_id = str(uuid.uuid4())
         checksum = "iuhd91h9ufh928jidsoajh9du328"
         size = 512
         name = "object"
         created_time = updated_time = datetime.now()
-        bundle_data = "{'bundle_data': [{'access_methods': [{'access_id': 's3', 'access_url': {'url': 's3://endpointurl/bucket/key'}, 'region': '', 'type': 's3'}], 'aliases': [], 'checksums': [{'checksum': '8b9942cf415384b27cadf1f4d2d682e5', 'type': 'md5'}], 'contents': [], 'created_time': '2020-04-23T21:42:36.506404', 'description': '', 'id': 'testprefix:7e677693-9da3-455a-b51c-03467d5498b0', 'mime_type': 'application/json', 'name': None, 'self_uri': 'drs://fictitious-commons.io/testprefix:7e677693-9da3-455a-b51c-03467d5498b0', 'size': 123, 'updated_time': '2020-04-23T21:42:36.506410', 'version': '3c995667'}], 'bundle_id': '1ff381ef-55c7-42b9-b33f-81ac0689d131', 'checksum': '65b464c1aea98176ef2fa38e8b6b9fc7', 'created_time': '2020-04-23T21:42:36.564808', 'name': 'test_bundle', 'size': 123, 'updated_time': '2020-04-23T21:42:36.564819'}"
+        bundle_data = '{"bundle_data": [{"access_methods": [{"access_id": "s3", "access_url": {"url": "s3://endpointurl/bucket/key"}, "region": "", "type": "s3"}], "aliases": [], "checksums": [{"checksum": "8b9942cf415384b27cadf1f4d2d682e5", "type": "md5"}], "contents": [], "created_time": "2020-04-23T21:42:36.506404", "description": "", "id": "testprefix:7e677693-9da3-455a-b51c-03467d5498b0", "mime_type": "application/json", "name": None, "self_uri": "drs://fictitious-commons.io/testprefix:7e677693-9da3-455a-b51c-03467d5498b0", "size": 123, "updated_time": "2020-04-23T21:42:36.506410", "version": "3c995667"}], "bundle_id": "1ff381ef-55c7-42b9-b33f-81ac0689d131", "checksum": "65b464c1aea98176ef2fa38e8b6b9fc7", "created_time": "2020-04-23T21:42:36.564808", "name": "test_bundle", "size": 123, "updated_time": "2020-04-23T21:42:36.564819"}'
+
         conn.execute(
             """
-            INSERT INTO drs_bundle_record(bundle_id, name, checksum, size, bundle_data, created_time, updated_time) VALUES (?,?,?,?,?,?,?)
- """, - (bundle_id, name, checksum, size, bundle_data, created_time, updated_time), + INSERT INTO drs_bundle_record(bundle_id, name, checksum, size, bundle_data, created_time, updated_time) VALUES ('{}','{}','{}','{}','{}','{}','{}') + """.format( + bundle_id, name, checksum, size, bundle_data, created_time, updated_time + ), ) - conn.commit() - record = driver.get_bundle(bundle_id) assert record["id"] == bundle_id, "record id does not match" diff --git a/tests/test_drs.py b/tests/test_drs.py index 68a8c015..d78417f6 100644 --- a/tests/test_drs.py +++ b/tests/test_drs.py @@ -1,5 +1,6 @@ import flask import json + import tests.conftest import requests import responses @@ -65,7 +66,7 @@ def get_bundle(client, user, has_description=True): return bundle -def test_drs_get(client, user): +def test_drs_get(client, user, combined_default_and_single_table_settings): data = get_doc() res_1 = client.post("/index/", json=data, headers=user) assert res_1.status_code == 200 @@ -84,23 +85,37 @@ def test_drs_get(client, user): assert "contents" not in rec_2 -def test_drs_get_no_default(client, user): +def test_drs_get_no_default(client, user, combined_default_and_single_table_settings): # Change default index driver settings to use no prefix - settings["config"]["INDEX"]["driver"].config["DEFAULT_PREFIX"] = None - settings["config"]["INDEX"]["driver"].config["ADD_PREFIX_ALIAS"] = False + combined_default_and_single_table_settings.config["INDEX"]["driver"].config[ + "DEFAULT_PREFIX" + ] = None + combined_default_and_single_table_settings.config["INDEX"]["driver"].config[ + "PREPEND_PREFIX" + ] = False + combined_default_and_single_table_settings.config["INDEX"]["driver"].config[ + "ADD_PREFIX_ALIAS" + ] = False data = get_doc() - did = "ad8f4658-6acd-4f96-0dd8-3709890c959f" - data["did"] = did res_1 = client.post("/index/", json=data, headers=user) assert res_1.status_code == 200 + did = res_1.json["did"] + assert "testprefix:" not in did res_2 = client.get("/ga4gh/drs/v1/objects/" + did) assert res_2.status_code == 200 rec_2 = res_2.json assert rec_2["self_uri"] == "drs://" + did - settings["config"]["INDEX"]["driver"].config["DEFAULT_PREFIX"] = "testprefix:" - settings["config"]["INDEX"]["driver"].config["ADD_PREFIX_ALIAS"] = True + combined_default_and_single_table_settings.config["INDEX"]["driver"].config[ + "DEFAULT_PREFIX" + ] = "testprefix:" + combined_default_and_single_table_settings.config["INDEX"]["driver"].config[ + "PREPEND_PREFIX" + ] = True + combined_default_and_single_table_settings.config["INDEX"]["driver"].config[ + "ADD_PREFIX_ALIAS" + ] = True def verify_timestamps(expected_doc, did, client, has_updated_date=True): @@ -130,7 +145,7 @@ def verify_timestamps(expected_doc, did, client, has_updated_date=True): assert drs_resp.json["index_updated_time"] == record_resp.json["updated_date"] -def test_timestamps(client, user): +def test_timestamps(client, user, combined_default_and_single_table_settings): data = get_doc() create_obj_resp = client.post("/index/", json=data, headers=user) assert create_obj_resp.status_code == 200 @@ -138,7 +153,7 @@ def test_timestamps(client, user): verify_timestamps(data, obj_did, client) -def test_changing_timestamps(client, user): +def test_changing_timestamps(client, user, combined_default_and_single_table_settings): data = get_doc() create_obj_resp = client.post("/index/", json=data, headers=user) assert create_obj_resp.status_code == 200 @@ -156,7 +171,9 @@ def test_changing_timestamps(client, user): verify_timestamps(update_json, update_obj_did, client) 
-def test_timestamps_updated_sets_to_created(client, user):
+def test_timestamps_updated_sets_to_created(
+    client, user, combined_default_and_single_table_settings
+):
     """
     Checks that content_updated_date is set to content_created_date when none is provided.
     """
@@ -167,7 +184,7 @@ def test_timestamps_updated_sets_to_created(client, user):
     verify_timestamps(data, obj_did, client, has_updated_date=False)


-def test_timestamps_none(client, user):
+def test_timestamps_none(client, user, combined_default_and_single_table_settings):
     data = get_doc(has_content_updated_date=False, has_content_created_date=False)
     create_obj_resp = client.post("/index/", json=data, headers=user)
     assert create_obj_resp.status_code == 200
@@ -184,7 +201,7 @@ def test_timestamps_none(client, user):
     assert drs_resp.json["index_updated_time"] == record_resp.json["updated_date"]


-def test_drs_get_description(client, user):
+def test_drs_get_description(client, user, combined_default_and_single_table_settings):
     data = get_doc(has_description=True)
     res_1 = client.post("/index/", json=data, headers=user)
     assert res_1.status_code == 200
@@ -195,7 +212,9 @@ def test_drs_get_description(client, user):
     assert rec_2["description"] == data["description"]


-def test_drs_changing_description(client, user):
+def test_drs_changing_description(
+    client, user, combined_default_and_single_table_settings
+):
     data = get_doc(has_description=True)
     create_obj_resp = client.post("/index/", json=data, headers=user)
     assert create_obj_resp.status_code == 200
@@ -214,7 +233,9 @@ def test_drs_changing_description(client, user):
        assert drs_rec["description"] == update_json["description"]


-def test_drs_get_no_description(client, user):
+def test_drs_get_no_description(
+    client, user, combined_default_and_single_table_settings
+):
     data = get_doc(has_description=False)
     res_1 = client.post("/index/", json=data, headers=user)
     assert res_1.status_code == 200
@@ -225,7 +246,7 @@ def test_drs_get_no_description(client, user):
     assert rec_2["description"] is None


-def test_drs_get_bundle(client, user):
+def test_drs_get_bundle(client, user, combined_default_and_single_table_settings):
     bundle = get_bundle(client, user)
     bundle_res = client.post("/bundle/", json=bundle, headers=user)
     assert bundle_res.status_code == 200
@@ -235,7 +256,9 @@ def test_drs_get_bundle(client, user):
     assert drs_res.json["description"] == bundle["description"]


-def test_drs_get_bundle_no_description(client, user):
+def test_drs_get_bundle_no_description(
+    client, user, combined_default_and_single_table_settings
+):
     bundle = get_bundle(client, user, has_description=False)
     bundle_res = client.post("/bundle/", json=bundle, headers=user)
     assert bundle_res.status_code == 200
@@ -245,7 +268,9 @@ def test_drs_get_bundle_no_description(client, user):
     assert drs_res.json["description"] == ""


-def test_drs_multiple_endpointurl(client, user):
+def test_drs_multiple_endpointurl(
+    client, user, combined_default_and_single_table_settings
+):
     object_urls = {
         "sftp": "sftp://endpointurl/bucket/key",
         "ftp": "ftp://endpointurl/bucket/key",
@@ -267,7 +292,7 @@ def test_drs_multiple_endpointurl(client, user):
            assert url["access_url"]["url"] == object_urls[protocol]


-def test_drs_list(client, user):
+def test_drs_list(client, user, combined_default_and_single_table_settings):
     record_length = 7
     data = get_doc()
     submitted_guids = []
@@ -296,14 +321,18 @@ def test_drs_list(client, user):
     assert len(rec_4["drs_objects"]) == record_length


-def test_get_drs_record_not_found(client, user):
+def test_get_drs_record_not_found(
+    client, user, combined_default_and_single_table_settings
+):
     # test exception raised for a nonexistent record
     fake_did = "testprefix:d96bab16-c4e1-44ac-923a-04328b6fe78f"
     res = client.get("/ga4gh/drs/v1/objects/" + fake_did)
     assert res.status_code == 404


-def test_get_drs_with_encoded_slash(client, user):
+def test_get_drs_with_encoded_slash(
+    client, user, combined_default_and_single_table_settings
+):
     data = get_doc()
     data["did"] = "testprefix:ed8f4658-6acd-4f96-9dd8-3709890c959e"
     res_1 = client.post("/index/", json=data, headers=user)
@@ -322,7 +351,7 @@ def test_get_drs_with_encoded_slash(client, user):
     assert rec_2["self_uri"] == "drs://testprefix:" + rec_1["did"].split(":")[1]


-def test_drs_service_info_endpoint(client):
+def test_drs_service_info_endpoint(client, combined_default_and_single_table_settings):
     """
     Test drs service info endpoint with friendly distribution information
     """
@@ -349,7 +378,9 @@ def test_drs_service_info_endpoint(client):
     assert res.json == expected_info


-def test_drs_service_info_no_information_configured(client):
+def test_drs_service_info_no_information_configured(
+    client, combined_default_and_single_table_settings
+):
     """
     Test drs service info endpoint when dist is not configured in the indexd config file
     """
diff --git a/tests/test_migrate_to_single_table.py b/tests/test_migrate_to_single_table.py
new file mode 100644
index 00000000..37dbd1ff
--- /dev/null
+++ b/tests/test_migrate_to_single_table.py
@@ -0,0 +1,148 @@
+import random
+import uuid
+
+from sqlalchemy import create_engine
+
+from bin.migrate_to_single_table import IndexRecordMigrator
+from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver
+
+
+POSTGRES_CONNECTION = "postgresql://postgres:postgres@localhost:5432/indexd_tests"  # pragma: allowlist secret
+
+
+def create_record(n_records=1):
+    """
+    Create n_records records in the multi-table schema.
+    """
+
+    engine = create_engine(POSTGRES_CONNECTION)
+    driver = SQLAlchemyIndexDriver(POSTGRES_CONNECTION)
+    did_list = []
+    for _ in range(n_records):
+        did = str(uuid.uuid4())
+        baseid = str(uuid.uuid4())
+        size = random.randint(0, 1024)
+        file_name = f"file_{random.randint(0, 1024)}"
+        index_metadata = {
+            "metadata_key": "metadata_value",
+            "some_other_key": "some_other_value",
+        }
+        hashes = {"md5": "some_md5", "sha1": "some_sha1"}
+        urls = ["s3://bucket/data.json", "gs://bucket/data.txt"]
+        urls_metadata = {
+            "s3://bucket/data.json": {"metadata_key": "metadata_value"},
+            "gs://bucket/data.txt": {"metadata_key": "metadata_value"},
+        }
+        version = str(uuid.uuid4())[:5]
+        acl = random.choice(["*", "phs00001", "phs00002", "phs00003"])
+        authz = random.choice(["/open", "phs00001", "phs00002"])
+        rev = str(uuid.uuid4())[:8]
+        uploader = "uploader"
+        description = "this is a test file"
+
+        driver.add(
+            "object",
+            did=did,
+            size=size,
+            file_name=file_name,
+            metadata=index_metadata,
+            urls_metadata=urls_metadata,
+            version=version,
+            urls=urls,
+            acl=acl,
+            authz=authz,
+            hashes=hashes,
+            baseid=baseid,
+            uploader=uploader,
+            description=description,
+        )
+        did_list.append(did)
+
+    with engine.connect() as conn:
+        result = conn.execute("SELECT COUNT(*) FROM index_record")
+        count = result.scalar()
+        assert count == n_records
+
+    return did_list
+
+
+def test_index_record_to_new_table():
+    """
+    Test index_record_to_new_table copies records from old tables to new record table.
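+    Creates 100 records in the multi-table schema, migrates them in batches of 10,
+    and verifies the row count in the new single "record" table.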
+    """
+    index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json")
+    n_records = 100
+    create_record(n_records)
+    index_record_migrator.index_record_to_new_table(batch_size=10)
+
+    engine = create_engine(POSTGRES_CONNECTION)
+    with engine.connect() as conn:
+        result = conn.execute("SELECT COUNT(*) FROM record")
+        count = result.scalar()
+        assert count == n_records
+
+
+def test_get_index_record_hash():
+    """
+    Test get_index_record_hash from IndexRecordMigrator returns the correct format
+    """
+    index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json")
+    did = create_record()[0]
+    result = index_record_migrator.get_index_record_hash(did)
+    assert result == {"md5": "some_md5", "sha1": "some_sha1"}
+
+
+def test_get_urls_record():
+    """
+    Test get_urls_record from IndexRecordMigrator returns the correct format
+    """
+    index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json")
+    did = create_record()[0]
+    result = index_record_migrator.get_urls_record(did)
+    assert result == ["s3://bucket/data.json", "gs://bucket/data.txt"]
+
+
+def test_get_urls_metadata():
+    """
+    Test get_urls_metadata from IndexRecordMigrator returns the correct format
+    """
+    index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json")
+    did = create_record()[0]
+    result = index_record_migrator.get_urls_metadata(did)
+    assert result == {
+        "s3://bucket/data.json": {"metadata_key": "metadata_value"},
+        "gs://bucket/data.txt": {"metadata_key": "metadata_value"},
+    }
+
+
+def test_get_index_record_ace():
+    """
+    Test get_index_record_ace from IndexRecordMigrator returns a list
+    """
+    index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json")
+    did = create_record()[0]
+    result = index_record_migrator.get_index_record_ace(did)
+    assert isinstance(result, list)
+
+
+def test_get_index_record_authz():
+    """
+    Test get_index_record_authz from IndexRecordMigrator returns a list
+    """
+    index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json")
+    did = create_record()[0]
+    result = index_record_migrator.get_index_record_authz(did)
+    assert isinstance(result, list)
+
+
+def test_get_index_record_metadata():
+    """
+    Test get_index_record_metadata from IndexRecordMigrator returns the correct format
+    """
+    index_record_migrator = IndexRecordMigrator(creds_file="tests/test_creds.json")
+    did = create_record()[0]
+    result = index_record_migrator.get_index_record_metadata(did)
+    assert result == {
+        "metadata_key": "metadata_value",
+        "some_other_key": "some_other_value",
+    }
diff --git a/tests/test_setup.py b/tests/test_setup.py
deleted file mode 100644
index ca8773b1..00000000
--- a/tests/test_setup.py
+++ /dev/null
@@ -1,142 +0,0 @@
-import sqlite3
-
-import tests.util as util
-
-from indexd.index.drivers.alchemy import SQLAlchemyIndexDriver
-from indexd.alias.drivers.alchemy import SQLAlchemyAliasDriver
-
-
-OLD_SQLITE = sqlite3.sqlite_version_info < (3, 7, 16)
-
-INDEX_HOST = "index.sq3"
-ALIAS_HOST = "alias.sq3"
-
-INDEX_TABLES = {
-    "base_version": [(0, "baseid", "VARCHAR", 1, None, 1)],
-    "index_record": [
-        (0, "did", "VARCHAR", 1, None, 1),
-        (1, "baseid", "VARCHAR", 0, None, 0),
-        (2, "rev", "VARCHAR", 0, None, 0),
-        (3, "form", "VARCHAR", 0, None, 0),
-        (4, "size", "BIGINT", 0, None, 0),
-        (5, "created_date", "DATETIME", 0, None, 0),
-        (6, "updated_date", "DATETIME", 0, None, 0),
-        (7, "file_name", "VARCHAR", 0, None, 0),
-        (8, "version", "VARCHAR", 0, None, 0),
-        (9, "uploader",
"VARCHAR", 0, None, 0), - (10, "description", "VARCHAR", 0, None, 0), - (11, "content_created_date", "DATETIME", 0, None, 0), - (12, "content_updated_date", "DATETIME", 0, None, 0), - ], - "index_record_hash": [ - (0, "did", "VARCHAR", 1, None, 1), - (1, "hash_type", "VARCHAR", 1, None, 1 if OLD_SQLITE else 2), - (2, "hash_value", "VARCHAR", 0, None, 0), - ], - "index_record_url": [ - (0, "did", "VARCHAR", 1, None, 1), - (1, "url", "VARCHAR", 1, None, 1 if OLD_SQLITE else 2), - ], - "index_schema_version": [(0, "version", "INTEGER", 1, None, 1)], - "drs_bundle_record": [ - (0, "bundle_id", "VARCHAR", 1, None, 1), - (1, "name", "VARCHAR", 0, None, 0), - (2, "created_time", "DATETIME", 0, None, 0), - (3, "updated_time", "DATETIME", 0, None, 0), - (4, "checksum", "VARCHAR", 0, None, 0), - (5, "size", "BIGINT", 0, None, 0), - (6, "bundle_data", "TEXT", 0, None, 0), - (7, "description", "TEXT", 0, None, 0), - (8, "version", "VARCHAR", 0, None, 0), - (9, "aliases", "VARCHAR", 0, None, 0), - ], -} - -ALIAS_TABLES = { - "alias_record": [ - (0, "name", "VARCHAR", 1, None, 1), - (1, "rev", "VARCHAR", 0, None, 0), - (2, "size", "BIGINT", 0, None, 0), - (3, "release", "VARCHAR", 0, None, 0), - (4, "metastring", "VARCHAR", 0, None, 0), - (5, "keeper_authority", "VARCHAR", 0, None, 0), - ], - "alias_record_hash": [ - (0, "name", "VARCHAR", 1, None, 1), - (1, "hash_type", "VARCHAR", 1, None, 1 if OLD_SQLITE else 2), - (2, "hash_value", "VARCHAR", 0, None, 0), - ], - "alias_record_host_authority": [ - (0, "name", "VARCHAR", 1, None, 1), - (1, "host", "VARCHAR", 1, None, 1 if OLD_SQLITE else 2), - ], - "alias_schema_version": [(0, "version", "INTEGER", 1, None, 1)], -} - -INDEX_CONFIG = {"driver": SQLAlchemyIndexDriver("sqlite:///index.sq3")} - -ALIAS_CONFIG = {"driver": SQLAlchemyAliasDriver("sqlite:///alias.sq3")} - - -@util.removes(INDEX_HOST) -def test_sqlite3_index_setup_tables(): - """ - Tests that the SQLite3 index database gets set up correctly. - """ - SQLAlchemyIndexDriver("sqlite:///index.sq3") - - with sqlite3.connect(INDEX_HOST) as conn: - c = conn.execute( - """ - SELECT name FROM sqlite_master WHERE type = 'table' - """ - ) - - tables = [i[0] for i in c] - - for table in INDEX_TABLES: - assert table in tables, "{table} not created".format(table=table) - - for table, schema in list(INDEX_TABLES.items()): - # NOTE PRAGMA's don't work with parameters... - c = conn.execute( - """ - PRAGMA table_info ('{table}') - """.format( - table=table - ) - ) - - assert schema == [i for i in c] - - -@util.removes(ALIAS_HOST) -def test_sqlite3_alias_setup_tables(): - """ - Tests that the SQLite3 alias database gets set up correctly. - """ - SQLAlchemyAliasDriver("sqlite:///alias.sq3") - - with sqlite3.connect(ALIAS_HOST) as conn: - c = conn.execute( - """ - SELECT name FROM sqlite_master WHERE type = 'table' - """ - ) - - tables = [i[0] for i in c] - - for table in ALIAS_TABLES: - assert table in tables, "{table} not created".format(table=table) - - for table, schema in list(ALIAS_TABLES.items()): - # NOTE PRAGMA's don't work with parameters... 
- c = conn.execute( - """ - PRAGMA table_info ('{table}') - """.format( - table=table - ) - ) - - assert schema == [i for i in c] diff --git a/tests/test_urls_endpoints.py b/tests/test_urls_endpoints.py index 19db0aca..d8da5f9e 100644 --- a/tests/test_urls_endpoints.py +++ b/tests/test_urls_endpoints.py @@ -4,7 +4,7 @@ @pytest.fixture(scope="function") -def test_data(client, user): +def test_data(client, user, combined_default_and_single_table_settings): system_random = random.SystemRandom() url_x_count = system_random.randint(2, 5) @@ -40,7 +40,7 @@ def test_data(client, user): return url_x_count, versioned_count, unversioned_count -def test_query_urls(client, test_data): +def test_query_urls(client, test_data, combined_default_and_single_table_settings): """ Args: client (test fixture) @@ -91,7 +91,9 @@ def test_query_urls(client, test_data): assert len(urls_list) == versioned_count + unversioned_count - 2 * url_x_count -def test_query_urls_metadata(client, test_data): +def test_query_urls_metadata( + client, test_data, combined_default_and_single_table_settings +): """ Args: client (test fixture)