Skip to content

Commit

Permalink
Perform read after schema change to account for updated table etag. Patching the POC script to work again
Browse files Browse the repository at this point in the history
  • Loading branch information
BryanFauble committed Feb 21, 2025
1 parent 93c450b commit d588d9f
Show file tree
Hide file tree
Showing 2 changed files with 14 additions and 17 deletions.
27 changes: 10 additions & 17 deletions docs/scripts/object_orientated_programming_poc/oop_poc_table.py
Original file line number | Diff line number | Diff line change
Expand Up @@ -16,7 +16,7 @@
from datetime import date, datetime, timedelta, timezone

import synapseclient
from synapseclient.models import Column, ColumnType, CsvResultFormat, Row, Table
from synapseclient.models import Column, ColumnType, Table

PROJECT_ID = "syn52948289"

Expand Down Expand Up @@ -92,7 +92,7 @@ def store_table():
annotations=annotations_for_my_table,
)

table = table.store_schema()
table = table.store()

print("Table created:")
print(table)
Expand All @@ -107,7 +107,7 @@ def store_table():

# Updating annotations on my table ===============================================
copy_of_table.annotations["my_key_string"] = ["new", "values", "here"]
stored_table = copy_of_table.store_schema()
stored_table = copy_of_table.store()
print("Table updated:")
print(stored_table)

Expand All @@ -116,31 +116,24 @@ def store_table():
path_to_csv = os.path.join(os.path.expanduser("~/temp"), f"{name_of_csv}.csv")
write_random_csv_with_data(path_to_csv)

csv_path = copy_of_table.store_rows_from_csv(csv_path=path_to_csv)
copy_of_table.store_rows(values=path_to_csv)

print("Stored data to table from CSV:")
print(csv_path)
print("Stored data to table from CSV")

# Querying for data from a table =================================================
destination_csv_location = os.path.expanduser("~/temp/my_query_results")

table_id_to_query = copy_of_table.id
Table.query(
query=f"SELECT * FROM {table_id_to_query}",
result_format=CsvResultFormat(download_location=destination_csv_location),
)
dataframe_from_query = Table.query(query=f"SELECT * FROM {table_id_to_query}")

print(f"Created results at: {destination_csv_location}")
print(f"Got results: {dataframe_from_query}")

# Deleting rows from a table =====================================================
copy_of_table.delete_rows(rows=[Row(row_id=1)])
# Deleting a row from the table =====================================================
copy_of_table.delete_rows(query=f"SELECT * from {table_id_to_query} LIMIT 1")

# Deleting a table ===============================================================
table_to_delete = Table(
name="my_test_table_I_want_to_delete",
columns=columns,
parent_id=PROJECT_ID,
).store_schema()
).store()

table_to_delete.delete()

Expand Down
4 changes: 4 additions & 0 deletions synapseclient/models/mixins/table_operator.py
Original file line number | Diff line number | Diff line change
Expand Up @@ -419,6 +419,10 @@ async def store_async(
for column in self.columns.values():
updated_columns[column.name] = column
self.columns = updated_columns
await self.get_async(
include_columns=False,
synapse_client=synapse_client,
)

re_read_required = await store_entity_components(
root_resource=self,
Expand Down

0 comments on commit d588d9f

Please sign in to comment.