Add DoesNotExist error codes used by the bulkblocks2.go codebase
vkuznet committed Nov 15, 2022
1 parent f9dde71 commit 000d8b9
Showing 2 changed files with 55 additions and 42 deletions.
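Every ID-lookup helper in dbs/bulkblocks2.go wraps a failed lookup with the package's Error helper, and this commit swaps the generic GetIDErrorCode at those call sites for a code naming the missing entity, so a caller can tell a missing record apart from an ordinary database failure. Below is a minimal sketch of the resulting pattern; getDataTierIDSketch is a hypothetical stand-in, the transaction handling is elided, and the real helpers appear in the diff that follows.

// Sketch only: mirrors the error-wrapping pattern the lookup helpers follow
// after this commit; the GetID arguments echo the call style used elsewhere
// in bulkblocks2.go and the helper name is hypothetical.
func getDataTierIDSketch(tx *sql.Tx, tierName string) (int64, error) {
	tid, err := GetID(tx, "DATA_TIERS", "data_tier_id", "data_tier_name", tierName)
	if err != nil {
		msg := fmt.Sprintf("unable to find data_tier_id for tier name='%s'", tierName)
		log.Println(msg)
		// DataTierDoesNotExist (137) replaces the generic GetIDErrorCode (109)
		return 0, Error(err, DataTierDoesNotExist, msg, "dbs.bulkblocks.getDataTierID")
	}
	return tid, nil
}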
26 changes: 13 additions & 13 deletions dbs/bulkblocks2.go
@@ -88,7 +88,7 @@ func getPrimaryDatasetTypeID(primaryDSType, hash string) (int64, error) {
if err != nil {
msg := fmt.Sprintf("%s unable to find primary_ds_type_id for primary ds type='%s'", hash, primaryDSType)
log.Println(msg)
- return 0, Error(err, GetIDErrorCode, msg, "dbs.bulkblocks.getPrimaryDatasetTypeID")
+ return 0, Error(err, PrimaryDatasetTypeDoesNotExist, msg, "dbs.bulkblocks.getPrimaryDatasetTypeID")
}
err = tx.Commit()
if err != nil {
@@ -129,7 +129,7 @@ func getPrimaryDatasetID(
if err != nil {
msg := fmt.Sprintf("%s unable to find primary_ds_id for primary ds name='%s'", hash, primaryDSName)
log.Println(msg)
- return 0, Error(err, GetIDErrorCode, msg, "dbs.bulkblocks.getPrimaryDatasetID")
+ return 0, Error(err, PrimaryDatasetDoesNotExist, msg, "dbs.bulkblocks.getPrimaryDatasetID")
}
err = tx.Commit()
if err != nil {
@@ -169,7 +169,7 @@ func getProcessingEraID(
if err != nil {
msg := fmt.Sprintf("%s unable to find processing_era_id for processing version='%v'", hash, processingVersion)
log.Println(msg)
- return 0, Error(err, GetIDErrorCode, msg, "dbs.bulkblocks.getProcessingEraID")
+ return 0, Error(err, ProcessingEraDoesNotExist, msg, "dbs.bulkblocks.getProcessingEraID")
}
err = tx.Commit()
if err != nil {
@@ -213,7 +213,7 @@ func getAcquisitionEraID(
if err != nil {
msg := fmt.Sprintf("%s unable to find acquisition_era_id for acq era name='%s'", hash, acquisitionEraName)
log.Println(msg)
- return 0, Error(err, GetIDErrorCode, msg, "dbs.bulkblocks.getAcquisitionEraID")
+ return 0, Error(err, AcquisitionEraDoesNotExist, msg, "dbs.bulkblocks.getAcquisitionEraID")
}
err = tx.Commit()
if err != nil {
@@ -254,7 +254,7 @@ func getDataTierID(
if err != nil {
msg := fmt.Sprintf("%s unable to find data_tier_id for tier name='%s'", hash, tierName)
log.Println(msg)
- return 0, Error(err, GetIDErrorCode, msg, "dbs.bulkblocks.getDataTierID")
+ return 0, Error(err, DataTierDoesNotExist, msg, "dbs.bulkblocks.getDataTierID")
}
err = tx.Commit()
if err != nil {
@@ -289,7 +289,7 @@ func getPhysicsGroupID(physName, hash string) (int64, error) {
if err != nil {
msg := fmt.Sprintf("%s, unable to find physics_group_id for physics group name='%s'", hash, physName)
log.Println(msg)
- return 0, Error(err, GetIDErrorCode, msg, "dbs.bulkblocks.getPhysicsGroupID")
+ return 0, Error(err, PhysicsGroupDoesNotExist, msg, "dbs.bulkblocks.getPhysicsGroupID")
}
err = tx.Commit()
if err != nil {
@@ -326,7 +326,7 @@ func getDatasetAccessTypeID(
if err != nil {
msg := fmt.Sprintf("%s unable to find dataset_access_type_id for data access type='%s'", hash, datasetAccessType)
log.Println(msg)
- return 0, Error(err, GetIDErrorCode, msg, "dbs.bulkblocks.getDatasetAccesssTypeID")
+ return 0, Error(err, DatasetAccessTypeDoesNotExist, msg, "dbs.bulkblocks.getDatasetAccesssTypeID")
}
err = tx.Commit()
if err != nil {
@@ -363,7 +363,7 @@ func getProcessedDatasetID(
if err != nil {
msg := fmt.Sprintf("%s unable to find processed_ds_id for procDS='%s'", hash, processedDSName)
log.Println(msg)
- return 0, Error(err, GetIDErrorCode, msg, "dbs.bulkblocks.getProcessedDSName")
+ return 0, Error(err, ProcessedDatasetDoesNotExist, msg, "dbs.bulkblocks.getProcessedDSName")
}
err = tx.Commit()
if err != nil {
@@ -434,7 +434,7 @@ func getDatasetID(
if err != nil {
msg := fmt.Sprintf("%s unable to insert dataset='%v'", hash, dataset)
log.Println(msg)
- return 0, Error(err, GetIDErrorCode, msg, "dbs.bulkblocks.getDatasetID")
+ return 0, Error(err, DatasetDoesNotExist, msg, "dbs.bulkblocks.getDatasetID")
}
err = tx.Commit()
if err != nil {
@@ -670,7 +670,7 @@ func (a *API) InsertBulkBlocksConcurrently() error {
if rid, err := GetID(tx, "BLOCKS", "block_id", "block_name", bName); err == nil && rid != 0 {
err := errors.New(fmt.Sprintf("Block %s already exists", bName))
msg := "Data already exist in DBS"
- return Error(err, DatabaseErrorCode, msg, "dbs.bulkblocks.InsertBulkBlocksConcurrently")
+ return Error(err, BlockAlreadyExists, msg, "dbs.bulkblocks.InsertBulkBlocksConcurrently")
}

// get blockID
@@ -703,7 +703,7 @@ func (a *API) InsertBulkBlocksConcurrently() error {
if err != nil {
msg := fmt.Sprintf("%s unable to find file_type_id for %v, error %v", hash, ftype, err)
log.Println(msg)
- return Error(err, GetIDErrorCode, msg, "dbs.bulkblocks.InsertBulkBlocksConcurrently")
+ return Error(err, FileDataTypesDoesNotExist, msg, "dbs.bulkblocks.InsertBulkBlocksConcurrently")
}
}
// insert files
@@ -872,7 +872,7 @@ func (a *API) InsertBulkBlocksConcurrently() error {
err := errors.New("unable to locate parent file id")
msg := fmt.Sprintf("no file id found for parent '%s'", lfn)
log.Println(msg)
- return Error(err, DatabaseErrorCode, msg, "dbs.bulkblocks.InsertBulkBlocksConcurrently")
+ return Error(err, FileParentDoesNotExist, msg, "dbs.bulkblocks.InsertBulkBlocksConcurrently")
}
err = rrr.Insert(tx)
if err != nil {
@@ -914,7 +914,7 @@ func (a *API) InsertBulkBlocksConcurrently() error {
if err != nil {
msg := fmt.Sprintf("%s unable to find dataset_id for %s, error %v", hash, ds, err)
log.Println(msg)
- return Error(err, GetIDErrorCode, msg, "dbs.bulkblocks.InsertBulkBlocksConcurrently")
+ return Error(err, DatasetParentDoesNotExist, msg, "dbs.bulkblocks.InsertBulkBlocksConcurrently")
}
r := DatasetParents{THIS_DATASET_ID: datasetID, PARENT_DATASET_ID: pid}
err = r.Insert(tx)
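With per-entity codes in place, a caller of the bulk-blocks insertion path can react differently when a referenced record is missing than when the database itself fails. A rough sketch follows, assuming the DBSError type declared in dbs/errors.go carries the numeric Code field; classifyBulkBlocksError is a hypothetical helper, not part of this commit.

// Sketch only: assumes DBSError exposes Code and satisfies the error
// interface; the helper and its messages are illustrative.
func classifyBulkBlocksError(err error) string {
	var dbsErr *DBSError
	if errors.As(err, &dbsErr) {
		switch dbsErr.Code {
		case BlockAlreadyExists:
			return "block already present in DBS, nothing to insert"
		case DataTierDoesNotExist, PhysicsGroupDoesNotExist,
			DatasetAccessTypeDoesNotExist, ProcessingEraDoesNotExist:
			return "missing reference data, fix the input payload"
		}
	}
	return "internal DBS error"
}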
71 changes: 42 additions & 29 deletions dbs/errors.go
@@ -36,35 +36,48 @@ var InvalidRequestErr = errors.New("invalid request error")

// DBS Error codes provides static representation of DBS errors, they cover 1xx range
const (
- GenericErrorCode = iota + 100 // generic DBS error
- DatabaseErrorCode // 101 database error
- TransactionErrorCode // 102 transaction error
- QueryErrorCode // 103 query error
- RowsScanErrorCode // 104 row scan error
- SessionErrorCode // 105 db session error
- CommitErrorCode // 106 db commit error
- ParseErrorCode // 107 parser error
- LoadErrorCode // 108 loading error, e.g. load template
- GetIDErrorCode // 109 get id db error
- InsertErrorCode // 110 db insert error
- UpdateErrorCode // 111 update error
- LastInsertErrorCode // 112 db last insert error
- ValidateErrorCode // 113 validation error
- PatternErrorCode // 114 pattern error
- DecodeErrorCode // 115 decode error
- EncodeErrorCode // 116 encode error
- ContentTypeErrorCode // 117 content type error
- ParametersErrorCode // 118 parameters error
- NotImplementedApiCode // 119 not implemented API error
- ReaderErrorCode // 120 io reader error
- WriterErrorCode // 121 io writer error
- UnmarshalErrorCode // 122 json unmarshal error
- MarshalErrorCode // 123 marshal error
- HttpRequestErrorCode // 124 HTTP request error
- MigrationErrorCode // 125 Migration error
- RemoveErrorCode // 126 remove error
- InvalidRequestErrorCode // 127 invalid request error
- LastAvailableErrorCode // last available DBS error code
+ GenericErrorCode = iota + 100 // generic DBS error
+ DatabaseErrorCode // 101 database error
+ TransactionErrorCode // 102 transaction error
+ QueryErrorCode // 103 query error
+ RowsScanErrorCode // 104 row scan error
+ SessionErrorCode // 105 db session error
+ CommitErrorCode // 106 db commit error
+ ParseErrorCode // 107 parser error
+ LoadErrorCode // 108 loading error, e.g. load template
+ GetIDErrorCode // 109 get id db error
+ InsertErrorCode // 110 db insert error
+ UpdateErrorCode // 111 update error
+ LastInsertErrorCode // 112 db last insert error
+ ValidateErrorCode // 113 validation error
+ PatternErrorCode // 114 pattern error
+ DecodeErrorCode // 115 decode error
+ EncodeErrorCode // 116 encode error
+ ContentTypeErrorCode // 117 content type error
+ ParametersErrorCode // 118 parameters error
+ NotImplementedApiCode // 119 not implemented API error
+ ReaderErrorCode // 120 io reader error
+ WriterErrorCode // 121 io writer error
+ UnmarshalErrorCode // 122 json unmarshal error
+ MarshalErrorCode // 123 marshal error
+ HttpRequestErrorCode // 124 HTTP request error
+ MigrationErrorCode // 125 Migration error
+ RemoveErrorCode // 126 remove error
+ InvalidRequestErrorCode // 127 invalid request error
+ BlockAlreadyExists // 128 block xxx already exists in DBS
+ FileDataTypesDoesNotExist // 129 FileDataTypes does not exist in DBS
+ FileParentDoesNotExist // 130 FileParent does not exist in DBS
+ DatasetParentDoesNotExist // 131 DatasetParent does not exist in DBS
+ ProcessedDatasetDoesNotExist // 132 ProcessedDataset does not exist in DBS
+ PrimaryDatasetTypeDoesNotExist // 133 PrimaryDatasetType does not exist in DBS
+ PrimaryDatasetDoesNotExist // 134 PrimaryDataset does not exist in DBS
+ ProcessingEraDoesNotExist // 135 ProcessingEra does not exist in DBS
+ AcquisitionEraDoesNotExist // 136 AcquisitionEra does not exist in DBS
+ DataTierDoesNotExist // 137 DataTier does not exist in DBS
+ PhysicsGroupDoesNotExist // 138 PhysicsGroup does not exist in DBS
+ DatasetAccessTypeDoesNotExist // 139 DatasetAccessType does not exist in DBS
+ DatasetDoesNotExist // 140 Dataset does not exist in DBS
+ LastAvailableErrorCode // last available DBS error code
)

// DBSError represents common structure for DBS errors
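Because the codes come from a single iota block starting at 100, each constant appended before LastAvailableErrorCode picks up the next integer automatically, which is what the inline comments (128 through 140) record. A throwaway sanity check of that assumption, not part of the commit:

// Sketch only: verifies the appended constants land on the values noted
// in the comments above; lives alongside dbs/errors.go in package dbs.
package dbs

import "testing"

func TestDoesNotExistCodeValues(t *testing.T) {
	if BlockAlreadyExists != 128 || DatasetDoesNotExist != 140 {
		t.Fatalf("unexpected DBS error codes: %d, %d",
			BlockAlreadyExists, DatasetDoesNotExist)
	}
}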
