Skip to content

Commit

Permalink
Adjust datasets API
Browse files — browse the repository at this point in the history
  • Loading branch information
vkuznet committed Jan 17, 2021
1 parent c2f42c8 commit a7f1be3
Show file tree
Hide file tree
Showing 2 changed files with 157 additions and 17 deletions.
138 changes: 121 additions & 17 deletions dbs/datasets.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,16 +4,35 @@ import (
"database/sql"
"fmt"
"net/http"
"strings"
)

// Datasets API
func (API) Datasets(params Record, w http.ResponseWriter) (int64, error) {
var args []interface{}
var conds []string
tmpl := make(Record)
tmpl["Owner"] = DBOWNER
tmpl["TokenGenerator"] = false
tmpl["Runs"] = false
tmpl["Lfns"] = false
tmpl["Version"] = false
tmpl["ParentDataset"] = false

	// parse detail argument
detail, _ := getSingleValue(params, "detail")
// detail, _ := getSingleValue(params, "detail")

// parse dataset argument
datasets := getValues(params, "dataset")
if len(datasets) > 1 {
cond := fmt.Sprintf("D.DATASET in (SELECT TOKEN FROM TOKEN_GENERATOR)")
token, binds := TokenGenerator(datasets, 100) // 100 is max for # of allowed datasets
conds = append(conds, cond+token)
for _, v := range binds {
args = append(args, v)
}
} else if len(datasets) == 1 {
conds, args = AddParam("dataset", "D.DATASET", params, conds, args)
}

// parse is_dataset_valid argument
isValid, _ := getSingleValue(params, "is_dataset_valid")
Expand All @@ -33,28 +52,113 @@ func (API) Datasets(params Record, w http.ResponseWriter) (int64, error) {
conds = append(conds, cond)
args = append(args, dataset_access_type)

// parse dataset argument
datasets := getValues(params, "dataset")
if len(datasets) > 1 {
cond = fmt.Sprintf("D.DATASET in (SELECT TOKEN FROM TOKEN_GENERATOR)")
token, binds := TokenGenerator(datasets, 100) // 100 is max for # of allowed datasets
conds = append(conds, cond+token)
for _, v := range binds {
// optional arguments
if _, e := getSingleValue(params, "parent_dataset"); e == nil {
tmpl["ParentDataset"] = true
conds, args = AddParam("parent_dataset", "PDS.DATASET PARENT_DATASET", params, conds, args)
}
if _, e := getSingleValue(params, "release_version"); e == nil {
tmpl["Version"] = true
conds, args = AddParam("release_version", "RV.RELEASE_VERSION", params, conds, args)
}
if _, e := getSingleValue(params, "pset_hash"); e == nil {
tmpl["Version"] = true
conds, args = AddParam("pset_hash", "PSH_PSET_HASH", params, conds, args)
}
if _, e := getSingleValue(params, "app_name"); e == nil {
tmpl["Version"] = true
conds, args = AddParam("app_name", "AEX.APP_NAME", params, conds, args)
}
if _, e := getSingleValue(params, "output_module_label"); e == nil {
tmpl["Version"] = true
conds, args = AddParam("output_module_label", "OMC.OUTPUT_MODULE_LABEL", params, conds, args)
}
if _, e := getSingleValue(params, "logical_file_name"); e == nil {
tmpl["Lfns"] = true
conds, args = AddParam("logical_file_name", "FL.LOGICAL_FILE_NAME", params, conds, args)
}
conds, args = AddParam("primary_ds_name", "P.PRIMARY_DS_NAME", params, conds, args)
conds, args = AddParam("processed_ds_name", "PD.PROCESSED_DS_NAME", params, conds, args)
conds, args = AddParam("data_tier_name", "DT.DATA_TIER_NAME", params, conds, args)
conds, args = AddParam("primary_ds_type", "PDT.PRIMARY_DS_TYPE", params, conds, args)
conds, args = AddParam("physics_group_name", "PH.PHYSICS_GROUP_NAME", params, conds, args)
conds, args = AddParam("global_tag", "OMC.GLOBAL_TAG", params, conds, args)
conds, args = AddParam("processing_version", "PE.PROCESSING_VERSION", params, conds, args)
conds, args = AddParam("acqusition_era", "AE.ACQUISITION_ERA_NAME", params, conds, args)
conds, args = AddParam("cdate", "D.CREATION_DATE", params, conds, args)
minDate := getValues(params, "min_cdate")
maxDate := getValues(params, "max_cdate")
if len(minDate) == 1 && len(maxDate) == 1 {
_, minval := OperatorValue(minDate[0])
_, maxval := OperatorValue(maxDate[0])
if minval != "0" && maxval != "0" {
cond := fmt.Sprintf(" D.CREATION_DATE BETWEEN %s and %s", placeholder("min_cdate"), placeholder("max_cdate"))
conds = append(conds, cond)
args = append(args, minval)
args = append(args, maxval)
} else if minval != "0" && maxval == "0" {
cond := fmt.Sprintf(" D.CREATION_DATE > %s", placeholder("min_cdate"))
conds = append(conds, cond)
args = append(args, minval)
} else if minval == "0" && maxval != "0" {
cond := fmt.Sprintf(" D.CREATION_DATE < %s", placeholder("max_cdate"))
conds = append(conds, cond)
args = append(args, maxval)
}
}
conds, args = AddParam("ldate", "D.LAST_MODIFICATION_DATE", params, conds, args)
minDate = getValues(params, "min_ldate")
maxDate = getValues(params, "max_ldate")
if len(minDate) == 1 && len(maxDate) == 1 {
_, minval := OperatorValue(minDate[0])
_, maxval := OperatorValue(maxDate[0])
if minval != "0" && maxval != "0" {
cond := fmt.Sprintf(" D.CREATION_DATE BETWEEN %s and %s", placeholder("min_ldate"), placeholder("max_ldate"))
conds = append(conds, cond)
args = append(args, minval)
args = append(args, maxval)
} else if minval != "0" && maxval == "0" {
cond := fmt.Sprintf(" D.CREATION_DATE > %s", placeholder("min_ldate"))
conds = append(conds, cond)
args = append(args, minval)
} else if minval == "0" && maxval != "0" {
cond := fmt.Sprintf(" D.CREATION_DATE < %s", placeholder("max_ldate"))
conds = append(conds, cond)
args = append(args, maxval)
}
}
conds, args = AddParam("create_by", "D.CREATE_BY", params, conds, args)
conds, args = AddParam("last_modified_by", "D.LAST_MODIFIED_BY", params, conds, args)
conds, args = AddParam("prep_id", "D.PREP_ID", params, conds, args)
conds, args = AddParam("dataset_id", "D.DATASET_ID", params, conds, args)

// run_num
runs, err := ParseRuns(getValues(params, "run_num"))
if err != nil {
return 0, err
}
if len(runs) > 0 {
tmpl["Runs"] = true
token, whereRuns, bindsRuns := runsClause("FLM", runs)
tmpl["TokenGenerator"] = token
conds = append(conds, whereRuns)
for _, v := range bindsRuns {
args = append(args, v)
}
} else if len(datasets) == 1 {
conds, args = AddParam("dataset", "D.DATASET", params, conds, args)
}

// get SQL statement from static area
stm := getSQL("datasets")
stm, err := LoadTemplateSQL("datasets", tmpl)
if err != nil {
return 0, err
}
cols := []string{"dataset_id", "dataset", "prep_id", "xtcrosssection", "creation_date", "create_by", "last_modification_date", "last_modified_by", "primary_ds_name", "primary_ds_type", "processed_ds_name", "data_tier_name", "dataset_access_type", "acquisition_era_name", "processing_version", "physics_group_name"}
vals := []interface{}{new(sql.NullInt64), new(sql.NullString), new(sql.NullString), new(sql.NullFloat64), new(sql.NullInt64), new(sql.NullString), new(sql.NullInt64), new(sql.NullString), new(sql.NullString), new(sql.NullString), new(sql.NullString), new(sql.NullString), new(sql.NullString), new(sql.NullString), new(sql.NullInt64), new(sql.NullString)}
if strings.ToLower(detail) != "true" {
stm = getSQL("datasets_short")
cols = []string{"dataset"}
vals = []interface{}{new(sql.NullString)}
}
// if strings.ToLower(detail) != "true" {
// stm = getSQL("datasets_short")
// cols = []string{"dataset"}
// vals = []interface{}{new(sql.NullString)}
// }
stm = WhereClause(stm, conds)

// use generic query API to fetch the results from DB
Expand Down
36 changes: 36 additions & 0 deletions static/sql/datasets.sql
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
{{.TokenGenerator}}
{{if .Runs}}
SELECT DISTINCT
{{else}}
SELECT
{{end if}}
D.DATASET_ID, D.DATASET, D.PREP_ID,
D.XTCROSSSECTION,
D.CREATION_DATE, D.CREATE_BY,
Expand All @@ -11,6 +16,16 @@ SELECT
AE.ACQUISITION_ERA_NAME,
PE.PROCESSING_VERSION,
PH.PHYSICS_GROUP_NAME
{{if .ParentDataset}}
,PDS.DATASET PARENT_DATASET
{{end if}}
{{if .Version}}
,OMC.OUTPUT_MODULE_LABEL
,OMC.GLOBAL_TAG
,RV.RELEASE_VERSION,
,PSH.PSET_HASH,
,AEX.APP_NAME
{{end if}}

FROM {{.Owner}}.DATASETS D
JOIN {{.Owner}}.PRIMARY_DATASETS P ON P.PRIMARY_DS_ID = D.PRIMARY_DS_ID
Expand All @@ -22,3 +37,24 @@ SELECT
LEFT OUTER JOIN {{.Owner}}.ACQUISITION_ERAS AE ON AE.ACQUISITION_ERA_ID = D.ACQUISITION_ERA_ID
LEFT OUTER JOIN {{.Owner}}.PROCESSING_ERAS PE ON PE.PROCESSING_ERA_ID = D.PROCESSING_ERA_ID
LEFT OUTER JOIN {{.Owner}}.PHYSICS_GROUPS PH ON PH.PHYSICS_GROUP_ID = D.PHYSICS_GROUP_ID
{{if .ParentDataset}}
LEFT OUTER JOIN {{.Owner}}.DATASET_PARENTS DSP ON DSP.THIS_DATASET_ID = D.DATASET_ID
LEFT OUTER JOIN {{.Owner}}.DATASETS PDS ON PDS.DATASET_ID = DSP.PARENT_DATASET_ID
{{end if}}

{{if .Version}}
LEFT OUTER JOIN {{.Owner}}.DATASET_OUTPUT_MOD_CONFIGS DOMC ON DOMC.DATASET_ID = D.DATASET_ID
LEFT OUTER JOIN {{.Owner}}.OUTPUT_MODULE_CONFIGS OMC ON OMC.OUTPUT_MOD_CONFIG_ID = DOMC.OUTPUT_MOD_CONFIG_ID
LEFT OUTER JOIN {{.Owner}}.RELEASE_VERSIONS RV ON RV.RELEASE_VERSION_ID = OMC.RELEASE_VERSION_ID
LEFT OUTER JOIN {{.Owner}}.PARAMETER_SET_HASHES PSH ON PSH.PARAMETER_SET_HASH_ID = OMC.PARAMETER_SET_HASH_ID
LEFT OUTER JOIN {{.Owner}}.APPLICATION_EXECUTABLES AEX ON AEX.APP_EXEC_ID = OMC.APP_EXEC_ID
{{end if}}

{{if .Runs}}
{{if .Lfns}}
JOIN {{.Owner}}.FILES FL on FL.DATASET_ID = D.DATASET_ID
{{else}}
JOIN {{.Owner}}.FILES FL on FL.DATASET_ID = D.DATASET_ID
{{end if}}
JOIN {{.Owner}}.FILE_LUMIS FLLU on FLLU.FILE_ID=FL.FILE_ID
{{end if}}

0 comments on commit a7f1be3

Please sign in to comment.