Skip to content

Commit

Permalink
Merge pull request dmwm#4586 from mmascher/master
Browse files Browse the repository at this point in the history
Fix error message for 'outputPrimaryDataset'
  • Loading branch information
mmascher committed Dec 18, 2015
2 parents be9eebf + d629737 commit 94b3252
Show file tree
Hide file tree
Showing 3 changed files with 7 additions and 7 deletions.
2 changes: 1 addition & 1 deletion bin/crab3bootstrap
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
after running 'scram unset env'
In particular the script:
- saves the environment variables used by CRAB3: SCRAM_ARCH, CMSSW_BASE, CMSSW_RELEASE_BASE, CMSSW_VERSION, LOCALRT
- saves the environment variables used by CRAB3: SCRAM_ARCH, CMSSW_BASE, CMSSW_VERSION
- handles the PSet file:
* get the output files (tfiles and edmfiles)
* dump the pickled expanded version of the pset
Expand Down
10 changes: 5 additions & 5 deletions src/python/CRABClient/ClientUtilities.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ def filter(self, record):
def removecolor(text):
    """Return *text* with every color escape value found in
    colors.colordict stripped out.

    Falsy input (None or '') is returned unchanged.

    Note: the diff rendering had left both the old and the new loop
    header in place; only the post-commit line (unused color name bound
    to ``dummyColor``) is kept here.
    """
    if not text:
        return text
    # Only the escape-sequence values matter; the color-name keys are
    # deliberately unused (hence the dummy prefix, which silences the
    # unused-variable lint warning).
    for dummyColor, colorval in colors.colordict.iteritems():
        if colorval in text:
            text = text.replace(colorval, '')
    return text
Expand Down Expand Up @@ -200,7 +200,7 @@ def uploadlogfile(logger, proxyfilename, logfilename = None, logpath = None, ins
cmd = 'env'
logger.debug('Running env command')
pipe = subprocess.Popen(cmd, stdout = subprocess.PIPE, stderr = subprocess.PIPE, shell = True)
stdout, stderr = pipe.communicate()
stdout, dummyStderr = pipe.communicate()
logger.debug('\n\n\nUSER ENVIROMENT\n%s' % stdout)
except Exception as se:
logger.debug('Failed to get the user env\nException message: %s' % (se))
Expand Down Expand Up @@ -430,8 +430,8 @@ def getWorkArea(projdir):
return requestarea, requestname


def loadCache(dir, logger):
requestarea, requestname = getWorkArea(dir)
def loadCache(mydir, logger):
requestarea, dummyRequestname = getWorkArea(mydir)
cachename = os.path.join(requestarea, '.requestcache')
#Check if the directory exists.
if not os.path.isdir(requestarea):
Expand Down Expand Up @@ -660,7 +660,7 @@ def server_info(subresource, server, proxyfilename, baseurl, **kwargs):
server = CRABClient.Emulator.getEmulator('rest')(server, proxyfilename, proxyfilename, version=__version__)
requestdict = {'subresource': subresource}
requestdict.update(**kwargs)
dictresult, status, reason = server.get(baseurl, requestdict)
dictresult, dummyStatus, dummyReason = server.get(baseurl, requestdict)

return dictresult['result'][0]

Expand Down
2 changes: 1 addition & 1 deletion src/python/CRABClient/JobType/PrivateMC.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ def validateConfig(self, config):

## If publication is True, check that there is a primary dataset name specified.
if getattr(config.Data, 'publication', getParamDefaultValue('Data.publication')):
if not getattr(config.Data, 'outputPrimaryDataset'):
if not getattr(config.Data, 'outputPrimaryDataset', getParamDefaultValue('Data.outputPrimaryDataset')):
msg = "Invalid CRAB configuration: Parameter Data.outputPrimaryDataset not specified."
msg += "\nMC generation job type requires this parameter for publication."
return False, msg
Expand Down

0 comments on commit 94b3252

Please sign in to comment.