RStDGenG25Nov05

Prepare

Used rseed.cc to generate the random-number seed lines (the AtRndmGenSvc.Seeds entries in the job options below).
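rseed.cc itself is attached to the topic (https://twiki.cern.ch/twiki/pub/Main/RStDGenG25Nov05/rseed.cc) and is not reproduced here. Purely as an illustration of the format it has to produce, a shell stand-in (hypothetical, not the real rseed.cc) could be:

#!/bin/bash
# Hypothetical stand-in for rseed.cc: print a Seeds line in the format
# AtRndmGenSvc expects. Each seed is built from two 15-bit $RANDOM values,
# so it stays comfortably inside the signed 32-bit range.
mkseed() { echo $(( (RANDOM << 15) | RANDOM )); }
echo "AtRndmGenSvc.Seeds = [\"PYTHIA $(mkseed) $(mkseed)\", \"PYTHIA_INIT $(mkseed) $(mkseed)\"]"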

[lxplus]cat e100_gengrid.jdl
############# Athena #################
Executable = "athena_gen03.sh";
StdOutput = "athena_gen.out";
StdError = "athena_gen.err";
InputSandbox = {"athena_gen03.sh","jobOptions.pythia.vex06d_v1_003.py"};
OutputSandbox = {"athena_gen03.out","athena_gen.err","athena_result.out", "CLIDDBout.txt"};
Requirements = Member("VO-atlas-release-10.0.1", other.GlueHostApplicationSoftwareRunTimeEnvironment);
######################################
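Before submitting, it can help to check which computing elements match the Requirements expression. With the standard EDG UI something along these lines should list them (not run here, and the option spelling may differ between UI versions):

edg-job-list-match -vo atlas e100_gengrid.jdl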
[lxplus]cat athena_gen03.sh
#!/bin/bash
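# Set up the ATLAS 10.0.1 release installed at the site and the Athena run-time environment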
source $VO_ATLAS_SW_DIR/software/10.0.1/setup.sh
source $SITEROOT/dist/10.0.1/Control/AthenaRunTime/*/cmt/setup.sh
cp $SITEROOT/dist/10.0.1/InstallArea/share/PDGTABLE.MeV .
# Run the job:
athena.py jobOptions.pythia.vex06d_v1_003.py > athena_result.out 2>&1
ls -l
hostname
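# Copy the generated POOL file to the se2-gla.scotgrid.ac.uk storage element
# and register it in the grid catalogue under the LFN given with -l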
lcg-cr -v --vo atlas -d se2-gla.scotgrid.ac.uk \
   -l lfn:stdenis_vbf_vex06d_v1_003.pool.root file://`pwd`/vbf.pool.root
[lxplus]cat jobOptions.pythia.vex06d_v1_003.py
###############################################################
#
# Job options file
#
#==============================================================
#--------------------------------------------------------------
# General Application Configuration options
#--------------------------------------------------------------
theApp.setup( MONTECARLO )
 
include( "PartPropSvc/PartPropSvc.py" )
 
# Add POOL persistency
 
include( "AthenaPoolCnvSvc/WriteAthenaPool_jobOptions.py" )
include( "GeneratorObjectsAthenaPool/GeneratorObjectsAthenaPool_joboptions.py" )
 
# 2101 = EventInfo
# 133273 = MCTruth (HepMC)
Stream1.ItemList += [ "2101#*", "133273#*" ]
include("AthenaSealSvc/AthenaSealSvc_joboptions.py" )
AthenaSealSvc.CheckDictionary = TRUE
Stream1.OutputFile = "vbf.pool.root"
 
 
#--------------------------------------------------------------
# Private Application Configuration options
#--------------------------------------------------------------
theApp.Dlls  += [ "TruthExamples", "Pythia_i" ]
theApp.TopAlg = ["Pythia","DumpMC"]
theApp.ExtSvc += ["AtRndmGenSvc"]
# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
MessageSvc = Service( "MessageSvc" )
MessageSvc.OutputLevel               = 4
#--------------------------------------------------------------
# Event related parameters
#--------------------------------------------------------------
# Number of events to be processed (default is 10)
theApp.EvtMax = 100
# Set run number (default 0 causes problems)
EventSelector = Service("EventSelector")
EventSelector.RunNumber = 1337
#
#--------------------------------------------------------------
# Algorithms Private Options
#--------------------------------------------------------------
AtRndmGenSvc = Service( "AtRndmGenSvc" )
AtRndmGenSvc.Seeds = ["PYTHIA 2840827 2545112526", "PYTHIA_INIT 890466 6608044"]
# AtRndmGenSvc.ReadFromFile = true;
Pythia = Algorithm( "Pythia" )
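# Annotation added for this write-up (an interpretation; not in the original file):
#   msel 0, msub 124 1    : enable only WW-fusion Higgs production, qq' -> qq'H
#   pmas 25 1 160         : set the Higgs mass to 160 GeV
#   mstp 61/71/81/111     : ISR on, FSR on, multiple interactions off, hadronization on
#   mdme 190-200, 208     : close the hadronic and tau decay channels of the W
#   mdme 206 1 2, 207 1 3 : W+ -> e+ nu only, W- -> mu- nubar only
#   mdme 210-225          : close the other Higgs decay channels; 226 1 1 keeps H -> W+W- open
#   mdme 174-187          : close the Z decay channels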
Pythia.PythiaCommand = ["pysubs msel 0","pysubs msub 124 1",
                        "pydat2 pmas 25 1 160",
                        "pypars mstp 61 2",
                        "pypars mstp 71 1",
                        "pypars mstp 81 0",
                        "pypars mstp 111 1",
                        "pydat3 mdme 190 1 0",
                        "pydat3 mdme 191 1 0",
                        "pydat3 mdme 192 1 0",
                        "pydat3 mdme 194 1 0",
                        "pydat3 mdme 195 1 0",
                        "pydat3 mdme 196 1 0",
                        "pydat3 mdme 198 1 0",
                        "pydat3 mdme 199 1 0",
                        "pydat3 mdme 200 1 0",
                        "pydat3 mdme 206 1 2",
                        "pydat3 mdme 207 1 3",
                        "pydat3 mdme 208 1 0",
                        "pydat3 mdme 210 1 0",
                        "pydat3 mdme 211 1 0",
                        "pydat3 mdme 212 1 0",
                        "pydat3 mdme 213 1 0",
                        "pydat3 mdme 214 1 0",
                        "pydat3 mdme 215 1 0",
                        "pydat3 mdme 218 1 0",
                        "pydat3 mdme 219 1 0",
                        "pydat3 mdme 220 1 0",
                        "pydat3 mdme 222 1 0",
                        "pydat3 mdme 223 1 0",
                        "pydat3 mdme 224 1 0",
                        "pydat3 mdme 225 1 0",
                        "pydat3 mdme 226 1 1",
                        "pydat3 mdme 174 1 0",
                        "pydat3 mdme 175 1 0",
                        "pydat3 mdme 176 1 0",
                        "pydat3 mdme 177 1 0",
                        "pydat3 mdme 178 1 0",
                        "pydat3 mdme 179 1 0",
                        "pydat3 mdme 182 1 0",
                        "pydat3 mdme 183 1 0",
                        "pydat3 mdme 184 1 0",
                        "pydat3 mdme 185 1 0",
                        "pydat3 mdme 186 1 0",
                        "pydat3 mdme 187 1 0"
                        ]
#---------------------------------------------------------------
# Ntuple service output
#---------------------------------------------------------------
#==============================================================
#
# End of job options file
#
###############################################################
[lxplus]
[lxplus]edg-job-submit -vo atlas -o jid1 e100_gengrid.jdl
 
Selected Virtual Organisation name (from --vo option): atlas
Connecting to host gdrb03.cern.ch, port 7772
Logging to host gdrb03.cern.ch, port 9002
 
================================ edg-job-submit Success =====================================
 The job has been successfully submitted to the Network Server.
 Use edg-job-status command to check job current status. Your job identifier (edg_jobId) is:
 
 - https://gdrb03.cern.ch:9000/91g-L0fBhPBWgA1kdxQ_xg
 
 The edg_jobId has been saved in the following file:
 /afs/cern.ch/user/s/stdenis/testarea/10.0.1/PhysicsAnalysis/AnalysisCommon/UserAnalysis/UserAnalysis-00-02-02/share/jid1
=============================================================================================
 
[lxplus]export JOB=https://gdrb03.cern.ch:9000/91g-L0fBhPBWgA1kdxQ_xg
[lxplus]./ckjob
Sat Nov 26 01:58:46 CET 2005
 
 
*************************************************************
BOOKKEEPING INFORMATION:
 
Status info for the Job : https://gdrb03.cern.ch:9000/91g-L0fBhPBWgA1kdxQ_xg
Current Status:     Ready
Status Reason:      unavailable
Destination:        ce001.m45.ihep.su:2119/jobmanager-pbs-short
reached on:         Sat Nov 26 00:58:42 2005
*************************************************************
 
 Sat Nov 26 01:59:46 CET 2005
 
 
*************************************************************
BOOKKEEPING INFORMATION:
 
Status info for the Job : https://gdrb03.cern.ch:9000/91g-L0fBhPBWgA1kdxQ_xg
Current Status:     Ready
Status Reason:      unavailable
Destination:        golias25.farm.particle.cz:2119/jobmanager-lcgpbs-lcgatlasprod
reached on:         Sat Nov 26 00:59:32 2005
*************************************************************

Sat Nov 26 02:00:47 CET 2005
 
 
*************************************************************
BOOKKEEPING INFORMATION:
 
Status info for the Job : https://gdrb03.cern.ch:9000/91g-L0fBhPBWgA1kdxQ_xg
Current Status:     Scheduled
Status Reason:      Job successfully submitted to Globus
Destination:        golias25.farm.particle.cz:2119/jobmanager-lcgpbs-lcgatlasprod
reached on:         Sat Nov 26 01:00:01 2005
*************************************************************
Sat Nov 26 02:03:48 CET 2005
 
 
*************************************************************
BOOKKEEPING INFORMATION:
 
Status info for the Job : https://gdrb03.cern.ch:9000/91g-L0fBhPBWgA1kdxQ_xg
Current Status:     Running
Status Reason:      Job successfully submitted to Globus
Destination:        golias25.farm.particle.cz:2119/jobmanager-lcgpbs-lcgatlasprod
reached on:         Sat Nov 26 01:03:20 2005
*************************************************************

Sat Nov 26 02:05:49 CET 2005
 
 
*************************************************************
BOOKKEEPING INFORMATION:
 
Status info for the Job : https://gdrb03.cern.ch:9000/91g-L0fBhPBWgA1kdxQ_xg
Current Status:     Done (Success)
Exit code:          1
Status Reason:      There were some warnings: some file(s) listed in the output sandbox were not available and were ignored
Destination:        golias25.farm.particle.cz:2119/jobmanager-lcgpbs-lcgatlasprod
reached on:         Sat Nov 26 01:05:41 2005
*************************************************************
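ckjob is a small private helper script that is not shown on this page. A minimal sketch that would produce the kind of output above, assuming it simply polls edg-job-status for the exported $JOB once a minute, is:

#!/bin/bash
# Hypothetical reconstruction of ckjob: print a timestamp and the current
# bookkeeping information for $JOB until interrupted.
while true; do
  date
  edg-job-status "$JOB"
  sleep 60
done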


Results

[lxplus]edg-job-get-output -dir . $JOB
 
Retrieving files from host: gdrb03.cern.ch ( for https://gdrb03.cern.ch:9000/91g-L0fBhPBWgA1kdxQ_xg )
 
*********************************************************************************
                        JOB GET OUTPUT OUTCOME
 
 Output sandbox files for the job:
 - https://gdrb03.cern.ch:9000/91g-L0fBhPBWgA1kdxQ_xg
 have been successfully retrieved and stored in the directory:
 /afs/cern.ch/user/s/stdenis/testarea/10.0.1/PhysicsAnalysis/AnalysisCommon/UserAnalysis/UserAnalysis-00-02-02/share/stdenis_91g-L0fBhPBWgA1kdxQ_xg
 
*********************************************************************************
 [lxplus]cat stdenis_91g-L0fBhPBWgA1kdxQ_xg/athena_result.out
Sat Nov 26 02:01:13 CET 2005
Athena               INFO including file "AthenaCommon/Compat.py"
Athena               INFO including file "AthenaCommon/Bootstrap.py"
ApplicationMgr       INFO Successfully loaded modules:
ApplicationMgr       INFO Application Manager Configured successfully
Athena               INFO including file "AthenaCommon/Atlas.UnixStandardJob.py"
Athena               INFO including file "StoreGate/StoreGate_jobOptions.py"
ApplicationMgr       INFO Successfully loaded modules: StoreGate, CLIDSvc
Athena               INFO including file "IOVSvc/IOVSvc.py"
ServiceManager    WARNING Service factory for type IOVSvc already declared
ApplicationMgr       INFO Successfully loaded modules: IOVSvc
ActiveStoreSvc       INFO Initializing ActiveStoreSvc - package version StoreGate-02-14-14
Athena               INFO including file "DetDescrCnvSvc/DetStore_joboptions.py"
ApplicationMgr       INFO Successfully loaded modules: DetDescrCnvSvc
EventPersistenc...   INFO "CnvServices": ["DetDescrCnvSvc"]
Athena               INFO including file "IdDictDetDescrCnv/IdDictDetDescrCnv_joboptions.py"
ApplicationMgr       INFO Successfully loaded modules: IdDictDetDescrCnv
Athena               INFO including file "jobOptions.pythia.vex06d_v1_003.py"
ApplicationMgr       INFO Successfully loaded modules: McEventSelector
EventPersistenc...   INFO "CnvServices": ["DetDescrCnvSvc", "McCnvSvc"]
Athena               INFO including file "PartPropSvc/PartPropSvc.py"
ApplicationMgr       INFO Successfully loaded modules: PartPropSvc
Athena               INFO including file "AthenaPoolCnvSvc/WriteAthenaPool_jobOptions.py"
Athena               INFO including file "AthenaPoolCnvSvc/AthenaPool_jobOptions.py"
ApplicationMgr       INFO Successfully loaded modules: PoolSvc, AthenaPoolCnvSvc, AthenaPoolCnvSvcPoolCnv
EventPersistenc...   INFO "CnvServices": ["DetDescrCnvSvc", "McCnvSvc", "AthenaPoolCnvSvc"]
Athena               INFO including file "AthenaSealSvc/AthenaSealSvc_joboptions.py"
ApplicationMgr       INFO Successfully loaded modules: AthenaSealSvc
Athena               INFO including file "AthenaSealSvc/AthenaSealSvcIgnore_joboptions.py"
Athena               INFO including file "GeneratorObjectsAthenaPool/GeneratorObjectsAthenaPool_joboptions.py"
ApplicationMgr       INFO Successfully loaded modules: GeneratorObjectsAthenaPoolPoolCnv
WARNING Algorithm factory for type TruthDemo already declared. Overwriting it.
WARNING Algorithm factory for type HistSample already declared. Overwriting it.
ApplicationMgr       INFO Successfully loaded modules: TruthExamples, Pythia_i
Athena               INFO including file "AthenaCommon/runbatch.py"
EventPersistenc...   INFO "CnvServices": ["DetDescrCnvSvc", "McCnvSvc", "AthenaPoolCnvSvc"]
HistogramPersis...WARNING Histograms saving not required.
 --------------- HepPDT Version 1.00.01 ---------------
found 258 particles
WARNING: $POOL_CATALOG is not defined
using default `xmlcatalog_file:PoolFileCatalog.xml'
XMLFileCatalog: level[Info] Connecting to the catalog
PoolXMLFileCatalog: level[Info] Xerces-c initialization Number 0
PoolXMLFileCatalog: level[Info] File PoolFileCatalog.xml does not exist, a new one is created
AtRndmGenSvc        ERROR bad Seeds property
PYTHIA 2840827 2545112526
ServiceManager      ERROR Unable to initialize Service: AtRndmGenSvc
ApplicationMgr      FATAL Invalid initial state
ApplicationMgr       INFO Application Manager Terminated successfully
[lxplus]

[lxplus]cat stdenis_91g-L0fBhPBWgA1kdxQ_xg/athena_gen.err
lcg_cr: No such file or directory

Oops, got the random number seed wrong. AtRndmGenSvc rejected the Seeds property, presumably because the second PYTHIA seed (2545112526) does not fit into a signed 32-bit integer; Athena then stopped before writing vbf.pool.root, which is why lcg-cr reports "No such file or directory".
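A quick check of that guess (bash arithmetic is 64-bit, so the comparison itself does not overflow):

echo $(( 2545112526 > 2147483647 ))   # prints 1: the seed exceeds the signed 32-bit maximum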

Prepare2

[lxplus]cat jobOptions.pythia.vex06d_v1_003.py
###############################################################
#
# Job options file
#
#==============================================================
#--------------------------------------------------------------
# General Application Configuration options
#--------------------------------------------------------------
theApp.setup( MONTECARLO )
 
include( "PartPropSvc/PartPropSvc.py" )
 
# Add POOL persistency
 
include( "AthenaPoolCnvSvc/WriteAthenaPool_jobOptions.py" )
include( "GeneratorObjectsAthenaPool/GeneratorObjectsAthenaPool_joboptions.py" )
 
# 2101 = EventInfo
# 133273 = MCTruth (HepMC)
Stream1.ItemList += [ "2101#*", "133273#*" ]
include("AthenaSealSvc/AthenaSealSvc_joboptions.py" )
AthenaSealSvc.CheckDictionary = TRUE
Stream1.OutputFile = "vbf.pool.root"
 
 
#--------------------------------------------------------------
# Private Application Configuration options
#--------------------------------------------------------------
theApp.Dlls  += [ "TruthExamples", "Pythia_i" ]
theApp.TopAlg = ["Pythia","DumpMC"]
theApp.ExtSvc += ["AtRndmGenSvc"]
# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
MessageSvc = Service( "MessageSvc" )
MessageSvc.OutputLevel               = 4
#--------------------------------------------------------------
# Event related parameters
#--------------------------------------------------------------
# Number of events to be processed (default is 10)
theApp.EvtMax = 100
# Set run number (default 0 causes problems)
 
EventSelector = Service("EventSelector")
EventSelector.RunNumber = 1337
#
#--------------------------------------------------------------
# Algorithms Private Options
#--------------------------------------------------------------
AtRndmGenSvc = Service( "AtRndmGenSvc" )
AtRndmGenSvc.Seeds = ["PYTHIA 7441506 478781120", "PYTHIA_INIT 331143 7159986"]
# AtRndmGenSvc.ReadFromFile = true;
Pythia = Algorithm( "Pythia" )
Pythia.PythiaCommand = ["pysubs msel 0","pysubs msub 124 1",
                        "pydat2 pmas 25 1 160",
                        "pypars mstp 61 2",
                        "pypars mstp 71 1",
                        "pypars mstp 81 0",
                        "pypars mstp 111 1",
                        "pydat3 mdme 190 1 0",
                        "pydat3 mdme 191 1 0",
                        "pydat3 mdme 192 1 0",
                        "pydat3 mdme 194 1 0",
                        "pydat3 mdme 195 1 0",
                        "pydat3 mdme 196 1 0",
                        "pydat3 mdme 198 1 0",
                        "pydat3 mdme 199 1 0",
                        "pydat3 mdme 200 1 0",
                        "pydat3 mdme 206 1 2",
                        "pydat3 mdme 207 1 3",
                        "pydat3 mdme 208 1 0",
                        "pydat3 mdme 210 1 0",
                        "pydat3 mdme 211 1 0",
                        "pydat3 mdme 212 1 0",
                        "pydat3 mdme 213 1 0",
                        "pydat3 mdme 214 1 0",
                        "pydat3 mdme 215 1 0",
                        "pydat3 mdme 218 1 0",
                        "pydat3 mdme 219 1 0",
                        "pydat3 mdme 220 1 0",
                        "pydat3 mdme 222 1 0",
                        "pydat3 mdme 223 1 0",
                        "pydat3 mdme 224 1 0",
                        "pydat3 mdme 225 1 0",
                        "pydat3 mdme 226 1 1",
                        "pydat3 mdme 174 1 0",
                        "pydat3 mdme 175 1 0",
                        "pydat3 mdme 176 1 0",
                        "pydat3 mdme 177 1 0",
                        "pydat3 mdme 178 1 0",
                        "pydat3 mdme 179 1 0",
                        "pydat3 mdme 182 1 0",
                        "pydat3 mdme 183 1 0",
                        "pydat3 mdme 184 1 0",
                        "pydat3 mdme 185 1 0",
                        "pydat3 mdme 186 1 0",
                        "pydat3 mdme 187 1 0"
                        ]
#---------------------------------------------------------------
# Ntuple service output
#---------------------------------------------------------------
#==============================================================
#
# End of job options file
#
###############################################################
[lxplus]

[lxplus]edg-job-submit -vo atlas -o jid1 e100_gengrid.jdl
 
Selected Virtual Organisation name (from --vo option): atlas
Connecting to host gdrb01.cern.ch, port 7772
Logging to host gdrb01.cern.ch, port 9002
 
================================ edg-job-submit Success =====================================
 The job has been successfully submitted to the Network Server.
 Use edg-job-status command to check job current status. Your job identifier (edg_jobId) is:
 
 - https://gdrb01.cern.ch:9000/t7aBrx77KmPHTb79iLxV6g
 
 The edg_jobId has been saved in the following file:
 /afs/cern.ch/user/s/stdenis/testarea/10.0.1/PhysicsAnalysis/AnalysisCommon/UserAnalysis/UserAnalysis-00-02-02/share/jid1
=============================================================================================
 
[lxplus]export JOB=https://gdrb01.cern.ch:9000/t7aBrx77KmPHTb79iLxV6g
[lxplus]./ckjob
Sat Nov 26 02:18:00 CET 2005
 
 
*************************************************************
BOOKKEEPING INFORMATION:
 
Status info for the Job : https://gdrb01.cern.ch:9000/t7aBrx77KmPHTb79iLxV6g
Current Status:     Waiting
reached on:         Sat Nov 26 01:17:36 2005
*************************************************************
Sat Nov 26 02:20:01 CET 2005
 
 
*************************************************************
BOOKKEEPING INFORMATION:
 
Status info for the Job : https://gdrb01.cern.ch:9000/t7aBrx77KmPHTb79iLxV6g
Current Status:     Ready
Status Reason:      unavailable
Destination:        lcgce01.gridpp.rl.ac.uk:2119/jobmanager-lcgpbs-S
reached on:         Sat Nov 26 01:19:06 2005
*************************************************************
 Sat Nov 26 02:21:02 CET 2005
 
 
*************************************************************
BOOKKEEPING INFORMATION:
 
Status info for the Job : https://gdrb01.cern.ch:9000/t7aBrx77KmPHTb79iLxV6g
Current Status:     Scheduled
Status Reason:      Job successfully submitted to Globus
Destination:        lcgce01.gridpp.rl.ac.uk:2119/jobmanager-lcgpbs-S
reached on:         Sat Nov 26 01:20:01 2005
*************************************************************

 Sat Nov 26 02:24:54 CET 2005
 
 
*************************************************************
BOOKKEEPING INFORMATION:
 
Status info for the Job : https://gdrb01.cern.ch:9000/t7aBrx77KmPHTb79iLxV6g
Current Status:     Running
Status Reason:      Job successfully submitted to Globus
Destination:        lcgce01.gridpp.rl.ac.uk:2119/jobmanager-lcgpbs-S
reached on:         Sat Nov 26 01:24:22 2005
*************************************************************
 
 Sat Nov 26 02:25:55 CET 2005
 
 
*************************************************************
BOOKKEEPING INFORMATION:
 
Status info for the Job : https://gdrb01.cern.ch:9000/t7aBrx77KmPHTb79iLxV6g
Current Status:     Done (Success)
Exit code:          0
Status Reason:      There were some warnings: some file(s) listed in the output sandbox were not available and were ignored
Destination:        lcgce01.gridpp.rl.ac.uk:2119/jobmanager-lcgpbs-S
reached on:         Sat Nov 26 01:25:35 2005
*************************************************************





Results2

[lxplus]edg-job-get-output -dir . $JOB
 
Retrieving files from host: gdrb01.cern.ch ( for https://gdrb01.cern.ch:9000/t7aBrx77KmPHTb79iLxV6g )
 
*********************************************************************************
                        JOB GET OUTPUT OUTCOME
 
 Output sandbox files for the job:
 - https://gdrb01.cern.ch:9000/t7aBrx77KmPHTb79iLxV6g
 have been successfully retrieved and stored in the directory:
 /afs/cern.ch/user/s/stdenis/testarea/10.0.1/PhysicsAnalysis/AnalysisCommon/UserAnalysis/UserAnalysis-00-02-02/share/stdenis_t7aBrx77KmPHTb79iLxV6g
 
*********************************************************************************
 [lxplus]ls -l stdenis_t7aBrx77KmPHTb79iLxV6g
total 2
-rw-r--r--    1 stdenis  zp            126 Nov 26 02:26 athena_gen.err
-rw-r--r--    1 stdenis  zp              0 Nov 26 02:26 athena_result.out
-rw-r--r--    1 stdenis  zp            224 Nov 26 02:26 CLIDDBout.txt


What happened to the output? athena_result.out came back empty, and athena_gen.out is not in the sandbox at all.

Prepare3


[lxplus]edg-job-submit -vo atlas -o jid1 e100_gengrid.jdl
 
Selected Virtual Organisation name (from --vo option): atlas
Connecting to host gdrb01.cern.ch, port 7772
Logging to host gdrb01.cern.ch, port 9002
 
================================ edg-job-submit Success =====================================
 The job has been successfully submitted to the Network Server.
 Use edg-job-status command to check job current status. Your job identifier (edg_jobId) is:
 
 - https://gdrb01.cern.ch:9000/c9EbCsLIGo8ZbU083tzYCA
 
 The edg_jobId has been saved in the following file:
 /afs/cern.ch/user/s/stdenis/testarea/10.0.1/PhysicsAnalysis/AnalysisCommon/UserAnalysis/UserAnalysis-00-02-02/share/jid1
=============================================================================================
 
[lxplus]export JOB=https://gdrb01.cern.ch:9000/c9EbCsLIGo8ZbU083tzYCA
[lxplus]./ckjob
Sat Nov 26 02:31:22 CET 2005
 
 
*************************************************************
BOOKKEEPING INFORMATION:
 
Status info for the Job : https://gdrb01.cern.ch:9000/c9EbCsLIGo8ZbU083tzYCA
Current Status:     Waiting
reached on:         Sat Nov 26 01:31:07 2005
*************************************************************
 
[lxplus]cat athena_gen03.sh
#!/bin/bash
source $VO_ATLAS_SW_DIR/software/10.0.1/setup.sh
source $SITEROOT/dist/10.0.1/Control/AthenaRunTime/*/cmt/setup.sh
cp $SITEROOT/dist/10.0.1/InstallArea/share/PDGTABLE.MeV .
# Run the job:
athena.py jobOptions.pythia.vex06d_v1_003.py > athena_result.out 2>&1
ls -l
hostname
lcg-cr -v --vo atlas -d se2-gla.scotgrid.ac.uk \
   -l lfn:stdenis_vbf_vex06d_v2_003.pool.root file://`pwd`/vbf.pool.root

Sat Nov 26 02:34:14 CET 2005
 
 
*************************************************************
BOOKKEEPING INFORMATION:
 
Status info for the Job : https://gdrb01.cern.ch:9000/c9EbCsLIGo8ZbU083tzYCA
Current Status:     Scheduled
Status Reason:      Job successfully submitted to Globus
Destination:        lcgce01.gridpp.rl.ac.uk:2119/jobmanager-lcgpbs-S
reached on:         Sat Nov 26 01:33:14 2005
*************************************************************
 Sat Nov 26 02:37:16 CET 2005
 
 
*************************************************************
BOOKKEEPING INFORMATION:
 
Status info for the Job : https://gdrb01.cern.ch:9000/c9EbCsLIGo8ZbU083tzYCA
Current Status:     Running
Status Reason:      Job successfully submitted to Globus
Destination:        lcgce01.gridpp.rl.ac.uk:2119/jobmanager-lcgpbs-S
reached on:         Sat Nov 26 01:36:33 2005
*************************************************************


Sat Nov 26 02:38:18 CET 2005
 
 
*************************************************************
BOOKKEEPING INFORMATION:
 
Status info for the Job : https://gdrb01.cern.ch:9000/c9EbCsLIGo8ZbU083tzYCA
Current Status:     Done (Success)
Exit code:          0
Status Reason:      There were some warnings: some file(s) listed in the output sandbox were not available and were ignored
Destination:        lcgce01.gridpp.rl.ac.uk:2119/jobmanager-lcgpbs-S
reached on:         Sat Nov 26 01:37:49 2005
*************************************************************

Results3

Messed up the JDL: the OutputSandbox lists athena_gen03.out while StdOutput is athena_gen.out, so the job's stdout never comes back (hence the sandbox warnings above). The Pythia screen output athena_gen_25oct051947.txt looked OK, but it is hard to tell whether the file was stored properly, so retry with a corrected JDL (Prepare4).
[lxplus]ls
crap  edglog.log  stdenis_c9EbCsLIGo8ZbU083tzYCA
[lxplus]ls stdenis_c9EbCsLIGo8ZbU083tzYCA
athena_gen.err  athena_result.out  CLIDDBout.txt

 [lxplus]cat stdenis_c9EbCsLIGo8ZbU083tzYCA/athena_gen.err
[lxplus]ls
crap  edglog.log  stdenis_c9EbCsLIGo8ZbU083tzYCA



Prepare4

[lxplus]cat e100_gengrid.jdl
############# Athena #################
Executable = "athena_gen03.sh";
StdOutput = "athena_gen.out";
StdError = "athena_gen.err";
InputSandbox = {"athena_gen03.sh","jobOptions.pythia.vex06d_v1_003.py"};
OutputSandbox = {"athena_gen.out","athena_gen.err","athena_result.out", "CLIDDBout.txt"};
Requirements = Member("VO-atlas-release-10.0.1", other.GlueHostApplicationSoftwareRunTimeEnvironment);
######################################
[lxplus]cat athena_gen03.sh
#!/bin/bash
source $VO_ATLAS_SW_DIR/software/10.0.1/setup.sh
source $SITEROOT/dist/10.0.1/Control/AthenaRunTime/*/cmt/setup.sh
cp $SITEROOT/dist/10.0.1/InstallArea/share/PDGTABLE.MeV .
# Run the job:
athena.py jobOptions.pythia.vex06d_v1_003.py > athena_result.out 2>&1
ls -l
hostname
lcg-cr -v --vo atlas -d se2-gla.scotgrid.ac.uk \
   -l lfn:stdenis_vbf_vex06d_v3_003.pool.root file://`pwd`/vbf.pool.root
[lxplus]cat jobOptions.pythia.vex06d_v1_003.py
###############################################################
#
# Job options file
#
#==============================================================
#--------------------------------------------------------------
# General Application Configuration options
#--------------------------------------------------------------
theApp.setup( MONTECARLO )
 
include( "PartPropSvc/PartPropSvc.py" )
 
# Add POOL persistency
 
include( "AthenaPoolCnvSvc/WriteAthenaPool_jobOptions.py" )
include( "GeneratorObjectsAthenaPool/GeneratorObjectsAthenaPool_joboptions.py" )
 
# 2101 = EventInfo
# 133273 = MCTruth (HepMC)
Stream1.ItemList += [ "2101#*", "133273#*" ]
include("AthenaSealSvc/AthenaSealSvc_joboptions.py" )
AthenaSealSvc.CheckDictionary = TRUE
Stream1.OutputFile = "vbf.pool.root"
 
 
#--------------------------------------------------------------
# Private Application Configuration options
#--------------------------------------------------------------
theApp.Dlls  += [ "TruthExamples", "Pythia_i" ]
theApp.TopAlg = ["Pythia","DumpMC"]
theApp.ExtSvc += ["AtRndmGenSvc"]
# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
MessageSvc = Service( "MessageSvc" )
MessageSvc.OutputLevel               = 4
#--------------------------------------------------------------
# Event related parameters
#--------------------------------------------------------------
# Number of events to be processed (default is 10)
theApp.EvtMax = 100
# Set run number (default 0 causes problems)
 
EventSelector = Service("EventSelector")
EventSelector.RunNumber = 1337
#
#--------------------------------------------------------------
# Algorithms Private Options
#--------------------------------------------------------------
AtRndmGenSvc = Service( "AtRndmGenSvc" )
AtRndmGenSvc.Seeds = ["PYTHIA 7441506 478781120", "PYTHIA_INIT 331143 7159986"]
# AtRndmGenSvc.ReadFromFile = true;
Pythia = Algorithm( "Pythia" )
Pythia.PythiaCommand = ["pysubs msel 0","pysubs msub 124 1",
                        "pydat2 pmas 25 1 160",
                        "pypars mstp 61 2",
                        "pypars mstp 71 1",
                        "pypars mstp 81 0",
                        "pypars mstp 111 1",
                        "pydat3 mdme 190 1 0",
                        "pydat3 mdme 191 1 0",
                        "pydat3 mdme 192 1 0",
                        "pydat3 mdme 194 1 0",* [[https://twiki.cern.ch/twiki/pub/Main/RStDGenG25Nov05/rseed.cc][rseed.cc]]: 


                        "pydat3 mdme 195 1 0",
                        "pydat3 mdme 196 1 0",
                        "pydat3 mdme 198 1 0",
                        "pydat3 mdme 199 1 0",
                        "pydat3 mdme 200 1 0",
                        "pydat3 mdme 206 1 2",
                        "pydat3 mdme 207 1 3",
                        "pydat3 mdme 208 1 0",
                        "pydat3 mdme 210 1 0",
                        "pydat3 mdme 211 1 0",
                        "pydat3 mdme 212 1 0",
                        "pydat3 mdme 213 1 0",
                        "pydat3 mdme 214 1 0",
                        "pydat3 mdme 215 1 0",
                        "pydat3 mdme 218 1 0",
                        "pydat3 mdme 219 1 0",
                        "pydat3 mdme 220 1 0",
                        "pydat3 mdme 222 1 0",
                        "pydat3 mdme 223 1 0",
                        "pydat3 mdme 224 1 0",
                        "pydat3 mdme 225 1 0",
                        "pydat3 mdme 226 1 1",
                        "pydat3 mdme 174 1 0",
                        "pydat3 mdme 175 1 0",
                        "pydat3 mdme 176 1 0",
                        "pydat3 mdme 177 1 0",
                        "pydat3 mdme 178 1 0",
                        "pydat3 mdme 179 1 0",
                        "pydat3 mdme 182 1 0",
                        "pydat3 mdme 183 1 0",
                        "pydat3 mdme 184 1 0",
                        "pydat3 mdme 185 1 0",
                        "pydat3 mdme 186 1 0",
                        "pydat3 mdme 187 1 0"
                        ]
#---------------------------------------------------------------
# Ntuple service output
#---------------------------------------------------------------
#==============================================================
#
# End of job options file
#
###############################################################
[lxplus]


Results4

[lxplus]./ckjob
Sat Nov 26 02:50:12 CET 2005
 
 
*************************************************************
BOOKKEEPING INFORMATION:
 
Status info for the Job : https://gdrb01.cern.ch:9000/KpfCooc5rydB3gPtUQbjPg
Current Status:     Done (Success)
Exit code:          0
Status Reason:      Job terminated successfully
Destination:        zeus02.cyf-kr.edu.pl:2119/jobmanager-lcgpbs-atlas
reached on:         Sat Nov 26 01:47:28 2005
*************************************************************
 
[lxplus]
[lxplus]
[lxplus]cd
[lxplus]cd /tmp/stdenis
[lxplus]edg-job-get-output -dir . $JOB                                                                                                                 
Retrieving files from host: gdrb01.cern.ch ( for https://gdrb01.cern.ch:9000/KpfCooc5rydB3gPtUQbjPg )
 
*********************************************************************************
                        JOB GET OUTPUT OUTCOME
 
 Output sandbox files for the job:
 - https://gdrb01.cern.ch:9000/KpfCooc5rydB3gPtUQbjPg
 have been successfully retrieved and stored in the directory:
 /tmp/stdenis/stdenis_KpfCooc5rydB3gPtUQbjPg
 
*********************************************************************************
 
[lxplus]

[lxplus]cat  stdenis_KpfCooc5rydB3gPtUQbjPg/athena_gen.err
[lxplus]cat  stdenis_KpfCooc5rydB3gPtUQbjPg/athena_gen.out
total 4872
-rwxr-xr-x    1 atlas014 atlas         407 Nov 26 02:43 athena_gen03.sh
-rw-r--r--    1 atlas014 atlas           0 Nov 26 02:43 athena_gen.err
-rw-r--r--    1 atlas014 atlas           0 Nov 26 02:43 athena_gen.out
-rw-r--r--    1 atlas014 atlas     3084719 Nov 26 02:44 athena_result.out
-rw-r--r--    1 atlas014 atlas          56 Nov 26 02:44 AtRndmGenSvc.out
-rw-r--r--    1 atlas014 atlas         224 Nov 26 02:44 CLIDDBout.txt
-rw-r--r--    1 atlas014 atlas        4349 Nov 26 02:43 jobOptions.pythia.vex06d_v1_003.py
-rwxr-xr-x    1 atlas014 atlas       32957 Nov 26 02:43 PDGTABLE.MeV
-rw-r--r--    1 atlas014 atlas         325 Nov 26 02:44 PoolFileCatalog.xml
-rw-r--r--    1 atlas014 atlas     1828194 Nov 26 02:44 vbf.pool.root
zeus69.cyf-kr.edu.pl
Using grid catalog type: edg
Source URL: file:///home/atlas014/globus-tmp.zeus69.23414.0/WMS_zeus69_023884_https_3a_2f_2fgdrb01.cern.ch_3a9000_2fKpfCooc5rydB3gPtUQbjPg/vbf.pool.root
File size: 1828194
VO name: atlas
Destination specified: se2-gla.scotgrid.ac.uk
Destination URL for copy: gsiftp://pool1-gla.scotgrid.ac.uk/pool1-gla:/gridstorage/d1/atlas/2005-11-26/fileeca6dc98-ce35-4883-bf34-ae84d8554f12.2159.0
# streams: 1
# set timeout to 0 seconds
Alias registered in Catalog: lfn:stdenis_vbf_vex06d_v3_003.pool.root
 
Transfer took 6070 ms
Destination URL registered in Catalog: srm://se2-gla.scotgrid.ac.uk/dpm/scotgrid.ac.uk/home/atlas/generated/2005-11-26/fileeca6dc98-ce35-4883-bf34-ae84d8554f12
guid:c096ae9a-4528-4b6c-bc2e-c352b1ce5867
[lxplus]
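With the file registered, the replica can be checked and copied back with the standard lcg-utils commands. A hedged example (not run here; exact options can differ between lcg-utils versions, and a valid grid proxy is assumed):

lcg-lr --vo atlas lfn:stdenis_vbf_vex06d_v3_003.pool.root
lcg-cp --vo atlas lfn:stdenis_vbf_vex06d_v3_003.pool.root \
   file://$PWD/vbf.pool.root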

Finally OK; results are in athena_gen_25oct051954.txt.
Major updates:
-- RichardStDenis - 26 Nov 2005
