#!/bin/bash
#
# usage : fetch-from-web URL Output-file
#  output file must not exist
#
# exit code:
#   0: file retrieved from URL and copied to output-file
#   1: command did not succeed
#
# Things that may fail:
#   wrong arguments
#   curl missing
#   curl fails
#  
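#
# Example invocation (illustrative placeholders only, not a real project):
#   fetch-from-web 'https://gitlab.cern.ch/<group>/<project>/raw/master/<file>' /tmp/<file>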

url=$1
file=$2

if [ -z "$url" -o -z "$file" ] ; then
    exit 1
fi

# Look for curl: source the CRAB3 client environment, which should provide it
if [[ $SCRAM_ARCH == slc6* ]] ; then
    source /cvmfs/cms.cern.ch/crab3/crab.sh prod
else
    source /cvmfs/cms.cern.ch/crab3/crab.sh prod
fi
if ! type curl > /dev/null 2>&1 ; then
    echo "ERROR: cannot find curl"
    exit 1
fi

ConfigFile=${CMS_PATH}/SITECONF/local/JobConfig/site-local-config.xml

# Try to retrieve the file via Squid; if it fails, try without Squid
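# In site-local-config.xml the proxy is typically declared along the lines of
#   <proxy url="http://squid.example.site:3128"/>   (illustrative value only);
# the cut below keeps whatever sits between the first pair of double quotes.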
squidUrl=`grep proxy $ConfigFile | head -1 | cut -d'"' -f 2`
if [ -z "$squidUrl" ] ; then
    useSquid=0
    echo "WARNING: failed to find squidUrl in $ConfigFile"
else
    useSquid=1

    # Squids at CERN, RAL and London can only be used for Frontier
    ( echo $squidUrl | grep -q 'cmst0frontier.*\.cern\.ch' ) && useSquid=0
    ( echo $squidUrl | grep -q 'pp\.rl\.ac\.uk' ) && useSquid=0
    ( echo $squidUrl | grep -q 'hep\.ph\.ic\.ac\.uk' ) && useSquid=0
fi

if [ $useSquid == 1 ] ; then
    export http_proxy=$squidUrl
    echo "http_proxy: ${http_proxy}"
fi
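# Note: curl reads the http_proxy variable exported above for http:// URLs;
# an https:// URL would be governed by https_proxy instead.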

wgetOutput=`mktemp`
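# --fail makes curl return a non-zero exit code on an HTTP error response
# (e.g. 404), so a failed download is caught by the return-code check below.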
cmd="curl --fail --request GET --header \"PRIVATE-TOKEN: kuD9s3WrDGNXZ8BvxFV2\" $url -o $file"

cmdFile=`mktemp`
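# Writing the command (with its output redirection) to a file and sourcing it
# lets the shell re-parse the embedded quotes around the PRIVATE-TOKEN header.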
echo "$cmd > $wgetOutput 2>&1" > $cmdFile
source $cmdFile
rc=$?
if [ $rc != 0 ] ; then
    echo "curl error code: $rc"
    cat $wgetOutput
    if [ $useSquid == 0 ] ; then
	echo "ERROR: Failed to retrieve $file from $url"
    else
	echo -n "WARNING: Failed to retrieve $file from $url. Trying without squid... "
	unset http_proxy
	source $cmdFile
	rc=$?
	if [ $rc != 0 ] ; then
	    echo
	    echo "ERROR: Failed to retrieve $file from $url without squid"
	else
	    rc=0
	    echo "Succeeded!"
	fi
    fi
fi
rm -f $wgetOutput $cmdFile

if [ $rc != 0 ] ; then
    exit 1
else
    exit 0
fi