Commit 88e80f5e authored by Elvin Sindrilaru's avatar Elvin Sindrilaru

MGM: Use XRootD connection pool for the central draining if this is enabled

There are two env variables that control the connection pool, namely:
EOS_XRD_USER_CONNECTION_POOL - enable the xrootd connection pool
EOS_XRD_CONNECTION_POOL_SIZE - max number of unique physical connections
towards a particular host.
parent b371f105
......@@ -105,6 +105,7 @@
#include "common/FileId.hh"
#include "common/FileSystem.hh"
#include "common/AssistedThread.hh"
#include "common/XrdConnPool.hh"
#include "mq/XrdMqMessaging.hh"
#include "mgm/proc/ProcCommand.hh"
#include "mgm/drain/Drainer.hh"
......@@ -1610,6 +1611,7 @@ public:
int mFusexPort; ///< port of the FUSEX broadcast MQ, default 1100
bool mTapeAwareGcDefaultSpaceEnable; ///< Flag to mark if tape aware garbage collection should be enabled
TapeAwareGc& mTapeAwareGc; ///< Tape aware garbage collector
eos::common::XrdConnPool mXrdConnPool; ///< XRD connection pool
private:
std::map<std::string, XrdMgmOfsDirectory*>
......
......@@ -99,6 +99,11 @@ DrainTransferJob::DoIt()
return;
}
// If enabled use xrootd connection pool to avoid bottlenecks on the
// same physical connection
eos::common::XrdConnIdHelper src_id_helper(gOFS->mXrdConnPool, url_src);
eos::common::XrdConnIdHelper dst_id_helper(gOFS->mXrdConnPool, url_dst);
// Populate the properties map of the transfer
XrdCl::PropertyList properties;
properties.Set("force", true);
properties.Set("posc", false);
......@@ -120,8 +125,9 @@ DrainTransferJob::DoIt()
XrdCl::CopyProcess cpy;
cpy.AddJob(properties, &result);
XrdCl::XRootDStatus prepare_st = cpy.Prepare();
eos_info("[tpc]: %s => %s logid=%s prepare_msg=%s",
url_src.GetLocation().c_str(), url_dst.GetLocation().c_str(),
eos_info("[tpc]: id=%s url=%s => id=%s url=%s logid=%s prepare_msg=%s",
url_src.GetHostId().c_str(), url_src.GetLocation().c_str(),
url_dst.GetHostId().c_str(), url_dst.GetLocation().c_str(),
log_id.c_str(), prepare_st.ToStr().c_str());
if (prepare_st.IsOK()) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment