Commit 2ea88472 authored by Mihai Patrascoiu's avatar Mihai Patrascoiu
Browse files

Merge branch 'FTS-1702' into 'develop'

FTS-1702 Destination file integrity check

See merge request !49
parents 54b0b74f cf384d19
Pipeline #3005651 passed with stage
in 2 minutes and 53 seconds
......@@ -188,6 +188,15 @@ E.
Dropbox).
.RS
.RE
.TP
.B --dst_file_report
Report on the destination tape file if archiving to tape, overwrite is not
switched on and the tape file already exists. The report will include both the
size and checksum of the file. This integrity check can be used by the end user
or system to decide whether to stop trying to archive the file to tape
because it already exists.
.RS
.RE
.SH EXAMPLE
.IP
.nf
......
......@@ -41,6 +41,7 @@ DEFAULT_PARAMS = {
'copy_pin_lifetime': -1,
'bring_online': -1,
'archive_timeout': -1,
'dst_file_report': False,
'timeout': None,
'fail_nearline': False,
'retry': 0,
......@@ -145,6 +146,8 @@ class JobSubmitter(Base):
help='bring online timeout in seconds.')
self.opt_parser.add_option('--archive-timeout', dest='archive_timeout', type='long',
help='archive timeout in seconds.')
self.opt_parser.add_option('--dst-file-report', dest='dst_file_report', default=False, action='store_true',
help='report on the destination tape file if it already exists and overwrite is off.')
self.opt_parser.add_option('--timeout', dest='timeout', type='long',
help='transfer timeout in seconds.')
self.opt_parser.add_option('--fail-nearline', dest='fail_nearline', action='store_true',
......@@ -244,6 +247,7 @@ class JobSubmitter(Base):
checksum=self.checksum,
bring_online=self.options.bring_online,
archive_timeout=self.options.archive_timeout,
dst_file_report=self.options.dst_file_report,
timeout = self.options.timeout,
verify_checksum=checksum_mode[0],
spacetoken=self.options.destination_token,
......
......@@ -46,6 +46,7 @@ class Job(Base):
space_token = Column(String(255))
internal_job_params = Column(String(255))
overwrite_flag = Column(Flag)
dst_file_report = Column(Flag)
job_finished = Column(DateTime)
source_space_token = Column(String(255))
copy_pin_lifetime = Column(Integer)
......@@ -92,6 +93,7 @@ class ArchivedJob(Base):
space_token = Column(String(255))
internal_job_params = Column(String(255))
overwrite_flag = Column(Flag)
dst_file_report = Column(Flag)
job_finished = Column(DateTime)
source_space_token = Column(String(255))
copy_pin_lifetime = Column(Integer)
......
......@@ -105,7 +105,7 @@ def add_alternative_source(transfer, alt_source):
def new_job(transfers=None, deletion=None, verify_checksum=False, reuse=None, overwrite=False, multihop=False,
source_spacetoken=None, spacetoken=None,
bring_online=None, archive_timeout=None, copy_pin_lifetime=None,
bring_online=None, archive_timeout=None, dst_file_report=False, copy_pin_lifetime=None,
retry=-1, retry_delay=0, metadata=None, priority=None, strict_copy=False,
max_time_in_queue=None, timeout=None,
id_generator=JobIdGenerator.standard, sid=None,
......@@ -124,6 +124,7 @@ def new_job(transfers=None, deletion=None, verify_checksum=False, reuse=None, ov
spacetoken: Destination space token
bring_online: Bring online timeout
archive_timeout: Archive timeout
dst_file_report: Report on the destination tape file if it already exists and overwrite is off
copy_pin_lifetime: Pin lifetime
retry: Number of retries: <0 is no retries, 0 is server default, >0 is whatever value is passed
metadata: Metadata to bind to the job
......@@ -152,6 +153,7 @@ def new_job(transfers=None, deletion=None, verify_checksum=False, reuse=None, ov
spacetoken=spacetoken,
bring_online=bring_online,
archive_timeout=archive_timeout,
dst_file_report=dst_file_report,
copy_pin_lifetime=copy_pin_lifetime,
job_metadata=metadata,
source_spacetoken=source_spacetoken,
......
......@@ -47,6 +47,7 @@ DEFAULT_PARAMS = {
'gridftp': '',
'job_metadata': None,
'overwrite': False,
'dst_file_report': False,
'reuse': None,
'multihop': False,
'source_spacetoken': '',
......@@ -446,6 +447,8 @@ class JobBuilder(object):
activity=file_dict.get('activity', 'default'),
hashed_id=shared_hashed_id if shared_hashed_id else _generate_hashed_id()
)
if f['file_metadata'] != None:
f['file_metadata'] = _metadata(f['file_metadata'])
self.files.append(f)
def _apply_selection_strategy(self):
......@@ -504,6 +507,7 @@ class JobBuilder(object):
priority=max(min(int(self.params['priority']), 5), 1),
space_token=self.params['spacetoken'],
overwrite_flag=_safe_flag(self.params['overwrite']),
dst_file_report=_safe_flag(self.params['dst_file_report']),
source_space_token=self.params['source_spacetoken'],
copy_pin_lifetime=int(self.params['copy_pin_lifetime']),
checksum_method=self.params['verify_checksum'],
......@@ -641,6 +645,7 @@ class JobBuilder(object):
priority=3,
space_token=self.params['spacetoken'],
overwrite_flag='N',
dst_file_report='N',
source_space_token=self.params['source_spacetoken'],
copy_pin_lifetime=-1,
checksum_method=None,
......
......@@ -77,6 +77,9 @@ paramSchema = {
'overwrite': {
'type': ['boolean', 'null']
},
'dst_file_report': {
'type': ['boolean', 'null']
},
'gridftp': {
'type': ['string', 'null'],
'title': 'Reserved for future usage'
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment