Commit c4cd4e26 authored by Quentin Codelupi

[clean] some duplicates code

parent 4faa1a6d
# Default ignored files
/workspace.xml
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
\ No newline at end of file
<component name="InspectionProjectProfileManager">
  <profile version="1.0">
    <option name="myName" value="Project Default" />
    <inspection_tool class="DuplicatedCode" enabled="true" level="WEAK WARNING" enabled_by_default="true">
      <Languages>
        <language minSize="146" name="Python" />
      </Languages>
    </inspection_tool>
  </profile>
</component>
\ No newline at end of file
@@ -47,6 +47,10 @@ sums_cols = udf(lambda arr: 0 if arr == [] else __builtins__.sum(arr), IntegerTy
 '''

+def write_file(_device, _start_time, _end_time, _property, data):
+    write_file(_device, _start_time, _end_time, _property, data)

 def uncompress(_data_array, _index_array, _size, _nrows):
     print('uncompress start')
     result = np.zeros(_size, dtype=np.int)
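The body of the new helper is not fully visible in this rendered diff. Below is a minimal sketch of what the consolidated function plausibly does, reconstructed from the duplicated block that the later hunks remove; the module-level _dest_dir output directory is an assumption (the original script defines its own), and json is imported as in the original.

import json

_dest_dir = '.'  # placeholder; assumed module-level output directory

def write_file(_device, _start_time, _end_time, _property, data):
    # Output path mirrors the removed duplicated blocks:
    # <dest_dir>/<device>_<start>to<end>_<property>.dat
    path = (_dest_dir + '/' + _device + '_' + _start_time.replace(' ', '_') + 'to'
            + _end_time.replace(' ', '_') + '_' + _property + '.dat')
    # Dump the pulled data as JSON, then log which file was written.
    with open(path, 'w') as outfile:
        json.dump(data, outfile)
    print(path + " Pulled")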
@@ -243,14 +247,7 @@ def pull_integral(_start_time, _end_time, _device):
     db_cursor = mydb.cursor()
     db_cursor.execute('UPDATE data SET integral = %s where id = %s', (json.dumps(data), data_id,))

-    # Writing output #
-    with open(_dest_dir + '/' + _device + '_' + _start_time.replace(' ', '_') + 'to'
-              + _end_time.replace(' ', '_') + '_' + _property + '.dat', 'w') as outfile:
-        json.dump(data, outfile)
-    print(_dest_dir + '/' + _device + '_' + _start_time.replace(' ', '_') + 'to'
-          + _end_time.replace(' ', '_') + '_' + _property + '.dat' + " Pulled")
+    if _files: write_file(_device, _start_time, _end_time, _property, data)

     # Not Working with too mny dat (dump memory)
@@ -323,14 +320,7 @@ def pull_raw_dist(_start_time, _end_time, _device):
     db_cursor = mydb.cursor()
     db_cursor.execute('UPDATE data SET raw_dist = %s where id = %s', (json.dumps(data), data_id,))

-    # Writing output #
-    with open(_dest_dir + '/' + _device + '_' + _start_time.replace(' ', '_') + 'to'
-              + _end_time.replace(' ', '_') + '_' + _property + '.dat', 'w') as outfile:
-        json.dump(data, outfile)
-    print(_dest_dir + '/' + _device + '_' + _start_time.replace(' ', '_') + 'to'
-          + _end_time.replace(' ', '_') + '_' + _property + '.dat' + " Pulled")
+    if _files: write_file(_device, _start_time, _end_time, _property, data)


 def pull_integral_dist(_start_time, _end_time, _device):
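Each pull_* function persists its payload with the same parameterized UPDATE before optionally writing a file; only the target column changes (integral, raw_dist, integral_dist, turnloss). The following self-contained sketch of that pattern assumes a mysql-connector-python connection; the connection settings and the store_column helper are illustrative and not part of the script.

import json
import mysql.connector

# Illustrative connection only; the original script builds `mydb` elsewhere.
mydb = mysql.connector.connect(host='localhost', user='user',
                               password='secret', database='pulls')

def store_column(column, data, data_id):
    # Column names cannot be bound as query parameters, so whitelist them
    # explicitly before interpolating into the statement.
    if column not in ('integral', 'raw_dist', 'integral_dist', 'turnloss'):
        raise ValueError('unexpected column: ' + column)
    db_cursor = mydb.cursor()
    # Serialize the pulled data to JSON and update a single row by id,
    # as the four UPDATE statements in the diff do.
    db_cursor.execute('UPDATE data SET ' + column + ' = %s where id = %s',
                      (json.dumps(data), data_id,))
    mydb.commit()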
@@ -406,14 +396,7 @@ def pull_integral_dist(_start_time, _end_time, _device):
     db_cursor = mydb.cursor()
     db_cursor.execute('UPDATE data SET integral_dist = %s where id = %s', (json.dumps(data), data_id,))

-    # Writing output #
-    with open(_dest_dir + '/' + _device + '_' + _start_time.replace(' ', '_') + 'to'
-              + _end_time.replace(' ', '_') + '_' + _property + '.dat', 'w') as outfile:
-        json.dump(data, outfile)
-    print(_dest_dir + '/' + _device + '_' + _start_time.replace(' ', '_') + 'to'
-          + _end_time.replace(' ', '_') + '_' + _property + '.dat' + " Pulled")
+    if _files: write_file(_device, _start_time, _end_time, _property, data)


 def pull_turnloss(_start_time, _end_time, _device):
@@ -504,14 +487,7 @@ def pull_turnloss(_start_time, _end_time, _device):
     db_cursor = mydb.cursor()
     db_cursor.execute('UPDATE data SET turnloss = %s where id = %s', (json.dumps(data), data_id,))

-    # Writing output #
-    with open(_dest_dir + '/' + _device + '_' + _start_time.replace(' ', '_') + 'to'
-              + _end_time.replace(' ', '_') + '_' + _property + '.dat', 'w') as outfile:
-        json.dump(data, outfile)
-    print(_dest_dir + '/' + _device + '_' + _start_time.replace(' ', '_') + 'to'
-          + _end_time.replace(' ', '_') + '_' + _property + '.dat' + " Pulled")
+    if _files: write_file(_device, _start_time, _end_time, _property, data)


 if __name__ == '__main__':
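The hunk headers also reference the script's sums_cols UDF, cut off at "IntegerTy" in this view. Below is a standalone sketch of how such a PySpark UDF is typically declared and used; the IntegerType() return type is inferred from the truncated header, and builtins.sum stands in for the original's __builtins__.sum (used presumably because PySpark's SQL sum shadows the builtin).

import builtins

from pyspark.sql import SparkSession
from pyspark.sql.functions import udf
from pyspark.sql.types import IntegerType

# Sum an array column, returning 0 for an empty array.
sums_cols = udf(lambda arr: 0 if arr == [] else builtins.sum(arr), IntegerType())

if __name__ == '__main__':
    spark = SparkSession.builder.appName('sums_cols-demo').getOrCreate()
    df = spark.createDataFrame([([1, 2, 3],), ([],)], ['values'])
    # 'total' is 6 for the first row and 0 for the empty array.
    df.withColumn('total', sums_cols('values')).show()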