Commit c4906910 authored by Thomas Van Vark

Fix linter

parent 029ded34
1 merge request: !7 Job/hostimmunity
Pipeline #6848384 passed
@@ -2,7 +2,6 @@ import requests
 from bs4 import BeautifulSoup
 import pandas as pd
 from io import BytesIO
-import json
 from utils import update
@@ -45,7 +44,8 @@ def get_summary_xlsx():
         return response.content
     else:
         raise f"Failed to retrieve the .xlsx file. Status code: {response.status_code}"
 def filter_xlsx(xlsx):
     # Read the Excel file into a pandas DataFrame
     primary_series_studies = pd.read_excel(BytesIO(xlsx), sheet_name="Primary Series Studies")
@@ -53,7 +53,8 @@ def filter_xlsx(xlsx):
     primary_series_studies = primary_series_studies[primary_series_studies["outcome_category"].isin(filters)]
     booster_studies = booster_studies[booster_studies["outcome_category"].isin(filters)]
     return (primary_series_studies, booster_studies)
 def get_primary_ve_values(df):
     # calculate the VE value
@@ -63,7 +64,6 @@ def get_primary_ve_values(df):
     return ve_data.to_dict('records')
 def get_booster_ve_values(df):
     # create df without the ' or ' substring in primary vaccines
     rows_with_or = df[df['primary series vaccine'].str.contains(' or ')]
@@ -81,7 +81,7 @@ def get_booster_ve_values(df):
         new_rows_with_or.at[index, 'primary series vaccine'] = row['primary series vaccine'].split(' or ')[0]
         rows_to_add.loc[index] = new_rows_with_or.loc[index]
         new_rows_with_or.at[index, 'primary series vaccine'] = row['primary series vaccine'].split(' or ')[1]
-        rows_to_add.loc[len(rows_indexes)+index] = new_rows_with_or.loc[index]
+        rows_to_add.loc[len(rows_indexes) + index] = new_rows_with_or.loc[index]
     # merge the dataframe without the ' or ' with the new dataframe that has the rows divided in two
     final_df = pd.concat([df_without_or, rows_to_add]).reset_index().drop(columns=['index'])
@@ -102,6 +102,7 @@ def convert_primary_dict(result):
     return new
 xlsx = get_summary_xlsx()
 (primary_series_studies, booster_studies) = filter_xlsx(xlsx)
 vaccine_primary_host_immunity = convert_primary_dict(get_primary_ve_values(primary_series_studies))