Make exporting with rows = current levels
Test fix
grigsos committed Mar 26, 2023
1 parent 2fec1fb commit a53e647
Showing 5 changed files with 75 additions and 28 deletions.
5 changes: 4 additions & 1 deletion src/main.py
@@ -1,4 +1,4 @@
from src.methods import data_processing, data_validation, initialisation, report_generator, data_import
from src.methods import data_processing, data_validation, initialisation, data_import
import time # possibly remove later, as logging the runtime is enough
import logging

@@ -13,6 +13,9 @@ def main():
data_processing.main(testVar)
#don't forget to use hashList

from src.methods import report_generator # to make global variable work
report_generator.main(mainFolderPath,configFileMain,hashL,testVar)

logging.info("Program ended")
logging.info("Program runtime: " + f"{(time.time() - start_time):.2f}" + " seconds")
#data_import.getFiles()
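
The deferred import of report_generator inside main() works because report_generator imports the module-level variable subCompListy from data_import, and that variable only exists once data_import.main() has run. A minimal standalone sketch of the ordering constraint (module contents simplified, example values made up):

# data_import.py
def main():
    global subCompListy                    # created only when main() runs
    subCompListy = [["Cost", "Time"], ["Risk"]]

# report_generator.py
from data_import import subCompListy      # ImportError unless data_import.main() already ran

# main.py
import data_import
data_import.main()                         # sets the global first
import report_generator                    # only now is the from-import safe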
25 changes: 17 additions & 8 deletions src/methods/data_import.py
@@ -14,20 +14,21 @@


def main(inputFolder, configFile,xlList):
global subCompListy
logging.info("M - Data import started")
cC = configFile.get("Settings", "comparisonColumn")[1:-1]
lC = configFile.get("Settings", "levelColumn")[1:-1]
sR = int(configFile.get("Settings", "startingRow"))
cLG = int(configFile.get("Settings", "comparisonLevelGap"))
subCompList = eval(configFile.get("Settings", "numberOfSubComponents"))
subCompListy, subCompNumList = extract_subdimensions(configFile)
special_sheets_list = [] #list of special sheets

for key in configFile['Special_sheets']:
special_sheets_list.append(eval(configFile['Special_sheets'][key]))

worksheetList = get_first_X_worksheet_names(xlList[0], len(subCompList))
worksheetList = get_first_X_worksheet_names(xlList[0], len(subCompNumList))

x=start_import(xlList, worksheetList, cC, sR,subCompList,cLG,lC,special_sheets_list)
x=start_import(xlList, worksheetList, cC, sR,subCompNumList,cLG,lC,special_sheets_list)
#print(x)
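
For orientation, a short standalone sketch of how the configparser keys read above behave; the key and section names follow the diff, the values are made up:

import configparser

# illustrative config text; only the key/section names come from data_import.main
cfg = configparser.ConfigParser()
cfg.read_string("""
[Settings]
comparisonColumn = "B"
levelColumn = "C"
startingRow = 5
comparisonLevelGap = 2
numberOfSubComponents = [3, 2]

[Subcatergories]
dim1 = ["Cost", "Time", "Quality"]
dim2 = ["Risk", "Scope"]

[Special_sheets]
sheet1 = ["Summary", 10]
""")

cC = cfg.get("Settings", "comparisonColumn")[1:-1]                 # 'B' (surrounding quotes stripped)
sR = int(cfg.get("Settings", "startingRow"))                       # 5
subCompList = eval(cfg.get("Settings", "numberOfSubComponents"))   # [3, 2]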


@@ -54,16 +55,16 @@ def start_import(pathList, worksheetList, comparisonColumn, startingRow,subCompy

lList,wList, dimList, intList = get_data(pathList, worksheetList, levelColumn, comparisonColumn, startingRow,subCompyList,comparisonLevelGap,SSList)

print(get_krippendorff_DF(pd.concat(wList,axis=0)))
#print(get_krippendorff_DF(pd.concat(wList,axis=0)))
mWL,oG=simplify_krip(3)
for i, df in enumerate(wList):
wList[i] = df.applymap(lambda x: replace_with_lists(x, oG, mWL))


verticalAdd= pd.concat(wList,axis=0) #join all dataframes vertically
print(get_krippendorff_DF(verticalAdd))
#print(get_krippendorff_DF(verticalAdd))
#print(get_krippendorff_DF(pd.concat(lList,axis=0)))
return wList[0].iloc[:,0]
return lList

def get_data(pathList, worksheetList, levelColumn, weightColumn, startingRow, subCompyList, comparisonLevelGap,sSL):
logging.info("Level and weight and special sheete import started")
@@ -97,7 +98,7 @@ def get_data(pathList, worksheetList, levelColumn, weightColumn, startingRow, su
xl.close()


print(interdimensionalList)
#print(interdimensionalList)
logging.info("Level and weight and special sheete import finished successfully")
return levelList, weightList, dimensionList, interdimensionalList

@@ -138,7 +139,15 @@ def simplify_krip(nWanted): #simplify krippendorff alpha
[odd_numbers_no_median[i:i+num_odd_per_group] for i in range(0, len(odd_numbers_no_median), num_odd_per_group)]
odd_groups.sort()
return my_wantedList,odd_groups


def extract_subdimensions(CF):
subList = []
for option in CF.options('Subcatergories'):
value = eval(CF.get('Subcatergories', option))
if option != '':
subList.append(value)
return subList, [len(l) for l in subList]
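
Continuing the illustrative config sketch above, extract_subdimensions walks the [Subcatergories] section (section name as spelled in the code), evals each option value, and returns the lists plus their lengths:

# using the made-up cfg from the earlier sketch
subCompListy, subCompNumList = extract_subdimensions(cfg)
# subCompListy   -> [["Cost", "Time", "Quality"], ["Risk", "Scope"]]
# subCompNumList -> [3, 2]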

def replace_with_lists(num,odd_groups,my_wantedList): #replace with lists
for sublist in odd_groups:
if num in sublist:
2 changes: 1 addition & 1 deletion src/methods/data_processing.py
Expand Up @@ -12,7 +12,7 @@ def main(data):

#data = pd.DataFrame({'a': [3,0.5,0.5,1,0.5,0.25,0.25,0.25,0.25,0.3333,2,0.5,3,2,0.3333]})
#print(data)
matrix = matrix_make(data)
#matrix = matrix_make(data)
#print(ahp(matrix,RI[matrix.shape[0]]))
logging.info("M - Data processing finished successfully")
return None
68 changes: 53 additions & 15 deletions src/methods/report_generator.py
@@ -1,15 +1,25 @@
import logging
import os
import csv
import sys
import datetime
import pandas as pd

from src.methods.data_import import subCompListy

##############Main function##############
def main(mFP,cFM):
def main(mFP,cFM,hashList,testDataframe):
logging.info("Report generation started")
outFolderPath = check_output_folder(cFM.get("Settings", "outputFolder"),mFP)
make_csv(outFolderPath)
logging.info("Report generation finished successfully")
outFolderPath = check_output_folder(cFM.get("Settings", "outputFolder"),mFP) #check if output folder exists
listDimension,combinedList=get_subsection(cFM) #get subsections and dimensions from config file
sOF=make_folder(outFolderPath,len(hashList)) #returns specific output folder


csv_levels(testDataframe,sOF,combinedList) #generates csv file with levels


logging.info("Report generation finished successfully")
return None



@@ -18,8 +28,16 @@ def main(mFP,cFM):

#File that generates the report

#make .csv of results
def get_subsection(CF):
dimList = eval(CF.get("Dimensions","dimensions"))

combinedList=[]

for i in range(len(dimList)):
for n in range(len(subCompListy[i])):
combinedList.append(f"{dimList[i]} - {subCompListy[i][n]}")

return dimList,combinedList
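
A minimal standalone sketch of the label construction in get_subsection, with made-up dimension and subcategory names:

dimList = ["Technical", "Economic"]
subCompListy = [["Cost", "Time"], ["Risk"]]

# same "<dimension> - <subcategory>" pairing as the nested loops above
combinedList = [f"{dim} - {sub}" for dim, subs in zip(dimList, subCompListy) for sub in subs]
print(combinedList)   # ['Technical - Cost', 'Technical - Time', 'Economic - Risk']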
#maybe make pdf with graphs
def check_output_folder(folderName,pathFold): #check if excel survey folder exists
joinedOutputPath = os.path.join(pathFold,folderName)
@@ -33,18 +51,38 @@ def check_output_folder(folderName,pathFold): #check if excel survey folder exists

def make_header():

return None
return -1

def make_csv(oFP):
filepath = os.path.join(oFP, get_current_time() + " - " + get_num_participants() + " participant(s)"+ ".csv")
with open(filepath, "w", newline="") as file:
writer = csv.writer(file)
return None
def make_folder(oFP,num_participants):
folder_name = get_current_time() + " - " + str(num_participants) + " participant(s)"
specificOutFolder = os.path.join(oFP, folder_name)

def get_num_participants():
#todo: get number of participants from the survey
return str(5)
if not os.path.exists(os.path.join(oFP, folder_name)):
os.makedirs(os.path.join(oFP, folder_name))
logging.info("Directory created successfully!")
else:
logging.info("Directory already exists!")
logging.log(logging.CRITICAL, "Program aborted: program run twice within a single second")
sys.exit()
return specificOutFolder

def get_current_time():
now = datetime.datetime.now()
return now.strftime('%Y-%m-%dT%H_%M_%S')
return now.strftime('%Y-%m-%dT%H_%M_%S')
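
For reference, a standalone sketch of the folder name make_folder composes from get_current_time (participant count and timestamp are illustrative):

import datetime
import os

stamp = datetime.datetime.now().strftime('%Y-%m-%dT%H_%M_%S')    # e.g. '2023-03-26T14_05_33'
print(os.path.join("output", f"{stamp} - 3 participant(s)"))      # output/2023-03-26T14_05_33 - 3 participant(s)

Underscores instead of colons keep the name valid on Windows; a second run within the same second hits the "already exists" branch above and aborts.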


##############csv generation##############

def csv_make_weights():
return None


def csv_levels(df,sOF,combL):
verticalAdd= pd.concat(df,axis=0)
verticalAdd.index=combL


#newDf = pd.DataFrame(verticalAdd.values, index=combL, columns=verticalAdd.columns)
testPath=os.path.join(sOF, "levels.csv")
verticalAdd.to_csv(testPath, index=True)
return None
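
csv_levels expects an iterable of DataFrames, stacks them vertically, relabels the rows with the "<dimension> - <subcategory>" strings from get_subsection, and writes levels.csv. A minimal standalone sketch with made-up rater columns and level values:

import pandas as pd

frames = [
    pd.DataFrame({"rater1": [3, 2], "rater2": [4, 2]}),   # levels for one dimension
    pd.DataFrame({"rater1": [5],    "rater2": [4]}),      # levels for another dimension
]
labels = ["Technical - Cost", "Technical - Time", "Economic - Risk"]

combined = pd.concat(frames, axis=0)      # stack vertically, as in csv_levels
combined.index = labels                   # row labels = combinedList
combined.to_csv("levels.csv", index=True) # one row per subcategory, one column per rater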
3 changes: 0 additions & 3 deletions tests/test_data_import.py

This file was deleted.
