
Commit

fixed
444thLiao committed Apr 24, 2018
1 parent c7ad3f3 commit dc92e26
Showing 5 changed files with 19 additions and 9 deletions.
9 changes: 7 additions & 2 deletions aft_pipelines_analysis/aft_pipelines_main.py
@@ -57,7 +57,7 @@ def check_path(path):
os.system('mkdir -p %s ' % path)


-for opath in [somatic_csv, germline_csv, vcf_path, final_csv, filtered_csv, pcgr_output, pack_result]:
+for opath in [somatic_csv, germline_csv, vcf_path, final_csv, filtered_csv, pcgr_output, pack_result,info_summary_path]:
check_path(opath)


@@ -162,6 +162,10 @@ def remind_text(local_project_path):
base_outpath, server_script_path, bed_file_path, REF_file_path)
remind_run_command += '''for each in %s/XK_result/*/*recal_reads.bam; do python %s/pre_pipelines_analysis/cal_Cov_script_version.py -b $each -B %s -r %s & done''' % (
base_outpath, server_script_path, bed_file_path, REF_file_path)
+remind_run_command += '''
+##run this script to generate the assessment file. \n\n \
+python %s/aft_pipelines_analysis/run_info_summary.py %s \n''' % (
+server_script_path, server_setting_path)
remind_run_command += '''
##run script which is fetch cov_info from .info file and add it into csvfile. \n\n \
python2 Whole_pipelines/aft_pipelines_analysis/run_add_per_info_into_csv.py %s \n''' % server_setting_path
@@ -171,6 +175,7 @@ def draw_coverage_depths():
def draw_coverage_depths():
from draw_quality_line import draw_coverage_depths as dcd
dcd(info_summary_path, NORMAL_SIG, TUMOR_SIG)
+print('finish drawing.')


if __name__ == '__main__':
@@ -186,7 +191,7 @@ def draw_coverage_depths():

server_setting_path = os.path.join(local_project_path, 'setting.py')
if '1' in args:
-if input('Download ? Y/y').upper() == 'Y':
+if str(input('Download ? Y/y')).upper() == 'Y':
download_scp()
else:
print('wrong command,just pass.')
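Note on the `str(input('Download ? Y/y'))` change above: a minimal sketch of a version-tolerant confirmation prompt, assuming the intent is to keep the `.upper()` comparison safe whether the pipeline is launched with Python 2 or Python 3. The `ask_yes_no` helper is hypothetical and not part of the repository.

```python
from __future__ import print_function

try:                       # Python 2: raw_input() returns the typed string
    read_line = raw_input
except NameError:          # Python 3: input() already returns a string
    read_line = input

def ask_yes_no(prompt):
    """Return True only when the user answers Y or y."""
    return str(read_line(prompt)).strip().upper() == 'Y'

if __name__ == '__main__':
    if ask_yes_no('Download ? Y/y '):
        print('downloading...')
    else:
        print('wrong command, just pass.')
```

Binding `raw_input`/`input` once keeps every call site identical across interpreter versions.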
2 changes: 2 additions & 0 deletions aft_pipelines_analysis/draw_quality_line.py
@@ -5,6 +5,8 @@
import sys

def draw_coverage_depths(dir_info,NORMAL_SIG,TUMOR_SIG):
+draw_data = []
+layout = dict(yaxis=dict(exponentformat='e'))
all_info = list(glob.glob(dir_info+'/*cov_summary.info'))
for data_path in all_info:
data_df = pd.read_csv(data_path,sep='\t',index_col=0)
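The new `draw_data` list and `layout = dict(yaxis=dict(exponentformat='e'))` follow Plotly conventions, so the sketch below shows how such traces and layout are typically combined; the column choice, trace names, and output filename are assumptions for illustration rather than code taken from `draw_quality_line.py`.

```python
import glob

import pandas as pd
import plotly.graph_objs as go
from plotly import offline

def draw_coverage_depths(dir_info):
    draw_data = []
    layout = dict(yaxis=dict(exponentformat='e'))  # scientific notation on the y axis
    for data_path in glob.glob(dir_info + '/*cov_summary.info'):
        data_df = pd.read_csv(data_path, sep='\t', index_col=0)
        # Assumption: plot the first column of each summary against its index.
        draw_data.append(go.Scatter(x=data_df.index,
                                    y=data_df.iloc[:, 0],
                                    name=data_path.split('/')[-1]))
    fig = go.Figure(data=draw_data, layout=layout)
    offline.plot(fig, filename=dir_info + '/coverage_depths.html', auto_open=False)
```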
10 changes: 5 additions & 5 deletions aft_pipelines_analysis/filter_pipelines.py
@@ -3,7 +3,7 @@
import pandas,time
import argparse
import os, sys, glob
-def filter_pipelines2(normal_germline,normal_somatic, tumor_somatic,pair_somatic,output_path,pp=[0,2,3,4,5,6]):
+def filter_pipelines2(normal_germline,normal_somatic, tumor_somatic,pair_somatic,output_path,pp=[0,2,3,4,5,6],snp_path='/home/liaoth/data/humandb/snp138Common.name.txt'):
'''
created at 2017-06-26 advances XK filter pipelines
@@ -15,7 +15,7 @@ def filter_pipelines2(normal_germline,normal_somatic, tumor_somatic,pair_somatic
:return:
'''
t1 = time.time()
-snp138_common = snp_138_common_init()
+snp138_common = snp_138_common_init(snp_path)
TUMOR_S = pd.read_csv(tumor_somatic, index_col=None)
TUMOR_P = pd.read_csv(pair_somatic, index_col=None)
NORMAL_S = pd.read_csv(normal_somatic, index_col=None)
@@ -71,7 +71,7 @@ def filter_pipelines2(normal_germline,normal_somatic, tumor_somatic,pair_somatic
descriptions.append('Coverage')
counts.append([len(TUMOR_S_filtered_index),len(TUMOR_P_filtered_index)])
if 3 in pp:
-TUMOR_S_filtered_index = snp_common(snp138_common, TUMOR_S.loc[TUMOR_S_filtered_index+special_S_index, :])
+TUMOR_S_filtered_index = snp_common(snp138_common, TUMOR_S.loc[TUMOR_S_filtered_index+special_S_index, :],)
TUMOR_P_filtered_index = snp_common(snp138_common, TUMOR_P.loc[TUMOR_P_filtered_index+special_P_index, :])
print('finish snp common filter.....')
descriptions.append('Snp_common')
@@ -168,8 +168,8 @@ def filter_pipelines2(normal_germline,normal_somatic, tumor_somatic,pair_somatic
if not os.path.isdir(os.path.dirname(output_file)):
os.makedirs(os.path.dirname(output_file))
print("filter_pipelines2(%s,%s,%s,%s,%s,pp=[3,4,5,6])" % (germline,somatic_normal,somatic_tumor,somatic_pair,output_file))
-filter_pipelines2(germline,somatic_normal,somatic_tumor,somatic_pair,output_file,pp=[3,4,5,6])
-filter_pipelines2(germline, somatic_normal, somatic_tumor, somatic_pair, output_file.replace('except_AF_depth.csv','except_AF_depth_PASS.csv'), pp=[3, 4, 6])
+filter_pipelines2(germline,somatic_normal,somatic_tumor,somatic_pair,output_file,pp=[3,4,5,6],snp_path=snp138_common_file)
+filter_pipelines2(germline, somatic_normal, somatic_tumor, somatic_pair, output_file.replace('except_AF_depth.csv','except_AF_depth_PASS.csv'), pp=[3, 4, 6],snp_path=snp138_common_file)
# print()
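This commit threads a `snp_path` argument through `filter_pipelines2` into `snp_138_common_init` instead of relying on the hard-coded `/home/liaoth/data/humandb/snp138Common.name.txt`. A hedged sketch of that call path follows; the body of `snp_138_common_init` is a guess that assumes the file holds one dbSNP rs ID per line, and `snp138_common_file` is assumed to be defined in the project `setting.py`.

```python
def snp_138_common_init(snp_path):
    # Assumption: snp138Common.name.txt lists one dbSNP rs ID per line.
    with open(snp_path) as handle:
        return set(line.strip() for line in handle if line.strip())

# The __main__ block can now pull the path from the settings module (assumed layout)
# rather than depending on one machine's filesystem:
#   from setting import snp138_common_file
#   filter_pipelines2(germline, somatic_normal, somatic_tumor, somatic_pair,
#                     output_file, pp=[3, 4, 5, 6], snp_path=snp138_common_file)
```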


2 changes: 1 addition & 1 deletion aft_pipelines_analysis/run_add_per_info_into_csv.py
@@ -46,7 +46,7 @@ def run_batch(input_path,tb_bam_path,nb_bam_path,NORMAL_SIG,TUMOR_SIG):
nb_bam_path = "%s/output/XK_result/{nb}/{nb}.recal_reads.bam" % os.path.dirname(base_outpath.rstrip('/'))

cmdlines = run_batch(input_path,tb_bam_path,nb_bam_path,NORMAL_SIG,TUMOR_SIG)
-makesure = input("If your `num of processes >4`, Please be careful of memory. It may stalled whole server.\nUsing %s processes, prepare process listing files: \n . %sIf you make sure, please type y/Y." % (num_processes,'\n'.join(glob.glob(input_path))))
+makesure = str(input("If your `num of processes >4`, Please be careful of memory. It may stall the whole server.\nUsing %s processes, prepare process listing files: \n . %sIf you make sure, please type y/Y." % (num_processes,'\n'.join(glob.glob(input_path)))))

if makesure.strip().upper() == 'Y':
pool = multiprocessing.Pool(num_processes)
5 changes: 4 additions & 1 deletion aft_pipelines_analysis/run_info_summary.py
@@ -43,7 +43,10 @@ def cov_depth(cov_info):
num_processes = 4
parsing_path = '%s/XK_result/*/*cov.info' % base_outpath

-makesure = input("If your `num of processes >4`, Please be careful of memory. It may stalled whole server.\nUsing %s processes, prepare process listing files: \n . %s\n\nIf you make sure, please type y/Y." % (num_processes,'\n'.join(glob.glob(parsing_path))))
+makesure = str(input("If your `num of processes >4`, Please be careful of memory. It may stall the whole server.\nUsing %s processes, prepare process listing files: \n . %s\n\nIf you make sure, please type y/Y." %
+                     (num_processes,'\n'.join(glob.glob(parsing_path)))
+                     )
+                )

if makesure.strip().upper() == 'Y':
pool = multiprocessing.Pool(num_processes)
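Both `run_add_per_info_into_csv.py` and `run_info_summary.py` gate a `multiprocessing.Pool` behind the y/Y confirmation above, precisely because too many workers can exhaust server memory. A minimal sketch of that bounded-pool dispatch pattern, with a hypothetical `run_cmd` worker (the real scripts build their own command lines):

```python
import multiprocessing
import os

def run_cmd(cmdline):
    # os.system keeps the sketch short; subprocess.check_call would report failures more loudly.
    return os.system(cmdline)

def dispatch(cmdlines, num_processes=4):
    # Cap concurrency so a long file list cannot stall the whole server.
    pool = multiprocessing.Pool(num_processes)
    try:
        pool.map(run_cmd, cmdlines)
    finally:
        pool.close()
        pool.join()

if __name__ == '__main__':
    dispatch(['echo first', 'echo second'], num_processes=2)
```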

