from random import randint
import inspect
from datetime import datetime
from urllib.parse import quote_plus
import requests
import json
import subprocess
import os
import contextlib
import io

from phylanx.ast.physl import print_physl_src

try:
    from IPython.core.display import display, HTML
except:
    pass

from random import random

if "TRAVELER_PORT" in os.environ:
    traveler_port = int(os.environ["TRAVELER_PORT"])
else:
    traveler_port = 8000

# Allow an environment variable to redirect output to somewhere other than
# localhost, e.g. a traveler instance outside the docker container or on a
# different machine
base_url = "http://%s:%d" % (os.environ.get("TRAVELER_IP", "localhost"), traveler_port)
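# Example (hypothetical host and port): to point this module at a Traveler
# instance running elsewhere, set the variables before this file is imported,
# e.g. from a shell:
#
#   export TRAVELER_IP=192.168.1.42
#   export TRAVELER_PORT=8080
#
# or from Python, prior to the import:
#
#   os.environ["TRAVELER_IP"] = "192.168.1.42"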
def in_notebook():
    try:
        get_ipython().config
        return True
    except:
        return False


def parse_traveler_response(resp, verbose):
    result = resp.json()
    # TODO: can use resp.iter_content(chunk_size=None, decode_unicode=True) to
    # catch and print partial JSON while the data is bundling (instead of
    # waiting for the whole process to finish), but displaying updates in
    # Jupyter would require a fancier widget that we can update round-trip
    # style...
    if verbose:
        print(result['log'])
    return result
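# A minimal, commented-out sketch of the streaming idea mentioned in the TODO
# above (hypothetical; not used by the functions below): print each chunk as
# it arrives, then parse the accumulated text as JSON.
#
#   def stream_traveler_response(resp, verbose):
#       buffered = ''
#       for chunk in resp.iter_content(chunk_size=None, decode_unicode=True):
#           buffered += chunk
#           if verbose:
#               print(chunk, end='')
#       return json.loads(buffered)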
def visualizeInTraveler(fun, verbose=False):
    """Bundle the performance data collected for a Phylanx-decorated function
    and post it to a traveler server for visualization."""
    fun_id = randint(0, 2 << 31)
    fun_name = fun.backend.wrapped_function.__name__
    if verbose:
        print("APEX_OTF2:", os.environ.get("APEX_OTF2", "is not set"))
        print("APEX_PAPI_METRICS:", os.environ.get("APEX_PAPI_METRICS", "is not set"))
    if not hasattr(fun, "__perfdata__"):
        print("Performance data was not collected for", fun_name)
        return
    # Capture the pretty-printed PhySL source by redirecting stdout
    physl_src_raw = fun.get_physl_source()
    f = io.StringIO()
    with contextlib.redirect_stdout(f):
        print_physl_src(physl_src_raw)
    # Note: dataset label is optional + no longer needs to be unique;
    # defaults to "Untitled dataset" if omitted;
    # '/' characters inside the label will be interpreted as parent folders
    argMap = {
        "label": fun_id,
        "tags": [fun_name, 'JetLag'],  # Can attach any string as a tag
        "csv": fun.__perfdata__[0],
        "newick": fun.__perfdata__[1],
        "dot": fun.__perfdata__[2],
        "physl": f.getvalue(),
        "python": fun.get_python_src(fun.backend.wrapped_function)
    }
    resp = requests.post(base_url + '/datasets', json=argMap, stream=True)
    resp.raise_for_status()
    trav_id = parse_traveler_response(resp, verbose)['datasetId']
    otf2Path = 'OTF2_archive/APEX.otf2'
    if os.path.exists(otf2Path):
        # Upload the OTF2 trace separately because we want to stream its
        # contents instead of trying to load the whole thing into memory
        def iterOtf2():
            otfPipe = subprocess.Popen(['otf2-print', otf2Path], stdout=subprocess.PIPE)
            for bytesChunk in otfPipe.stdout:
                yield bytesChunk
            otfPipe.stdout.flush()
        otf2Response = requests.post(
            base_url + '/datasets/%s/otf2' % trav_id,
            stream=True,
            timeout=None,
            data=iterOtf2(),
            headers={'content-type': 'text/text'}
        )
        otf2Response.raise_for_status()
        parse_traveler_response(otf2Response, verbose)
    if in_notebook():
        display(HTML("<a target='the-viz' href='" + base_url + "/static/interface.html#%s'>Visualize %s-%d</a>" % (trav_id, fun_name, fun_id)))
    else:
        print("URL:", base_url + "/static/interface.html")
def visualizeDirInTraveler(jobid, pre, verbose=False):
    """Post the performance data files found under the directory `pre` to a
    traveler server and return the (dataset, otf2) responses."""
    # Read any small text files that exist
    argMap = {
        'csv': pre + '/py-csv.txt',
        'newick': pre + '/py-tree.txt',
        'dot': pre + '/py-graph.txt',
        'physl': pre + '/physl-src.txt',
        'python': pre + '/py-src.txt'
    }
    postData = {
        "tags": ['Ran via JetLag']
    }
    with open(pre + '/label.txt', 'r') as fd:
        postData['label'] = label = fd.read().strip()
    for arg, path in argMap.items():
        if os.path.exists(path):
            with open(path, 'r') as fd:
                postData[arg] = fd.read()
    # Create the dataset in traveler
    mainResponse = requests.post(base_url + '/datasets', json=postData)
    mainResponse.raise_for_status()
    trav_id = parse_traveler_response(mainResponse, verbose)['datasetId']
    otf2Path = pre + '/OTF2_archive/APEX.otf2'
    if os.path.exists(otf2Path):
        # Upload the OTF2 trace separately because we want to stream its
        # contents instead of trying to load the whole thing into memory
        def iterOtf2():
            otfPipe = subprocess.Popen(['otf2-print', otf2Path], stdout=subprocess.PIPE)
            for bytesChunk in otfPipe.stdout:
                yield bytesChunk
            otfPipe.stdout.flush()
        otf2Response = requests.post(
            base_url + '/datasets/%s/otf2' % trav_id,
            stream=True,
            timeout=None,
            data=iterOtf2(),
            headers={'content-type': 'text/text'}
        )
        otf2Response.raise_for_status()
        parse_traveler_response(otf2Response, verbose)
    else:
        otf2Response = None
    if in_notebook():
        display(HTML("<a target='the-viz' href='" + base_url + "/static/interface.html#%s'>Visualize %s</a>" % (trav_id, label)))
    else:
        print("URL:", base_url + "/static/interface.html")
    return (mainResponse, otf2Response)
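# A hypothetical usage sketch (commented out): upload an existing JetLag run
# directory by hand. The path below is illustrative; visualizeRemoteInTraveler
# (defined next) builds the same kind of path from a job id.
#
#   main_resp, otf2_resp = visualizeDirInTraveler(
#       '12345', 'jobdata-12345/run_dir', verbose=True)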
def visualizeRemoteInTraveler(jobid, verbose=False):
    """Locate the run directory for a JetLag job id and upload it to traveler."""
    pre = 'jobdata-' + jobid + '/run_dir'
    return visualizeDirInTraveler(jobid, pre, verbose)


if __name__ == "__main__":
    # Command-line entry point: upload the run directory for the given job id
    # and echo the server responses; the OTF2 response may be None if no
    # trace file was found.
    import sys
    (m, o) = visualizeRemoteInTraveler(sys.argv[1])
    for chunk in m.iter_content():
        print(chunk.decode(), end='')
    if o is not None:
        for chunk in o.iter_content():
            print(chunk.decode(), end='')