Sphinx setup ready.
ioannis-vm committed Sep 16, 2024
1 parent 57d652a · commit 12db8be
Showing 7 changed files with 177 additions and 60 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -3,3 +3,5 @@
/doc/build/
*.pyc
/.ropeproject/
/doc/cache/
/doc/source/dl_doc/
172 changes: 122 additions & 50 deletions doc/source/_extensions/generate_dl_doc.py
@@ -1,3 +1,4 @@
import hashlib
import json
import os
import shutil
@@ -10,7 +11,56 @@
from zipfile import ZipFile

import numpy as np
from doc.source._extensions.visuals import plot_fragility, plot_repair
from tqdm import tqdm

from visuals import plot_fragility, plot_repair

os.chdir('../')


def generate_md5(file_path):
    """
    Generate an MD5 hash of a file.

    Parameters
    ----------
    file_path : str
        The path to the file for which to generate the MD5 hash.

    Returns
    -------
    str
        The MD5 hash of the file.
    """
    md5 = hashlib.md5()
    with open(file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(4096), b''):
            md5.update(chunk)
    return md5.hexdigest()
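
Reading the file in 4-KiB chunks keeps memory use flat regardless of file size. For reference, on Python 3.11+ the standard library offers the same result as a one-liner (an alternative, not what this commit uses; file_path here is any existing file's path):

import hashlib

with open(file_path, 'rb') as f:
    digest = hashlib.file_digest(f, 'md5').hexdigest()  # equals generate_md5(file_path)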


def combine_md5_hashes(md5_list):
    """
    Combine a list of MD5 hashes and generate a new MD5 hash.

    Parameters
    ----------
    md5_list : list of str
        A list of MD5 hashes.

    Returns
    -------
    str
        A new MD5 hash based on the combination of the given hashes.
    """
    combined_md5 = hashlib.md5()
    for md5_hash in md5_list:
        combined_md5.update(md5_hash.encode('utf-8'))
    return combined_md5.hexdigest()
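
Because each hex digest is fed into a single MD5 object in sequence, the combined hash is order-sensitive. A minimal usage sketch (hypothetical paths; sorting first yields a stable fingerprint):

from pathlib import Path

files = sorted(Path('data').rglob('*.csv'))  # sort for a deterministic order
combined = combine_md5_hashes([generate_md5(f) for f in files])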


def get_dlml_tag(dlml):
    return '-'.join(str(dlml.parent).split('/')).replace(' ', '_')
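
get_dlml_tag flattens the parent directory of a model file into a tag usable in link targets and file names: slashes become hyphens and spaces become underscores. An illustrative (hypothetical) input:

from pathlib import Path

# 'seismic/building/component/FEMA P-58/fragility.csv'
#   -> 'seismic-building-component-FEMA_P-58'
tag = get_dlml_tag(Path('seismic/building/component/FEMA P-58/fragility.csv'))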


def create_component_group_directory(cmp_groups, root, dlml_tag):
@@ -30,7 +80,6 @@ def create_component_group_directory(cmp_groups, root, dlml_tag):
)

grp_index_contents = dedent(f"""
.. _lbl-dlml_{dlml_tag}_{grp_id.replace(".", "_")}
{"*" * len(grp_name)}
{grp_name}
@@ -39,7 +88,7 @@ def create_component_group_directory(cmp_groups, root, dlml_tag):
The following models are available:
.. toctree::
:maxdepth: 1
:maxdepth: 8
""")

@@ -62,7 +111,6 @@ def create_component_group_directory(cmp_groups, root, dlml_tag):
grp_dir.mkdir(parents=True, exist_ok=True)

grp_index_contents = dedent(f"""
.. _lbl-dlml_{dlml_tag}_{grp_id.replace(".", "_")}
{"*" * len(grp_name)}
{grp_name}
@@ -81,20 +129,17 @@ def create_component_group_directory(cmp_groups, root, dlml_tag):
return member_ids


def generate_damage_docs():
resource_folder = Path()
def generate_damage_docs(doc_folder: Path, cache_folder: Path):

doc_folder = Path('/tmp/damage')
if os.path.exists(doc_folder):
shutil.rmtree(doc_folder)
doc_folder.mkdir(parents=True, exist_ok=True)
doc_folder = doc_folder / 'damage'

resource_folder = Path()

# get all the available damage dlmls
damage_dlmls = list(resource_folder.rglob('fragility.csv'))

# create the main index file
damage_index_contents = dedent("""\
.. _lbl-dlml_damage:
*************
Damage Models
@@ -103,25 +148,35 @@ def generate_damage_docs():
The following collections are available in our Damage and Loss Model Library:
.. toctree::
:maxdepth: 1
:maxdepth: 8
""")

# for each database
for dlml in damage_dlmls:
print('Working on ', dlml)
for dlml in (pbar := tqdm(damage_dlmls)):
pbar.set_postfix({'File': f'{str(dlml)[:80]:<80}'})

# blacklist
if ignore_file(dlml):
continue

# add dlml to main damage index file
damage_index_contents += f' {dlml}/index\n'
damage_index_contents += f' {dlml.parent}/index\n'

# create a folder
(doc_folder / dlml.parent).mkdir(parents=True, exist_ok=True)

plot_fragility(
str(dlml),
str((doc_folder / dlml.parent) / 'fragility.zip'),
create_zip='1',
)
zip_hash = generate_md5(dlml)
zip_filepath = ((cache_folder) / zip_hash).with_suffix('.zip')

# if it doesn't exist in the cache, create it;
# otherwise, reuse the previously cached file
if not zip_filepath.is_file():
plot_fragility(
str(dlml),
str(zip_filepath),
create_zip='1',
)
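
This block is a content-addressed cache: the zip is named after the MD5 of the source CSV, so an unchanged file hits the cache on every rebuild, while any edit changes the hash and forces regeneration. The same pattern reappears in generate_repair_docs; a hypothetical helper that would factor out both call sites (a sketch, not part of this commit):

def cached_zip(src, cache_folder, plotter):
    # Return the cached zip for src, regenerating it on a cache miss.
    zip_filepath = (cache_folder / generate_md5(src)).with_suffix('.zip')
    if not zip_filepath.is_file():
        plotter(str(src), str(zip_filepath), create_zip='1')
    return zip_filepath

# usage here: zip_filepath = cached_zip(dlml, cache_folder, plot_fragility)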

# check if there are metadata available
dlml_json = dlml.with_suffix('.json')
@@ -142,7 +197,6 @@
)

dlml_index_contents = dedent(f"""
.. _lbl-dlml_damage_{dlml}
{"*" * len(dlml_short_name)}
{dlml_short_name}
@@ -159,7 +213,7 @@
if dlml_cmp_groups is not None:
dlml_index_contents += dedent("""
.. toctree::
:maxdepth: 1
:maxdepth: 8
""")

@@ -179,7 +233,6 @@

# create the top of the dlml index file
dlml_index_contents = dedent(f"""\
.. _lbl-dlml_damage_{dlml}
{"*" * len(dlml)}
{dlml}
Expand All @@ -194,7 +247,7 @@ def generate_damage_docs():
f.write(dlml_index_contents)

# now open the zip file
with ZipFile((doc_folder / dlml.parent) / 'fragility.zip', 'r') as zipObj:
with ZipFile(zip_filepath, 'r') as zipObj:
# for each component
for comp in sorted(zipObj.namelist()):
if comp == 'fragility':
@@ -291,20 +344,16 @@ def generate_damage_docs():
f.write(damage_index_contents)


def generate_repair_docs():
def generate_repair_docs(doc_folder: Path, cache_folder: Path):
resource_folder = Path()

doc_folder = Path('/tmp/repair')
if os.path.exists(doc_folder):
shutil.rmtree(doc_folder)
doc_folder.mkdir(parents=True, exist_ok=True)
doc_folder = doc_folder / 'repair'

# get all the available repair dlmls
repair_dlmls = list(resource_folder.rglob('consequence_repair.csv'))

# create the main index file
repair_index_contents = dedent("""\
.. _lbl-dlml_repair:
*************************
Repair Consequence Models
@@ -313,25 +362,35 @@ def generate_repair_docs():
The following collections are available in our Damage and Loss Model Library:
.. toctree::
:maxdepth: 1
:maxdepth: 8
""")

# for each database
for dlml in repair_dlmls:
print('Working on ', dlml)
for dlml in (pbar := tqdm(repair_dlmls)):
pbar.set_postfix({'File': f'{str(dlml)[:80]:<80}'})

# blacklist
if ignore_file(dlml):
continue

# add dlml to main repair index file
repair_index_contents += f' {dlml}/index\n'
repair_index_contents += f' {dlml.parent}/index\n'

# create a folder
(doc_folder / dlml.parent).mkdir(parents=True, exist_ok=True)

plot_repair(
str(dlml),
str((doc_folder / dlml.parent) / 'consequence_repair.zip'),
create_zip='1',
)
zip_hash = generate_md5(dlml)
zip_filepath = ((cache_folder) / zip_hash).with_suffix('.zip')

# if it doesn't exist in the cache, create it;
# otherwise, reuse the previously cached file
if not zip_filepath.is_file():
plot_repair(
str(dlml),
str(zip_filepath),
create_zip='1',
)
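
As noted above, the hypothetical cached_zip helper would collapse this block to a single call:

zip_filepath = cached_zip(dlml, cache_folder, plot_repair)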

# check if there is metadata available
dlml_json = dlml.with_suffix('.json')
@@ -352,7 +411,6 @@
)

dlml_index_contents = dedent(f"""
.. _lbl-dlml_repair_{dlml}
{"*" * len(dlml_short_name)}
{dlml_short_name}
@@ -369,12 +427,12 @@
if dlml_cmp_groups is not None:
dlml_index_contents += dedent("""
.. toctree::
:maxdepth: 1
:maxdepth: 8
""")

# create the directory structure and index files
dlml_tag = '-'.join(str(dlml.parent).split('/')).replace(' ', '_')
dlml_tag = get_dlml_tag(dlml)
grp_ids = create_component_group_directory(
dlml_cmp_groups,
root=(doc_folder / dlml.parent),
@@ -389,7 +447,6 @@

# create the top of the dlml index file
dlml_index_contents = dedent(f"""\
.. _lbl-dlml_repair_{dlml}
{"*" * len(dlml)}
{dlml}
Expand All @@ -404,9 +461,7 @@ def generate_repair_docs():
f.write(dlml_index_contents)

# now open the zip file
with ZipFile(
(doc_folder / dlml.parent) / 'consequence_repair.zip', 'r'
) as zipObj:
with ZipFile(zip_filepath, 'r') as zipObj:
html_files = [
Path(filepath).stem for filepath in sorted(zipObj.namelist())
]
@@ -521,10 +576,27 @@ def generate_repair_docs():
f.write(repair_index_contents)


def main(args):
    generate_damage_docs()
    generate_repair_docs()


def ignore_file(dlml):
    """Ignore certain paths due to lack of support. To remove."""
    if str(dlml.parent) in {
        'seismic/water_network/portfolio/Hazus v6.1',
        'flood/building/portfolio/Hazus v6.1',
    }:
        return True
    return False


def main():
    cache_folder = Path('doc/cache')

    doc_folder = Path('doc/source/dl_doc')
    if os.path.exists(doc_folder):
        shutil.rmtree(doc_folder)
    doc_folder.mkdir(parents=True, exist_ok=True)

    generate_damage_docs(doc_folder, cache_folder)
    generate_repair_docs(doc_folder, cache_folder)


if __name__ == '__main__':
    main(sys.argv[1:])
    main()
40 changes: 40 additions & 0 deletions doc/source/_extensions/sphinx_generate_dl_doc.py
@@ -0,0 +1,40 @@
import os
import subprocess

from sphinx.application import Sphinx


def run_script(app: Sphinx):
    """
    Run a custom Python script to generate files before Sphinx builds
    the documentation.

    Parameters
    ----------
    app : Sphinx
        The Sphinx application instance.
    """
    script_path = os.path.join(app.srcdir, '_extensions', 'generate_dl_doc.py')

    result = subprocess.run(['python', script_path], check=True)

    if result.returncode != 0:
        raise RuntimeError('Script execution failed')


def setup(app: Sphinx):
    """
    Set up the custom Sphinx extension.

    Parameters
    ----------
    app : Sphinx
        The Sphinx application instance.
    """
    app.connect('builder-inited', run_script)

    return {
        'version': '1.0',
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
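
Two notes on this extension. First, since subprocess.run is invoked with check=True, a non-zero exit raises CalledProcessError before the returncode check is ever reached, so the explicit RuntimeError branch is effectively dead code. Second, the module still has to be activated in conf.py; the usual wiring looks roughly like this (a sketch, assuming the module lives under _extensions as in this commit):

# conf.py (sketch)
import os
import sys

sys.path.insert(0, os.path.abspath('_extensions'))

extensions = [
    'sphinx_generate_dl_doc',  # runs generate_dl_doc.py on builder-inited
]
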
4 changes: 0 additions & 4 deletions doc/source/_extensions/visuals.py
@@ -423,8 +423,6 @@ def plot_fragility(comp_db_path, output_path, create_zip='0'): # noqa: C901, D1

shutil.rmtree(output_path)

print('Successfully generated component vulnerability figures.') # noqa: T201


def plot_repair(comp_db_path, output_path, create_zip='0'): # noqa: C901, D103, PLR0912, PLR0915
# TODO: # noqa: TD002
@@ -993,8 +991,6 @@ def plot_repair(comp_db_path, output_path, create_zip='0'): # noqa: C901, D103,

shutil.rmtree(output_path)

print('Successfully generated component repair consequence figures.') # noqa: T201


def check_diff(comp_db_path, output_path): # noqa: D103
# if the output path already exists