Skip to content

Commit

Permalink
clean2
Browse files Browse the repository at this point in the history
  • Loading branch information
leonard committed Jun 9, 2024
1 parent 41f98fb commit ec70b2c
Show file tree
Hide file tree
Showing 5 changed files with 9 additions and 98 deletions.
2 changes: 1 addition & 1 deletion scripts/bokeh_plot/components/convert_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def prepare_time_data(time_data_list, keypair, timepairs):
export = data.to_dict(orient="list")
return export

# include yaml names and keys

def prepare_size_data(size_data, keypair, sizepairs):
"""Returns a dictionary containing the given data in the format for the size plot."""
key, keyname = keypair
Expand Down
1 change: 0 additions & 1 deletion scripts/bokeh_plot/components/helpers.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
"""Helper functions."""


def convert_dic_to_list(data):
    """Return a list with every element of *data* converted to ``str``.

    Works on any iterable. NOTE: despite the name, passing a ``dict``
    converts its *keys* (iterating a dict yields keys, not values);
    pass ``data.values()`` explicitly if the values are wanted.
    """
    return [str(i) for i in data]
Expand Down
1 change: 1 addition & 0 deletions scripts/bokeh_plot/components/plot_css_html.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,7 @@ def get_button_style():
}
"""]


def get_global_style():
"""Returns the global CSS style."""
return """
Expand Down
6 changes: 3 additions & 3 deletions scripts/bokeh_plot/plot.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
from components.log_init import log_init
log_init(snakemake.log[0]) # type: ignore

import csv
import os
import pandas as pd

from bokeh.layouts import column, row
Expand Down Expand Up @@ -79,9 +77,11 @@ def create_plot():
with open(SIZE_INPUT, "r", encoding="utf-8") as size_file, open(TIME_INPUT, "r", encoding="utf-8") as timing_file:
time_data = pd.read_csv(timing_file, delimiter="\t")
size_data = pd.read_csv(size_file, delimiter="\t")

for key in KEYS.keys():
time_dic = prepare_time_data(time_data, (key, KEYS[key]), TIME["NAMES"])
size_dic = prepare_size_data(size_data, (key, KEYS[key]), SIZE["NAMES"])

time_x_range = round(max(time_dic["wall_clock_time_in_seconds"]) * 1.05, 3)
size_x_range = round(max(size_dic["GB_TOTAL_SIZE"]) * 1.05, 3)

Expand All @@ -96,8 +96,8 @@ def create_plot():
vercel_div = create_vercel_div()
all_elements = column(both_plots, vercel_div, sizing_mode="scale_both")
tabs.append(TabPanel(child=all_elements, title=KEYS[key]))

add_description_tab(tabs)

save(Tabs(tabs=tabs, sizing_mode="scale_both", stylesheets=[get_tab_style(), get_global_style()]))

create_plot()
Expand Down
97 changes: 4 additions & 93 deletions scripts/plot_parameters.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ KEYS:
U: U
U+R: "U and R"


TIME:
FORMAT:
- determine_query_length_in_seconds
Expand All @@ -28,15 +29,15 @@ TIME:
load_index_in_seconds: "Load index"
load_index_percentage: "Load index percentage"
compute_minimiser_avg_in_seconds: "Compute minimizer (avg)"
compute_minimiser_max_percentage: "Compute minimizer (max) percentage"
compute_minimiser_avg_percentage: "Compute minimizer (avg) percentage"
compute_minimiser_max_in_seconds: "Compute minimizer (max)"
compute_minimiser_max_percentage: "Compute minimizer (max) percentage"
query_ibf_avg_in_seconds: "Query IBF (avg)"
query_ibf_max_percentage: "Query IBF (max) percentage"
query_ibf_avg_percentage: "Query IBF (avg) percentage"
query_ibf_max_in_seconds: "Query IBF (max)"
query_ibf_max_percentage: "Query IBF (max) percentage"
generate_results_avg_in_seconds: "Generate results (avg)"
generate_results_max_percentage: "Generate results (max) percentage"
generate_results_avg_percentage: "Generate results (avg) percentage"
generate_results_max_in_seconds: "Generate results (max)"
generate_results_max_percentage: "Generate results (max) percentage"

Expand All @@ -58,93 +59,3 @@ SIZE:
LEVEL_2_AVG_LOAD_FACTOR: "Level 2: Avg load factor"
LEVEL_3_AVG_LOAD_FACTOR: "Level 3: Avg load factor"
GB_TOTAL_SIZE: "Total size"

# SIZE:
# FORMAT:
# LEVEL_0: "Level 0"
# LEVEL_1: "Level 1"
# LEVEL_2: "Level 2"
# LEVEL_3: "Level 3"
# HOVER:
# SUBKEY_VALUE: "SUBKEY_VALUE"
# SIZE: "Size"
# # LEVEL_0_BITSIZE: "Level 0 size"
# # LEVEL_1_BITSIZE: "Level 1 size"
# # LEVEL_2_BITSIZE: "Level 2 size"
# # LEVEL_3_BITSIZE: "Level 3 size"
# ADVANCED_HOVER:
# SUBKEY_VALUE: "SUBKEY_VALUE"
# SIZE: "Size"
# AVG_LOAD_FACTOR: "Avg load factor"
# # LEVEL_0_BIT_SIZE: "Level 0 size"
# # LEVEL_1_BIT_SIZE: "Level 1 size"
# # LEVEL_2_BIT_SIZE: "Level 2 size"
# # LEVEL_3_BIT_SIZE: "Level 3 size"
# # LEVEL_0_AVG_LOAD_FACTOR: "Level 0 avg load factor"
# # LEVEL_1_AVG_LOAD_FACTOR: "Level 1 avg load factor"
# # LEVEL_2_AVG_LOAD_FACTOR: "Level 2 avg load factor"
# # LEVEL_3_AVG_LOAD_FACTOR: "Level 3 avg load factor"

# KEYS_FORMAT:
# - "alpha"
# - "hash"
# - "kmer"
# - "relaxed-fpr"
# - "none"
# - "U"
# - "U+R"

# KEYS_NAMES:
# - "alpha"
# - "hash"
# - "k-mer"
# - "relaxed-fpr"
# - "no U no R"
# - "U"
# - "U and R"

# TIME_FORMAT:
# - "SUBKEY"
# - "determine_query_length_in_seconds"
# - "query_file_io_in_seconds"
# - "load_index_in_seconds"
# - "compute_minimiser_max_in_seconds"
# - "query_ibf_max_in_seconds"
# - "generate_results_max_in_seconds"

# TIME_NAMES:
# - "Determine query length"
# - "Queryfile IO"
# - "Load index"
# - "Compute minimizer (max)"
# - "Query IBF (max)"
# - "Generate results (max)"

# TIME_HOVER:
# - "Determine query length"
# - "Queryfile IO"
# - "Load index"
# - "Compute minimizer (max)"
# - "Query IBF (max)"
# - "Generate results (max)"

# TIME_HOVER_ADVANCED:
# - "Determine query length"
# - "Queryfile IO"
# - "Load index"
# - "Compute minimizer (max)"
# - "Query IBF (max)"
# - "Generate results (max)"

# SIZE_FORMAT:
# - "SUBKEY"
# - "LEVEL_0"
# - "LEVEL_1"
# - "LEVEL_2"
# - "LEVEL_3"

# SIZE_NAMES:
# - "Level 0"
# - "Level 1"
# - "Level 2"
# - "Level 3"

0 comments on commit ec70b2c

Please sign in to comment.