Commit c961f9f

fpath -> path

1 parent bff93e5 commit c961f9f

File tree: 13 files changed (+142, -142 lines)

benchmark/job/cli.py

Lines changed: 2 additions & 2 deletions

@@ -290,8 +290,8 @@ def _generate_job_workload(
         )
 
         for qname in query_names:
-            sql_fpath = fully_resolve_path(queries_parent_path / f"{qname}.sql")
-            f.write(f"Q{qname},{sql_fpath}\n")
+            sql_path = fully_resolve_path(queries_parent_path / f"{qname}.sql")
+            f.write(f"Q{qname},{sql_path}\n")
 
     workload_symlink_path = dbgym_workspace.link_result(workload_path)
     assert workload_symlink_path == expected_workload_symlink_path

benchmark/job/load_info.py

Lines changed: 8 additions & 8 deletions

@@ -38,10 +38,10 @@ class JobLoadInfo(LoadInfoBaseClass):
     def __init__(self, dbgym_workspace: DBGymWorkspace):
         # Schema (directly in the codebase).
         job_codebase_path = dbgym_workspace.base_dbgym_repo_dpath / "benchmark" / "job"
-        self._schema_fpath = job_codebase_path / JOB_SCHEMA_FNAME
+        self._schema_path = job_codebase_path / JOB_SCHEMA_FNAME
         assert (
-            self._schema_fpath.exists()
-        ), f"self._schema_fpath ({self._schema_fpath}) does not exist"
+            self._schema_path.exists()
+        ), f"self._schema_path ({self._schema_path}) does not exist"
 
         # Tables
         tables_path = fully_resolve_path(
@@ -51,19 +51,19 @@ def __init__(self, dbgym_workspace: DBGymWorkspace):
         )
         self._tables_and_paths = []
         for table in JobLoadInfo.TABLES:
-            table_fpath = tables_path / f"{table}.csv"
-            self._tables_and_paths.append((table, table_fpath))
+            table_path = tables_path / f"{table}.csv"
+            self._tables_and_paths.append((table, table_path))
 
-    def get_schema_fpath(self) -> Path:
-        return self._schema_fpath
+    def get_schema_path(self) -> Path:
+        return self._schema_path
 
     def get_tables_and_paths(self) -> list[tuple[str, Path]]:
         return self._tables_and_paths
 
     def get_table_file_delimiter(self) -> str:
         return ","
 
-    def get_constraints_fpath(self) -> Optional[Path]:
+    def get_constraints_path(self) -> Optional[Path]:
         # JOB does not have any constraints. It does have indexes, but we don't want to create
         # those indexes so that the tuning agent can start from a clean slate.
         return None

benchmark/tpch/cli.py

Lines changed: 3 additions & 3 deletions

@@ -233,11 +233,11 @@ def _generate_tpch_workload(
         )
 
         for qname in query_names:
-            sql_fpath = fully_resolve_path(queries_parent_path / f"{qname}.sql")
+            sql_path = fully_resolve_path(queries_parent_path / f"{qname}.sql")
             assert is_fully_resolved(
-                sql_fpath
+                sql_path
             ), "We should only write existent real absolute paths to a file"
-            f.write(f"S{seed}-Q{qname},{sql_fpath}\n")
+            f.write(f"S{seed}-Q{qname},{sql_path}\n")
 
     workload_symlink_path = dbgym_workspace.link_result(workload_path)
     assert workload_symlink_path == expected_workload_symlink_path

benchmark/tpch/load_info.py

Lines changed: 10 additions & 10 deletions

@@ -27,14 +27,14 @@ def __init__(self, dbgym_workspace: DBGymWorkspace, scale_factor: float):
         tpch_codebase_path = (
             dbgym_workspace.base_dbgym_repo_dpath / "benchmark" / "tpch"
         )
-        self._schema_fpath = tpch_codebase_path / TPCH_SCHEMA_FNAME
+        self._schema_path = tpch_codebase_path / TPCH_SCHEMA_FNAME
         assert (
-            self._schema_fpath.exists()
-        ), f"self._schema_fpath ({self._schema_fpath}) does not exist"
-        self._constraints_fpath = tpch_codebase_path / TPCH_CONSTRAINTS_FNAME
+            self._schema_path.exists()
+        ), f"self._schema_path ({self._schema_path}) does not exist"
+        self._constraints_path = tpch_codebase_path / TPCH_CONSTRAINTS_FNAME
         assert (
-            self._constraints_fpath.exists()
-        ), f"self._constraints_fpath ({self._constraints_fpath}) does not exist"
+            self._constraints_path.exists()
+        ), f"self._constraints_path ({self._constraints_path}) does not exist"
 
         # Tables
         tables_path = fully_resolve_path(
@@ -47,14 +47,14 @@ def __init__(self, dbgym_workspace: DBGymWorkspace, scale_factor: float):
             table_path = tables_path / f"{table}.tbl"
             self._tables_and_paths.append((table, table_path))
 
-    def get_schema_fpath(self) -> Path:
-        return self._schema_fpath
+    def get_schema_path(self) -> Path:
+        return self._schema_path
 
     def get_tables_and_paths(self) -> list[tuple[str, Path]]:
         return self._tables_and_paths
 
     def get_table_file_delimiter(self) -> str:
         return "|"
 
-    def get_constraints_fpath(self) -> Optional[Path]:
-        return self._constraints_fpath
+    def get_constraints_path(self) -> Optional[Path]:
+        return self._constraints_path

dbms/load_info_base_class.py

Lines changed: 2 additions & 2 deletions

@@ -9,7 +9,7 @@ class LoadInfoBaseClass:
     copy the comments or type annotations or else they might become out of sync.
     """
 
-    def get_schema_fpath(self) -> Path:
+    def get_schema_path(self) -> Path:
         raise NotImplementedError
 
     def get_tables_and_paths(self) -> list[tuple[str, Path]]:
@@ -21,5 +21,5 @@ def get_table_file_delimiter(self) -> str:
 
     # If the subclassing benchmark does not have constraints, you can return None here.
     # Constraints are also indexes.
-    def get_constraints_fpath(self) -> Optional[Path]:
+    def get_constraints_path(self) -> Optional[Path]:
         raise NotImplementedError
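
For reference, here is a minimal sketch of a load-info class written against the renamed interface. Everything except the four LoadInfoBaseClass method names is an assumption made for illustration (the ToyLoadInfo class, the data_dpath argument, and the import path inferred from the file location).

from pathlib import Path
from typing import Optional

from dbms.load_info_base_class import LoadInfoBaseClass


class ToyLoadInfo(LoadInfoBaseClass):
    # Illustrative only: a single CSV table and no constraints file.
    def __init__(self, data_dpath: Path) -> None:
        self._schema_path = data_dpath / "schema.sql"
        self._tables_and_paths = [("toy", data_dpath / "toy.csv")]

    def get_schema_path(self) -> Path:
        return self._schema_path

    def get_tables_and_paths(self) -> list[tuple[str, Path]]:
        return self._tables_and_paths

    def get_table_file_delimiter(self) -> str:
        return ","

    def get_constraints_path(self) -> Optional[Path]:
        # Like JOB, this toy benchmark returns None because it has no constraints.
        return None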

dbms/postgres/cli.py

Lines changed: 6 additions & 6 deletions

@@ -301,14 +301,14 @@ def _load_into_dbdata(
     conn: sqlalchemy.Connection,
     load_info: LoadInfoBaseClass,
 ) -> None:
-    sql_file_execute(dbgym_workspace, conn, load_info.get_schema_fpath())
+    sql_file_execute(dbgym_workspace, conn, load_info.get_schema_path())
 
     # Truncate all tables first before even loading a single one.
     for table, _ in load_info.get_tables_and_paths():
         sqlalchemy_conn_execute(conn, f"TRUNCATE {table} CASCADE")
     # Then, load the tables.
-    for table, table_fpath in load_info.get_tables_and_paths():
-        with dbgym_workspace.open_and_save(table_fpath, "r") as table_csv:
+    for table, table_path in load_info.get_tables_and_paths():
+        with dbgym_workspace.open_and_save(table_path, "r") as table_csv:
             assert conn.connection.dbapi_connection is not None
             cur = conn.connection.dbapi_connection.cursor()
             try:
@@ -320,9 +320,9 @@ def _load_into_dbdata(
             finally:
                 cur.close()
 
-    constraints_fpath = load_info.get_constraints_fpath()
-    if constraints_fpath is not None:
-        sql_file_execute(dbgym_workspace, conn, constraints_fpath)
+    constraints_path = load_info.get_constraints_path()
+    if constraints_path is not None:
+        sql_file_execute(dbgym_workspace, conn, constraints_path)
 
 
 # The start and stop functions slightly duplicate functionality from pg_conn.py. However, I chose to do it this way

env/pg_conn.py

Lines changed: 17 additions & 17 deletions

@@ -37,32 +37,32 @@ def __init__(
         self,
         dbgym_workspace: DBGymWorkspace,
         pgport: int,
-        pristine_dbdata_snapshot_fpath: Path,
+        pristine_dbdata_snapshot_path: Path,
         dbdata_parent_dpath: Path,
         pgbin_path: Union[str, Path],
         # Whether this is None determines whether Boot is enabled.
-        boot_config_fpath: Optional[Path],
+        boot_config_path: Optional[Path],
     ) -> None:
 
         self.dbgym_workspace = dbgym_workspace
         self.pgport = pgport
         self.pgbin_path = pgbin_path
-        self.boot_config_fpath = boot_config_fpath
+        self.boot_config_path = boot_config_path
         self.log_step = 0
 
         # All the paths related to dbdata
-        # pristine_dbdata_snapshot_fpath is the .tgz snapshot that represents the starting state
+        # pristine_dbdata_snapshot_path is the .tgz snapshot that represents the starting state
         # of the database (with the default configuration). It is generated by a call to
         # `python tune.py dbms postgres ...` and should not be overwritten.
-        self.pristine_dbdata_snapshot_fpath = pristine_dbdata_snapshot_fpath
-        # checkpoint_dbdata_snapshot_fpath is the .tgz snapshot that represents the current
+        self.pristine_dbdata_snapshot_path = pristine_dbdata_snapshot_path
+        # checkpoint_dbdata_snapshot_path is the .tgz snapshot that represents the current
         # state of the database as it is being tuned. It is generated while tuning and is
         # discarded once tuning is completed.
-        self.checkpoint_dbdata_snapshot_fpath = (
+        self.checkpoint_dbdata_snapshot_path = (
            dbgym_workspace.dbgym_tmp_path / "checkpoint_dbdata.tgz"
         )
         # dbdata_parent_dpath is the parent directory of the dbdata that is *actively being tuned*.
-        # It is *not* the parent directory of pristine_dbdata_snapshot_fpath.
+        # It is *not* the parent directory of pristine_dbdata_snapshot_path.
         # Setting this lets us control the hardware device dbdata is built on (e.g. HDD vs. SSD).
         self.dbdata_parent_dpath = dbdata_parent_dpath
         # dbdata_dpath is the dbdata that is *actively being tuned*
@@ -102,16 +102,16 @@ def disconnect(self) -> None:
         self._conn = None
 
     def move_log(self) -> None:
-        pglog_fpath = (
+        pglog_path = (
             self.dbgym_workspace.cur_task_runs_artifacts_path(mkdir=True)
             / f"pg{self.pgport}.log"
         )
-        pglog_this_step_fpath = (
+        pglog_this_step_path = (
             self.dbgym_workspace.cur_task_runs_artifacts_path(mkdir=True)
             / f"pg{self.pgport}.log.{self.log_step}"
         )
-        if pglog_fpath.exists():
-            shutil.move(pglog_fpath, pglog_this_step_fpath)
+        if pglog_path.exists():
+            shutil.move(pglog_path, pglog_this_step_path)
         self.log_step += 1
 
     def force_statement_timeout(self, timeout: float) -> None:
@@ -273,7 +273,7 @@ def restart_with_changes(
             "cf",
             # We append .tmp so that if we fail in the *middle* of running tar, we
             # still have the previous checkpoint available to us
-            f"{self.checkpoint_dbdata_snapshot_fpath}.tmp",
+            f"{self.checkpoint_dbdata_snapshot_path}.tmp",
             "-C",
             parent_dpath_of_path(self.dbdata_dpath),
             self.dbdata_dpath,
@@ -345,8 +345,8 @@ def restart_with_changes(
         )
 
         # Set up Boot if we're told to do so
-        if self.boot_config_fpath is not None:
-            with self.dbgym_workspace.open_and_save(self.boot_config_fpath) as f:
+        if self.boot_config_path is not None:
+            with self.dbgym_workspace.open_and_save(self.boot_config_path) as f:
                 boot_config = yaml.safe_load(f)
 
                 self._set_up_boot(
@@ -483,10 +483,10 @@ def get_system_knobs(self) -> dict[str, str]:
         return knobs
 
     def restore_pristine_snapshot(self) -> bool:
-        return self._restore_snapshot(self.pristine_dbdata_snapshot_fpath)
+        return self._restore_snapshot(self.pristine_dbdata_snapshot_path)
 
     def restore_checkpointed_snapshot(self) -> bool:
-        return self._restore_snapshot(self.checkpoint_dbdata_snapshot_fpath)
+        return self._restore_snapshot(self.checkpoint_dbdata_snapshot_path)
 
     def _restore_snapshot(
         self,
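
The .tmp suffix in the restart_with_changes hunk is a write-then-rename pattern: tar into a temporary file so that a crash partway through the archive step leaves the previous checkpoint intact, then move the finished archive into place. A standalone sketch of that pattern using the same tar flags; the checkpoint_dbdata helper below is illustrative, not this module's API.

import shutil
import subprocess
from pathlib import Path


def checkpoint_dbdata(dbdata_dpath: Path, snapshot_path: Path) -> None:
    # Archive into <snapshot>.tmp first; the old snapshot stays valid if tar fails partway.
    tmp_path = Path(f"{snapshot_path}.tmp")
    subprocess.run(
        ["tar", "cf", str(tmp_path), "-C", str(dbdata_dpath.parent), dbdata_dpath.name],
        check=True,
    )
    # Effectively a rename when source and destination are on the same filesystem.
    shutil.move(str(tmp_path), str(snapshot_path))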

env/tuning_artifacts.py

Lines changed: 7 additions & 7 deletions

@@ -68,11 +68,11 @@ class DBMSConfigDelta:
     qknobs: QueryKnobsDelta
 
 
-def get_delta_at_step_fpath(tuning_artifacts_dpath: Path, step_num: int) -> Path:
+def get_delta_at_step_path(tuning_artifacts_dpath: Path, step_num: int) -> Path:
     return tuning_artifacts_dpath / f"step{step_num}_delta.json"
 
 
-def get_metadata_fpath(tuning_artifacts_dpath: Path) -> Path:
+def get_metadata_path(tuning_artifacts_dpath: Path) -> Path:
     return tuning_artifacts_dpath / "metadata.json"
 
 
@@ -88,7 +88,7 @@ def __init__(
         self.next_step_num = 0
 
         # Write metadata file
-        with get_metadata_fpath(self.tuning_artifacts_dpath).open("w") as f:
+        with get_metadata_path(self.tuning_artifacts_dpath).open("w") as f:
             json.dump(metadata.asdict(), f)
 
     def write_step(self, dbms_cfg_delta: DBMSConfigDelta) -> None:
@@ -97,7 +97,7 @@ def write_step(self, dbms_cfg_delta: DBMSConfigDelta) -> None:
         """
         curr_step_num = self.next_step_num
         self.next_step_num += 1
-        with get_delta_at_step_fpath(self.tuning_artifacts_dpath, curr_step_num).open(
+        with get_delta_at_step_path(self.tuning_artifacts_dpath, curr_step_num).open(
             "w"
         ) as f:
             json.dump(asdict(dbms_cfg_delta), f)
@@ -108,12 +108,12 @@ def __init__(self, tuning_artifacts_dpath: Path) -> None:
         self.tuning_artifacts_dpath = tuning_artifacts_dpath
         assert is_fully_resolved(self.tuning_artifacts_dpath)
         num_steps = 0
-        while get_delta_at_step_fpath(self.tuning_artifacts_dpath, num_steps).exists():
+        while get_delta_at_step_path(self.tuning_artifacts_dpath, num_steps).exists():
             num_steps += 1
         self.num_steps = num_steps
 
     def get_metadata(self) -> TuningMetadata:
-        with get_metadata_fpath(self.tuning_artifacts_dpath).open("r") as f:
+        with get_metadata_path(self.tuning_artifacts_dpath).open("r") as f:
             data = json.load(f)
             return TuningMetadata(
                 workload_path=Path(data["workload_path"]),
@@ -126,7 +126,7 @@ def get_metadata(self) -> TuningMetadata:
 
     def get_delta_at_step(self, step_num: int) -> DBMSConfigDelta:
         assert step_num >= 0 and step_num < self.num_steps
-        with get_delta_at_step_fpath(self.tuning_artifacts_dpath, step_num).open(
+        with get_delta_at_step_path(self.tuning_artifacts_dpath, step_num).open(
             "r"
         ) as f:
             data = json.load(f)
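
A quick sketch of the on-disk layout implied by the two renamed helpers. The directory below is made up, and the import assumes the module is importable as env.tuning_artifacts; in practice tuning_artifacts_dpath comes from the workspace.

from pathlib import Path

from env.tuning_artifacts import get_delta_at_step_path, get_metadata_path

artifacts_dpath = Path("/tmp/tuning_artifacts")  # illustrative path
print(get_metadata_path(artifacts_dpath))          # /tmp/tuning_artifacts/metadata.json
print(get_delta_at_step_path(artifacts_dpath, 0))  # /tmp/tuning_artifacts/step0_delta.json
print(get_delta_at_step_path(artifacts_dpath, 1))  # /tmp/tuning_artifacts/step1_delta.json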

env/workload.py

Lines changed: 3 additions & 3 deletions

@@ -10,12 +10,12 @@ def __init__(self, dbgym_workspace: DBGymWorkspace, workload_dpath: Path) -> Non
         assert is_fully_resolved(self.workload_dpath)
 
         self.queries: dict[str, str] = {}
-        order_fpath = self.workload_dpath / "order.txt"
+        order_path = self.workload_dpath / "order.txt"
         self.query_order: list[str] = []
 
-        assert order_fpath.exists()
+        assert order_path.exists()
 
-        with self.dbgym_workspace.open_and_save(order_fpath) as f:
+        with self.dbgym_workspace.open_and_save(order_path) as f:
             for line in f:
                 qid, qpath = line.strip().split(",")
                 qpath = Path(qpath)
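
The order.txt parsed here is the file that _generate_job_workload and _generate_tpch_workload write above, one "<query id>,<fully resolved .sql path>" pair per line. A self-contained sketch of that round trip, using made-up paths in a scratch directory:

import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    order_path = Path(tmp) / "order.txt"
    # Mimic what _generate_job_workload writes: "Q<qname>,<absolute .sql path>".
    order_path.write_text(
        "Q1a,/abs/path/to/1a.sql\n"
        "Q1b,/abs/path/to/1b.sql\n"
    )
    # Mimic what Workload.__init__ reads back.
    for line in order_path.read_text().splitlines():
        qid, qpath = line.strip().split(",")
        print(qid, Path(qpath))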

manage/cli.py

Lines changed: 10 additions & 10 deletions

@@ -101,14 +101,14 @@ def clean_workspace(
     any symlinks referenced in task_runs/run_*/ directories we have already decided to keep.
     """
     # This stack holds the symlinks that are left to be processed
-    symlink_fpaths_to_process: list[Path] = []
+    symlink_paths_to_process: list[Path] = []
     # This set holds the symlinks that have already been processed to avoid infinite loops
     processed_symlinks: set[Path] = set()
 
     # 1. Initialize paths to process
     if dbgym_workspace.dbgym_symlinks_path.exists():
         add_symlinks_in_dpath(
-            symlink_fpaths_to_process,
+            symlink_paths_to_process,
             dbgym_workspace.dbgym_symlinks_path,
             processed_symlinks,
         )
@@ -121,17 +121,17 @@ def clean_workspace(
     task_run_child_paths_to_keep = set()
 
     if dbgym_workspace.dbgym_runs_path.exists():
-        while symlink_fpaths_to_process:
-            symlink_fpath: Path = symlink_fpaths_to_process.pop()
-            assert symlink_fpath.is_symlink()
+        while symlink_paths_to_process:
+            symlink_path: Path = symlink_paths_to_process.pop()
+            assert symlink_path.is_symlink()
             # Path.resolve() resolves all layers of symlinks while os.readlink() only resolves one layer.
             # However, os.readlink() literally reads the string contents of the link. We need to do some
             # processing on the result of os.readlink() to convert it to an absolute path
-            real_path = symlink_fpath.resolve()
-            one_layer_resolved_path = os.readlink(symlink_fpath)
+            real_path = symlink_path.resolve()
+            one_layer_resolved_path = os.readlink(symlink_path)
             assert str(real_path) == str(
-                os.readlink(symlink_fpath)
-            ), f"symlink_fpath ({symlink_fpath}) seems to point to *another* symlink. This is difficult to handle, so it is currently disallowed. Please resolve this situation manually."
+                os.readlink(symlink_path)
+            ), f"symlink_path ({symlink_path}) seems to point to *another* symlink. This is difficult to handle, so it is currently disallowed. Please resolve this situation manually."
 
             # If the file doesn't exist, we'll just ignore it.
             if not real_path.exists():
@@ -170,7 +170,7 @@ def clean_workspace(
             # If on safe mode, add symlinks inside the task_run_child_path to be processed
             if mode == "safe":
                 add_symlinks_in_dpath(
-                    symlink_fpaths_to_process,
+                    symlink_paths_to_process,
                     task_run_child_path,
                     processed_symlinks,
                 )
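
The assertion in the second hunk hinges on the difference spelled out in the comments: Path.resolve() follows every layer of symlinks, while os.readlink() returns the raw link contents and resolves only one layer. A standalone illustration in a scratch directory:

import os
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    target = Path(tmp) / "real.txt"
    target.touch()
    link = Path(tmp) / "link.txt"
    link.symlink_to(target)

    print(link.resolve())     # fully resolved absolute path to real.txt
    print(os.readlink(link))  # literal link contents, resolved only one layer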
