Commit d0e5ea6

Export unsummited list data to a GPX file
This script uses BeautifulSoup 4 to parse a list with ascent data on it, collects all of the unsummited entries, grabs each one's GPS coordinates, and writes the aggregated coordinates to a single .gpx file. It has saved me copious amounts of time that I previously spent making maps of unsummited peaks on CalTopo through tedious manual data entry.
1 parent e8b9ec7 · commit d0e5ea6
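
For reference, a hypothetical invocation (the script filename and the list URL below are illustrative placeholders, not part of this commit):

    python peakbagger_gpx_export.py --output-file unsummited.gpx "https://www.peakbagger.com/list.aspx?lid=..."

The positional argument is the URL of the ascent list page; every peak on it without an ascent date ends up as a waypoint in the output file.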

Lines changed: 124 additions & 0 deletions
@@ -0,0 +1,124 @@
#!/usr/bin/env python
"""Peakbagger GPX data exporter."""

import argparse
import concurrent.futures
import logging
import re
import urllib.parse

import bs4
import gpxpy.gpx
import requests


argparser = argparse.ArgumentParser()
argparser.add_argument("--output-file", default="exported.gpx")
argparser.add_argument("url")

args = argparser.parse_args()

# Without this, the INFO-level progress messages below are swallowed by the
# default WARNING-level root logger.
logging.basicConfig(level=logging.INFO)


# Only these columns of the ascent list are needed downstream.
ASCENT_LIST_FILTER = (
    "Peak",
    "Ascent Date",
)

BS4_PARSER = "html5lib"


# Matches coordinates rendered as, e.g., "40.0150, -105.2705 (Dec Deg)".
LAT_LONG_RE = re.compile(
    r'(?P<latitude>[^"]+), (?P<longitude>[^,]+) \(Dec Deg\)',
    re.DOTALL,
)


def get_html_text(url):
    # A timeout keeps one dead connection from hanging the whole export.
    resp = requests.get(url, timeout=30)
    resp.raise_for_status()
    return resp.text


def make_waypoint(name, latitude, longitude):
    return gpxpy.gpx.GPXWaypoint(latitude=latitude, longitude=longitude, name=name)


def get_lat_long_from_peak_page(url):
    """Scrape a peak's page for its decimal-degree coordinates."""
    text = get_html_text(url)
    soup = bs4.BeautifulSoup(text, BS4_PARSER)

    for lat_long_elem in soup.body.find(id="Form1").find_all("td"):
        for string in lat_long_elem.stripped_strings:
            match = LAT_LONG_RE.search(string)
            if match is None:
                continue
            return match.groupdict()

    return None


def get_list_of_ascents_from_ascent_list(url):
    """Build GPX waypoints for every unclimbed peak on an ascent list."""
    text = get_html_text(url)
    soup = bs4.BeautifulSoup(text, BS4_PARSER)

    # The "Rank" header cell anchors the ascent table within the page.
    rank_anchor = soup.body.find(id="Form1").find("th", string="Rank")
    if rank_anchor is None:
        # Return an empty list, not None, so callers can extend() safely.
        return []

    ascent_header_row = rank_anchor.parent
    # Map column index -> column name for just the columns we care about.
    ascent_header_mapping = {
        i: ascent_header_elem.text
        for i, ascent_header_elem in enumerate(ascent_header_row.find_all("th"))
        if ascent_header_elem.text in ASCENT_LIST_FILTER
    }

    peak_links = {}
    peaks_not_climbed = []

    ascent_table = ascent_header_row.parent
    for ascent_row in ascent_table.find_all("tr"):
        # Skip the header row and any empty spacer rows.
        if ascent_row.find("th") or not ascent_row.text.strip():
            continue
        peak_ascent_data = {
            ascent_header_mapping[j]: ascent_col
            for j, ascent_col in enumerate(ascent_row.children)
            if j in ascent_header_mapping
        }
        peak_name = peak_ascent_data["Peak"].text.strip()
        # A non-empty "Ascent Date" cell means the peak was already climbed.
        peak_climbed = bool(peak_ascent_data["Ascent Date"].text.strip())
        if peak_climbed:
            logging.debug("Skipping %s; already climbed.", peak_name)
            continue

        peak_link = urllib.parse.urljoin(
            url, peak_ascent_data["Peak"].find("a").get("href")
        )
        peak_links[peak_name] = peak_link

    # Fetch all the peak pages concurrently; each future resolves to the
    # coordinates scraped from one page.
    with concurrent.futures.ThreadPoolExecutor() as executor:
        future_to_peaks = {
            executor.submit(get_lat_long_from_peak_page, peak_link): peak_name
            for peak_name, peak_link in peak_links.items()
        }

        for i, future in enumerate(concurrent.futures.as_completed(future_to_peaks), 1):
            peak_name = future_to_peaks[future]
            peak_def = future.result()
            if peak_def is None:
                logging.warning("No coordinates found for %s; skipping.", peak_name)
                continue
            waypoint = make_waypoint(
                peak_name, peak_def["latitude"], peak_def["longitude"]
            )
            logging.info("[%02d/%02d] waypoints created.", i, len(peak_links))
            peaks_not_climbed.append(waypoint)

    return peaks_not_climbed


gpx = gpxpy.gpx.GPX()

peaks_not_climbed = get_list_of_ascents_from_ascent_list(args.url)

gpx.waypoints.extend(peaks_not_climbed)

with open(args.output_file, "w") as fp:
    fp.write(gpx.to_xml())
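
A quick way to sanity-check the export, assuming gpxpy is installed (this snippet is illustrative and not part of the commit):

    import gpxpy

    # Parse the file the exporter just wrote and list its waypoints.
    with open("exported.gpx") as fp:
        gpx = gpxpy.parse(fp)

    for waypoint in gpx.waypoints:
        print(waypoint.name, waypoint.latitude, waypoint.longitude)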
