
Commit f078b6e

Merge pull request #92 from Pennycook/remove-dump-option
Remove --dump option
2 parents 2a834ac + 5d87c42 commit f078b6e

File tree: 3 files changed, 0 additions and 76 deletions (bin/codebasin, codebasin/preprocessor.py, codebasin/report.py)


bin/codebasin

Lines changed: 0 additions & 24 deletions

@@ -93,14 +93,6 @@ def main():
         + "May be specified multiple times. "
         + "If not specified, all reports will be generated.",
     )
-    deprecated_args.add_argument(
-        "-d",
-        "--dump",
-        dest="dump",
-        metavar="<file.json>",
-        action="store",
-        help="Dump out annotated platform/parsing tree to <file.json>.",
-    )
     parser.add_argument(
         "-x",
         "--exclude",
@@ -140,13 +132,6 @@ def main():
         max(1, logging.WARNING - 10 * (args.verbose - args.quiet)),
     )

-    # Warnings for deprecated functionality with no planned replacement.
-    if args.dump:
-        warnings.warn(
-            "--dump will be removed in a future release.",
-            DeprecationWarning,
-        )
-
     # If no specific report was specified, generate all reports.
     # Handled here to prevent "all" always being in the list.
     if len(args.reports) == 0:
@@ -249,15 +234,6 @@ def main():
     platform_mapper = PlatformMapper(codebase)
     setmap = platform_mapper.walk(state)

-    if args.dump:
-        if util.ensure_json(args.dump):
-            report.annotated_dump(args.dump, state)
-        else:
-            logging.getLogger("codebasin").warning(
-                "Output path for annotation dump does not end with .json: "
-                f"'{args.dump}'. Skipping dump.",
-            )
-
     def report_enabled(name):
         if "all" in args.reports:
             return True
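
For context, the removed command-line handling follows a common argparse deprecation pattern: the flag stays registered so old invocations still parse, and a DeprecationWarning is emitted when it is actually used. Below is a minimal, self-contained sketch of that pattern built from the flag definition in the diff above; how deprecated_args was originally created is not shown in this diff, so an ordinary argument group is assumed here.

import argparse
import warnings

parser = argparse.ArgumentParser()
# Assumption: the real script groups deprecated flags into their own group.
deprecated_args = parser.add_argument_group("deprecated arguments")
deprecated_args.add_argument(
    "-d",
    "--dump",
    dest="dump",
    metavar="<file.json>",
    action="store",
    help="Dump out annotated platform/parsing tree to <file.json>.",
)

args = parser.parse_args()
if args.dump:
    # Warn but keep working, so existing invocations are not broken immediately.
    warnings.warn(
        "--dump will be removed in a future release.",
        DeprecationWarning,
    )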

codebasin/preprocessor.py

Lines changed: 0 additions & 40 deletions

@@ -551,12 +551,6 @@ def add_child(self, child):
         self.children.append(child)
         child.parent = self

-    def to_json(self, assoc):
-        return {
-            "platforms": list(assoc[self]),
-            "children": [x.to_json(assoc) for x in self.children],
-        }
-
     @staticmethod
     def is_start_node():
         """
@@ -614,17 +608,6 @@ def __compute_file_hash(self):

         return hasher.hexdigest()

-    def to_json(self, assoc):
-        parent_json = super().to_json(assoc)
-        mydict = {
-            "type": "file",
-            "file": self.filename,
-            "name": os.path.basename(self.filename),
-            "sloc": self.total_sloc,
-        }
-        parent_json.update(mydict)
-        return parent_json
-
     def __repr__(self):
         return _representation_string(self, attrs=["filename"])

@@ -653,23 +636,6 @@ def __init__(self, start_line=-1, end_line=-1, num_lines=0, source=None):
         self.num_lines = num_lines
         self.source = source

-    def to_json(self, assoc):
-        parent_json = super().to_json(assoc)
-        if self.source:
-            source = "\n".join(self.source)
-        else:
-            source = None
-
-        mydict = {
-            "type": "code",
-            "start_line": self.start_line,
-            "end_line": self.end_line,
-            "sloc": self.num_lines,
-            "source": source,
-        }
-        parent_json.update(mydict)
-        return parent_json
-
     def __repr__(self):
         return _representation_string(
             self,
@@ -707,12 +673,6 @@ class DirectiveNode(CodeNode):
     def __init__(self):
         super().__init__()

-    def to_json(self, assoc):
-        parent_json = super().to_json(assoc)
-        mydict = {"type": "directive", "source": "\n".join(self.spelling())}
-        parent_json.update(mydict)
-        return parent_json
-

 class UnrecognizedDirectiveNode(DirectiveNode):
     """

codebasin/report.py

Lines changed: 0 additions & 12 deletions

@@ -5,7 +5,6 @@
 """

 import itertools as it
-import json
 import logging
 import warnings

@@ -14,17 +13,6 @@
 log = logging.getLogger("codebasin")


-def annotated_dump(output_file, state):
-    outlist = []
-    for fname in state.get_filenames():
-        source_tree = state.get_tree(fname)
-        node_associations = state.get_map(fname)
-        outlist.append(source_tree.root.to_json(node_associations))
-
-    with open(output_file, "w") as fp:
-        fp.write(json.dumps(outlist, indent=2))
-
-
 def extract_platforms(setmap):
     """
     Extract a list of unique platforms from a set map
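
Taken together, the deleted annotated_dump wrote a JSON list with one entry per analyzed file, where each entry was produced by the to_json methods deleted from codebasin/preprocessor.py above. Its output therefore had roughly the following shape; the values below are illustrative, and the exact set of fields on each node depended on the node type and its base classes.

[
  {
    "platforms": ["cpu", "gpu"],
    "type": "file",
    "file": "/path/to/example.cpp",
    "name": "example.cpp",
    "sloc": 12,
    "children": [
      {
        "platforms": ["gpu"],
        "type": "directive",
        "start_line": 1,
        "end_line": 1,
        "sloc": 1,
        "source": "#ifdef GPU",
        "children": []
      },
      {
        "platforms": ["gpu"],
        "type": "code",
        "start_line": 2,
        "end_line": 11,
        "sloc": 10,
        "source": "...",
        "children": []
      }
    ]
  }
]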
