Skip to content

Commit 1c6f574

Browse files
authored
docs(getting-started): update stackablectl op install output (#489)
* chore(getting-started): add warning to generated script
* docs(getting-started): update stackablectl op install output
* chore(ruff): apply formatting
* chore(markdownlint): ignore heading rule for template partials
* chore(readme): render readme
* docs(getting-started): make stackablectl op install output templating consistent with others
1 parent 4ad0b08 commit 1c6f574

File tree

10 files changed

+103
-51
lines changed

10 files changed

+103
-51
lines changed

.readme/README.md.j2

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
<!-- markdownlint-disable MD041 -->
12
{%- set title="Stackable Operator for Apache Hive" -%}
23
{%- set operator_name="hive" -%}
34
{%- set operator_docs_slug="hive" -%}

.readme/partials/main.md.j2

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
<!-- markdownlint-disable MD041 -->
12
This is a Kubernetes operator to manage [Apache Hive](https://hive.apache.org/).
23

34
{% filter trim %}

README.md

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
<!-- markdownlint-disable MD041 -->
1+
<!-- markdownlint-disable MD041 --><!-- markdownlint-disable MD041 -->
22
<p align="center">
33
<img width="150" src="./.readme/static/borrowed/Icon_Stackable.svg" alt="Stackable Logo"/>
44
</p>
@@ -13,6 +13,7 @@
1313

1414
[Documentation](https://docs.stackable.tech/home/stable/hive) | [Stackable Data Platform](https://stackable.tech/) | [Platform Docs](https://docs.stackable.tech/) | [Discussions](https://github.com/orgs/stackabletech/discussions) | [Discord](https://discord.gg/7kZ3BNnCAF)
1515

16+
<!-- markdownlint-disable MD041 -->
1617
This is a Kubernetes operator to manage [Apache Hive](https://hive.apache.org/).
1718

1819
<!-- markdownlint-disable MD041 MD051 -->

docs/modules/hive/examples/getting_started/getting_started.sh

Lines changed: 17 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,21 @@
11
#!/usr/bin/env bash
22
set -euo pipefail
33

4+
# DO NOT EDIT THE SCRIPT
5+
# Instead, update the j2 template, and regenerate it for dev:
6+
# cat <<EOF | jinja2 --format yaml getting_started.sh.j2 -o getting_started.sh
7+
# helm:
8+
# repo_name: stackable-dev
9+
# repo_url: https://repo.stackable.tech/repository/helm-dev/
10+
# versions:
11+
# commons: 0.0.0-dev
12+
# hive: 0.0.0-dev
13+
# listener: 0.0.0-dev
14+
# secret: 0.0.0-dev
15+
# minio: 4.0.2
16+
# postgresql: 12.1.5
17+
# EOF
18+
419
# The getting started guide script
520
# It uses tagged regions which are included in the documentation
621
# https://docs.asciidoctor.org/asciidoc/latest/directives/include-tagged-regions/
@@ -39,8 +54,8 @@ helm install --wait hive-operator stackable-dev/hive-operator --version 0.0.0-de
3954
echo "Install minio for S3"
4055
# tag::helm-install-minio[]
4156
helm install minio \
42-
--namespace default \
4357
--version 4.0.2 \
58+
--namespace default \
4459
--set mode=standalone \
4560
--set replicas=1 \
4661
--set persistence.enabled=false \
@@ -55,7 +70,7 @@ helm install minio \
5570
echo "Install postgres for Hive"
5671
# tag::helm-install-postgres[]
5772
helm install postgresql \
58-
--version=12.1.5 \
73+
--version 12.1.5 \
5974
--namespace default \
6075
--set auth.username=hive \
6176
--set auth.password=hive \

docs/modules/hive/examples/getting_started/getting_started.sh.j2

Lines changed: 17 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,21 @@
11
#!/usr/bin/env bash
22
set -euo pipefail
33

4+
# DO NOT EDIT THE SCRIPT
5+
# Instead, update the j2 template, and regenerate it for dev:
6+
# cat <<EOF | jinja2 --format yaml getting_started.sh.j2 -o getting_started.sh
7+
# helm:
8+
# repo_name: stackable-dev
9+
# repo_url: https://repo.stackable.tech/repository/helm-dev/
10+
# versions:
11+
# commons: 0.0.0-dev
12+
# hive: 0.0.0-dev
13+
# listener: 0.0.0-dev
14+
# secret: 0.0.0-dev
15+
# minio: 4.0.2
16+
# postgresql: 12.1.5
17+
# EOF
18+
419
# The getting started guide script
520
# It uses tagged regions which are included in the documentation
621
# https://docs.asciidoctor.org/asciidoc/latest/directives/include-tagged-regions/
@@ -39,8 +54,8 @@ helm install --wait hive-operator {{ helm.repo_name }}/hive-operator --version {
3954
echo "Install minio for S3"
4055
# tag::helm-install-minio[]
4156
helm install minio \
57+
--version {{ versions.minio }} \
4258
--namespace default \
43-
--version 4.0.2 \
4459
--set mode=standalone \
4560
--set replicas=1 \
4661
--set persistence.enabled=false \
@@ -55,7 +70,7 @@ helm install minio \
5570
echo "Install postgres for Hive"
5671
# tag::helm-install-postgres[]
5772
helm install postgresql \
58-
--version=12.1.5 \
73+
--version {{ versions.postgresql }} \
5974
--namespace default \
6075
--set auth.username=hive \
6176
--set auth.password=hive \
Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,4 @@
1-
# tag::stackablectl-install-operators-output[]
2-
[INFO ] Installing commons operator in version 0.0.0-dev
3-
[INFO ] Installing secret operator in version 0.0.0-dev
4-
[INFO ] Installing listener operator in version 0.0.0-dev
5-
[INFO ] Installing hive operator in version 0.0.0-dev
6-
# end::stackablectl-install-operators-output[]
1+
Installed commons=0.0.0-dev operator
2+
Installed secret=0.0.0-dev operator
3+
Installed listener=0.0.0-dev operator
4+
Installed hive=0.0.0-dev operator
Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,4 @@
1-
# tag::stackablectl-install-operators-output[]
2-
[INFO ] Installing commons operator in version {{ versions.commons }}
3-
[INFO ] Installing secret operator in version {{ versions.secret }}
4-
[INFO ] Installing listener operator in version {{ versions.listener }}
5-
[INFO ] Installing hive operator in version {{ versions.hive }}
6-
# end::stackablectl-install-operators-output[]
1+
Installed commons={{ versions.commons }} operator
2+
Installed secret={{ versions.secret }} operator
3+
Installed listener={{ versions.listener }} operator
4+
Installed hive={{ versions.hive }} operator

docs/modules/hive/pages/getting_started/installation.adoc

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -111,9 +111,7 @@ include::example$getting_started/getting_started.sh[tag=stackablectl-install-ope
111111
The tool will show
112112

113113
[source]
114-
----
115-
include::example$getting_started/install-operator-output.txt[tag=stackablectl-install-operators-output]
116-
----
114+
include::example$getting_started/install_output.txt[]
117115

118116
=== Helm
119117

tests/templates/kuttl/logging/test_log_aggregation.py

Lines changed: 18 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,9 @@
44

55
def check_sent_events():
66
response = requests.post(
7-
'http://hive-vector-aggregator:8686/graphql',
7+
"http://hive-vector-aggregator:8686/graphql",
88
json={
9-
'query': """
9+
"query": """
1010
{
1111
transforms(first:100) {
1212
nodes {
@@ -20,29 +20,30 @@ def check_sent_events():
2020
}
2121
}
2222
"""
23-
}
23+
},
2424
)
2525

26-
assert response.status_code == 200, \
27-
'Cannot access the API of the vector aggregator.'
26+
assert (
27+
response.status_code == 200
28+
), "Cannot access the API of the vector aggregator."
2829

2930
result = response.json()
3031

31-
transforms = result['data']['transforms']['nodes']
32+
transforms = result["data"]["transforms"]["nodes"]
3233
for transform in transforms:
33-
sentEvents = transform['metrics']['sentEventsTotal']
34-
componentId = transform['componentId']
34+
sentEvents = transform["metrics"]["sentEventsTotal"]
35+
componentId = transform["componentId"]
3536

36-
if componentId == 'filteredInvalidEvents':
37-
assert sentEvents is None or \
38-
sentEvents['sentEventsTotal'] == 0, \
39-
'Invalid log events were sent.'
37+
if componentId == "filteredInvalidEvents":
38+
assert (
39+
sentEvents is None or sentEvents["sentEventsTotal"] == 0
40+
), "Invalid log events were sent."
4041
else:
41-
assert sentEvents is not None and \
42-
sentEvents['sentEventsTotal'] > 0, \
43-
f'No events were sent in "{componentId}".'
42+
assert (
43+
sentEvents is not None and sentEvents["sentEventsTotal"] > 0
44+
), f'No events were sent in "{componentId}".'
4445

4546

46-
if __name__ == '__main__':
47+
if __name__ == "__main__":
4748
check_sent_events()
48-
print('Test successful!')
49+
print("Test successful!")

tests/templates/kuttl/smoke/test_metastore.py

Lines changed: 38 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -15,9 +15,7 @@
1515

1616

1717
def table(db_name, table_name, location):
18-
columns = [
19-
ColumnBuilder("id", "string", "col comment").build()
20-
]
18+
columns = [ColumnBuilder("id", "string", "col comment").build()]
2119

2220
serde_info = SerDeInfoBuilder(
2321
serialization_lib="org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"
@@ -40,11 +38,15 @@ def table(db_name, table_name, location):
4038
return test_table
4139

4240

43-
if __name__ == '__main__':
41+
if __name__ == "__main__":
4442
all_args = argparse.ArgumentParser(description="Test hive metastore.")
4543
all_args.add_argument("-p", "--port", help="Metastore server port", default="9083")
46-
all_args.add_argument("-d", "--database", help="Test DB name", default="test_metastore")
47-
all_args.add_argument("-m", "--metastore", help="The host or service to connect to", required=True)
44+
all_args.add_argument(
45+
"-d", "--database", help="Test DB name", default="test_metastore"
46+
)
47+
all_args.add_argument(
48+
"-m", "--metastore", help="The host or service to connect to", required=True
49+
)
4850
args = vars(all_args.parse_args())
4951

5052
database_name = args["database"]
@@ -61,24 +63,46 @@ def table(db_name, table_name, location):
6163

6264
# Local access
6365
try:
64-
hive_client.create_table(table(database_name, local_test_table_name, f"/stackable/warehouse/location_{database_name}_{local_test_table_name}"))
66+
hive_client.create_table(
67+
table(
68+
database_name,
69+
local_test_table_name,
70+
f"/stackable/warehouse/location_{database_name}_{local_test_table_name}",
71+
)
72+
)
6573
except AlreadyExistsException:
6674
print(f"[INFO]: Table {local_test_table_name} already existed")
67-
schema = hive_client.get_schema(db_name=database_name, table_name=local_test_table_name)
68-
expected = [FieldSchema(name='id', type='string', comment='col comment')]
75+
schema = hive_client.get_schema(
76+
db_name=database_name, table_name=local_test_table_name
77+
)
78+
expected = [FieldSchema(name="id", type="string", comment="col comment")]
6979
if schema != expected:
70-
print("[ERROR]: Received local schema " + str(schema) + " - expected schema: " + expected)
80+
print(
81+
"[ERROR]: Received local schema "
82+
+ str(schema)
83+
+ " - expected schema: "
84+
+ expected
85+
)
7186
exit(-1)
7287

7388
# S3 access
7489
try:
75-
hive_client.create_table(table(database_name, s3_test_table_name, "s3a://hive/"))
90+
hive_client.create_table(
91+
table(database_name, s3_test_table_name, "s3a://hive/")
92+
)
7693
except AlreadyExistsException:
7794
print(f"[INFO]: Table {s3_test_table_name} already existed")
78-
schema = hive_client.get_schema(db_name=database_name, table_name=s3_test_table_name)
79-
expected = [FieldSchema(name='id', type='string', comment='col comment')]
95+
schema = hive_client.get_schema(
96+
db_name=database_name, table_name=s3_test_table_name
97+
)
98+
expected = [FieldSchema(name="id", type="string", comment="col comment")]
8099
if schema != expected:
81-
print("[ERROR]: Received s3 schema " + str(schema) + " - expected schema: " + expected)
100+
print(
101+
"[ERROR]: Received s3 schema "
102+
+ str(schema)
103+
+ " - expected schema: "
104+
+ expected
105+
)
82106
exit(-1)
83107

84108
# Removed test, because it failed against Hive 3.1.3. We do not know if the behavior of the Hive metastore changed or we made a mistake. We improved the Trino tests to do more stuff with S3 (e.g. writing tables) which passed,

0 commit comments

Comments (0)