7 changes: 4 additions & 3 deletions docs/bulk-exports.md
@@ -155,18 +155,19 @@ You can generate a JWKS using the RS384 algorithm and a random ID by running the
(Make sure you have `jose` installed first.)

```sh
jose jwk gen -s -i "{\"alg\":\"RS384\",\"kid\":\"`uuidgen`\"}" -o rsa.jwks
jose jwk gen -s -i "{\"alg\":\"RS384\",\"kid\":\"`uuidgen`\"}" -o private.jwks
jose jwk pub -s -i private.jwks -o public.jwks
```

Then give `rsa.jwks` to your FHIR server and to Cumulus ETL (details on that below).
Then give `public.jwks` to your FHIR server and `private.jwks` to Cumulus ETL (details on that below).
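
If you want a quick sanity check that the keys were split correctly, one option (a sketch that assumes `jq` is installed and that the `-s` flag produced a JWK set with a `keys` array) is to confirm that only `private.jwks` contains private key material:

```sh
# The private RSA JWK carries the secret exponent "d"; the public one should not.
jq '.keys[0] | has("d")' private.jwks   # expected output: true
jq '.keys[0] | has("d")' public.jwks    # expected output: false
```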

### SMART Arguments

You'll need to pass two new arguments to Cumulus ETL:

```sh
--smart-client-id=YOUR_CLIENT_ID
--smart-jwks=/path/to/rsa.jwks
--smart-jwks=/path/to/private.jwks
```

You can also give `--smart-client-id` a path to a file with your client ID,
81 changes: 54 additions & 27 deletions tests/loaders/ndjson/test_bulk_export.py
Contributor Author

Something changed in the JSON formatting library that one of our dependencies uses.

Previously, we were passing Python objects to respx to return in mock HTTP responses, then expecting a hardcoded number of bytes back - i.e. we were baking assumptions about the JSON encoding that happens behind the scenes into our tests. Whoops.

So now the tests encode the JSON to text themselves and keep track of the size.
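
For reference, the new pattern boils down to something like this minimal standalone sketch (same respx `.respond(text=...)` approach as the tests; the URL and payload are borrowed from the test below):

```python
import json

import httpx
import respx

# Encode the mock payload ourselves, so the test (not the mocking layer)
# controls the exact bytes that come back.
payload = json.dumps({"resourceType": "Condition", "id": "1"})

with respx.mock:
    respx.get("https://example.com/con1").respond(text=payload)
    response = httpx.get("https://example.com/con1")

# Assert against the known payload size instead of a hardcoded byte count.
assert len(response.content) == len(payload)
```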

Contributor Author

This was unrelated to the docs change; I'm just using this PR as a convenient place to fix the tests.

@@ -3,6 +3,7 @@
import contextlib
import datetime
import io
import json
import os
import tempfile
from unittest import mock
@@ -141,18 +142,21 @@ async def test_happy_path(self):
],
},
)
con1 = json.dumps({"resourceType": "Condition", "id": "1"})
self.respx_mock.get(
"https://example.com/con1",
headers={"Accept": "application/fhir+ndjson"},
).respond(json={"resourceType": "Condition", "id": "1"})
).respond(text=con1)
con2 = json.dumps({"resourceType": "Condition", "id": "2"})
self.respx_mock.get(
"https://example.com/con2",
headers={"Accept": "application/fhir+ndjson"},
).respond(json={"resourceType": "Condition", "id": "2"})
).respond(text=con2)
pat1 = json.dumps({"resourceType": "Patient", "id": "P"})
self.respx_mock.get(
"https://example.com/pat1",
headers={"Accept": "application/fhir+ndjson"},
).respond(json={"resourceType": "Patient", "id": "P"})
).respond(text=pat1)

await self.export()

@@ -216,7 +220,7 @@ async def test_happy_path(self):
),
(
"download_complete",
{"fileSize": 40, "fileUrl": "https://example.com/con1", "resourceCount": 1},
{"fileSize": len(con1), "fileUrl": "https://example.com/con1", "resourceCount": 1},
),
(
"download_request",
@@ -228,7 +232,7 @@
),
(
"download_complete",
{"fileSize": 40, "fileUrl": "https://example.com/con2", "resourceCount": 1},
{"fileSize": len(con2), "fileUrl": "https://example.com/con2", "resourceCount": 1},
),
(
"download_request",
@@ -240,11 +244,17 @@
),
(
"download_complete",
{"fileSize": 38, "fileUrl": "https://example.com/pat1", "resourceCount": 1},
{"fileSize": len(pat1), "fileUrl": "https://example.com/pat1", "resourceCount": 1},
),
(
"export_complete",
{"attachments": None, "bytes": 118, "duration": 0, "files": 3, "resources": 3},
{
"attachments": None,
"bytes": len(con1) + len(con2) + len(pat1),
"duration": 0,
"files": 3,
"resources": 3,
},
),
)

@@ -300,12 +310,11 @@ async def test_export_error(self):
"https://example.com/err2",
headers={"Accept": "application/fhir+ndjson"},
).respond(text=err2)
con1 = '{"resourceType": "Condition"}'
self.respx_mock.get(
"https://example.com/con1",
headers={"Accept": "application/fhir+ndjson"},
).respond(
json={"resourceType": "Condition"},
)
).respond(text=con1)

with self.assertRaisesRegex(
errors.FatalError, "Errors occurred during export:\n - err1\n - err2\n - err3\n - err4"
@@ -340,7 +349,7 @@ async def test_export_error(self):
),
(
"download_complete",
{"fileSize": 29, "fileUrl": "https://example.com/con1", "resourceCount": 1},
{"fileSize": len(con1), "fileUrl": "https://example.com/con1", "resourceCount": 1},
),
(
"download_request",
Expand All @@ -352,7 +361,7 @@ async def test_export_error(self):
),
(
"download_complete",
{"fileSize": 93, "fileUrl": "https://example.com/err1", "resourceCount": 1},
{"fileSize": len(err1), "fileUrl": "https://example.com/err1", "resourceCount": 1},
),
(
"download_request",
@@ -364,11 +373,17 @@
),
(
"download_complete",
{"fileSize": 322, "fileUrl": "https://example.com/err2", "resourceCount": 3},
{"fileSize": len(err2), "fileUrl": "https://example.com/err2", "resourceCount": 3},
),
(
"export_complete",
{"attachments": None, "bytes": 444, "duration": 0, "files": 3, "resources": 5},
{
"attachments": None,
"bytes": len(con1) + len(err1) + len(err2),
"duration": 0,
"files": 3,
"resources": 5,
},
),
)

@@ -408,20 +423,22 @@ async def test_deleted_resources(self):
],
},
)
deleted1 = {
"resourceType": "Bundle",
"type": "transaction",
"entry": [
{
"request": {"method": "DELETE", "url": "Patient/123"},
}
],
}
self.respx_mock.get("https://example.com/deleted1").respond(json=deleted1)
deleted1 = json.dumps(
{
"resourceType": "Bundle",
"type": "transaction",
"entry": [
{
"request": {"method": "DELETE", "url": "Patient/123"},
}
],
}
)
self.respx_mock.get("https://example.com/deleted1").respond(text=deleted1)

await self.export()

bundle = common.read_json(f"{self.tmpdir}/deleted/Bundle.000.ndjson")
bundle = common.read_text(f"{self.tmpdir}/deleted/Bundle.000.ndjson")
self.assertEqual(bundle, deleted1)

self.assert_log_equals(
Expand All @@ -437,11 +454,21 @@ async def test_deleted_resources(self):
),
(
"download_complete",
{"fileSize": 117, "fileUrl": "https://example.com/deleted1", "resourceCount": 1},
{
"fileSize": len(deleted1),
"fileUrl": "https://example.com/deleted1",
"resourceCount": 1,
},
),
(
"export_complete",
{"attachments": None, "bytes": 117, "duration": 0, "files": 1, "resources": 1},
{
"attachments": None,
"bytes": len(deleted1),
"duration": 0,
"files": 1,
"resources": 1,
},
),
)
