@@ -103,17 +103,17 @@ async def test_failed_task(self):
 
     async def test_single_task(self):
         # Grab all observations before we mock anything
-        observations = loaders.FhirNdjsonLoader(store.Root(self.input_path)).load_all(
-            ["Observation"]
+        observations = loaders.FhirNdjsonLoader(store.Root(self.input_path)).load_resources(
+            {"Observation"}
         )
 
-        def fake_load_all(internal_self, resources):
+        def fake_load_resources(internal_self, resources):
             del internal_self
             # Confirm we only tried to load one resource
-            self.assertEqual(["Observation"], resources)
+            self.assertEqual({"Observation"}, resources)
             return observations
 
-        with mock.patch.object(loaders.FhirNdjsonLoader, "load_all", new=fake_load_all):
+        with mock.patch.object(loaders.FhirNdjsonLoader, "load_resources", new=fake_load_resources):
             await self.run_etl(tasks=["observation"])
 
         # Confirm we only wrote the one resource
@@ -126,17 +126,17 @@ def fake_load_all(internal_self, resources):
 
     async def test_multiple_tasks(self):
         # Grab all observations before we mock anything
-        loaded = loaders.FhirNdjsonLoader(store.Root(self.input_path)).load_all(
-            ["Observation", "Patient"]
+        loaded = loaders.FhirNdjsonLoader(store.Root(self.input_path)).load_resources(
+            {"Observation", "Patient"}
         )
 
-        def fake_load_all(internal_self, resources):
+        def fake_load_resources(internal_self, resources):
             del internal_self
             # Confirm we only tried to load two resources
-            self.assertEqual({"Observation", "Patient"}, set(resources))
+            self.assertEqual({"Observation", "Patient"}, resources)
             return loaded
 
-        with mock.patch.object(loaders.FhirNdjsonLoader, "load_all", new=fake_load_all):
+        with mock.patch.object(loaders.FhirNdjsonLoader, "load_resources", new=fake_load_resources):
             await self.run_etl(tasks=["observation", "patient"])
 
         # Confirm we only wrote the two resources
@@ -267,16 +267,18 @@ async def test_task_init_checks(self, mock_check):
     async def test_completion_args(self, etl_args, loader_vals, expected_vals):
         """Verify that we parse completion args with the correct fallbacks and checks."""
         # Grab all observations before we mock anything
-        observations = await loaders.FhirNdjsonLoader(store.Root(self.input_path)).load_all(
-            ["Observation"]
+        observations = await loaders.FhirNdjsonLoader(store.Root(self.input_path)).load_resources(
+            {"Observation"}
         )
         observations.group_name = loader_vals[0]
         observations.export_datetime = loader_vals[1]
 
         with (
             self.assertRaises(SystemExit) as cm,
             mock.patch("cumulus_etl.etl.cli.etl_job", side_effect=SystemExit) as mock_etl_job,
-            mock.patch.object(loaders.FhirNdjsonLoader, "load_all", return_value=observations),
+            mock.patch.object(
+                loaders.FhirNdjsonLoader, "load_resources", return_value=observations
+            ),
         ):
             await self.run_etl(tasks=["observation"], **etl_args)
 
@@ -297,14 +299,36 @@ async def test_deleted_ids_passed_down(self):
             with (
                 self.assertRaises(SystemExit),
                 mock.patch("cumulus_etl.etl.cli.etl_job", side_effect=SystemExit) as mock_etl_job,
-                mock.patch.object(loaders.FhirNdjsonLoader, "load_all", return_value=results),
+                mock.patch.object(loaders.FhirNdjsonLoader, "load_resources", return_value=results),
             ):
                 await self.run_etl(tasks=["observation"])
 
         self.assertEqual(mock_etl_job.call_count, 1)
         config = mock_etl_job.call_args[0][0]
         self.assertEqual({"Observation": {"obs1"}}, config.deleted_ids)
 
+    @ddt.data(["patient"], None)
+    async def test_missing_resources(self, tasks):
+        with tempfile.TemporaryDirectory() as tmpdir:
+            with self.assertRaises(SystemExit) as cm:
+                await self.run_etl(tasks=tasks, input_path=tmpdir)
+        self.assertEqual(errors.MISSING_REQUESTED_RESOURCES, cm.exception.code)
+
+    async def test_allow_missing_resources(self):
+        with tempfile.TemporaryDirectory() as tmpdir:
+            await self.run_etl("--allow-missing-resources", tasks=["patient"], input_path=tmpdir)
+
+        self.assertEqual("", common.read_text(f"{self.output_path}/patient/patient.000.ndjson"))
+
+    async def test_missing_resources_skips_tasks(self):
+        with tempfile.TemporaryDirectory() as tmpdir:
+            common.write_json(f"{tmpdir}/p.ndjson", {"id": "A", "resourceType": "Patient"})
+            await self.run_etl(input_path=tmpdir)
+
+        self.assertEqual(
+            {"etl__completion", "patient", "JobConfig"}, set(os.listdir(self.output_path))
+        )
+
 
 class TestEtlJobConfig(BaseEtlSimple):
     """Test case for the job config logging data"""
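
For readers skimming the diff: the rename also changes the calling convention, from load_all(["Observation"]) taking a list to load_resources({"Observation"}) taking a set of resource type names. Below is a minimal sketch of that pattern, assuming only the cumulus_etl.loaders and cumulus_etl.store modules exercised above; the helper names are illustrative and not part of this change.

# Illustrative sketch only -- mirrors the pattern used in the tests above.
# Assumes the cumulus_etl modules referenced in this diff (loaders, store).
from unittest import mock

from cumulus_etl import loaders, store


async def load_observations(input_path: str):
    # New API: load_resources() takes a set of FHIR resource type names
    # (the old load_all() took a list).
    return await loaders.FhirNdjsonLoader(store.Root(input_path)).load_resources({"Observation"})


def patch_loader(results):
    # Same mocking approach the tests use: replace load_resources wholesale
    # so no real loading happens and the canned results are returned instead.
    return mock.patch.object(loaders.FhirNdjsonLoader, "load_resources", return_value=results)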