diff --git a/examples/annotation_import/pdf.ipynb b/examples/annotation_import/pdf.ipynb index 1a9f1fc50..8e037f6a2 100644 --- a/examples/annotation_import/pdf.ipynb +++ b/examples/annotation_import/pdf.ipynb @@ -282,7 +282,7 @@ }, { "metadata": {}, - "source": "client.enable_experimental = True\ntask = lb.DataRow.export(client=client, global_keys=[global_key])\ntask.wait_till_done()\nstream = task.get_stream()\n\ntext_layer = \"\"\nfor output in stream:\n output_json = json.loads(output.json_str)\n text_layer = output_json[\"media_attributes\"][\"text_layer_url\"]\nprint(text_layer)", + "source": "client.enable_experimental = True\ntask = lb.DataRow.export(client=client, global_keys=[global_key])\ntask.wait_till_done()\nstream = task.get_buffered_stream()\n\ntext_layer = \"\"\nfor output in stream:\n output_json = json.loads(output.json)\n text_layer = output_json[\"media_attributes\"][\"text_layer_url\"]\nprint(text_layer)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/basics/batches.ipynb b/examples/basics/batches.ipynb index 2d07682b5..5b8b4080c 100644 --- a/examples/basics/batches.ipynb +++ b/examples/basics/batches.ipynb @@ -121,7 +121,7 @@ }, { "metadata": {}, - "source": "client.enable_experimental = True\n\nexport_task = dataset.export()\nexport_task.wait_till_done()\n\ndata_rows = []\n\n\ndef json_stream_handler(output: lb.JsonConverterOutput):\n data_row = json.loads(output.json_str)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_stream(converter=lb.JsonConverter(),\n stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_stream(\n converter=lb.JsonConverter(), stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)", + "source": "client.enable_experimental = True\n\nexport_task = dataset.export()\nexport_task.wait_till_done()\n\ndata_rows = []\n\n\ndef 
json_stream_handler(output: lb.BufferedJsonConverterOutput):\n data_row = json.loads(output.json)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_buffered_stream(stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_buffered_stream(\n stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)", "cell_type": "code", "outputs": [], "execution_count": null @@ -254,7 +254,7 @@ }, { "metadata": {}, - "source": "data_rows = []\n\n\ndef json_stream_handler(output: lb.JsonConverterOutput):\n data_row = json.loads(output.json_str)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_stream(converter=lb.JsonConverter(),\n stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_stream(\n converter=lb.JsonConverter(), stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)", + "source": "data_rows = []\n\n\ndef json_stream_handler(output: lb.BufferedJsonConverterOutput):\n data_row = json.loads(output.json)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_buffered_stream(stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_buffered_stream(\n stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/basics/custom_embeddings.ipynb b/examples/basics/custom_embeddings.ipynb index 45a0b2df0..7eaf030bc 100644 --- a/examples/basics/custom_embeddings.ipynb +++ b/examples/basics/custom_embeddings.ipynb @@ -102,7 +102,7 @@ }, { "metadata": {}, - "source": "data_rows = []\n\n\ndef json_stream_handler(output: lb.JsonConverterOutput):\n data_row = 
json.loads(output.json_str)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_stream(converter=lb.JsonConverter(),\n stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_stream(\n converter=lb.JsonConverter(), stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)", + "source": "data_rows = []\n\n\ndef json_stream_handler(output: lb.BufferedJsonConverterOutput):\n data_row = json.loads(output.json)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_buffered_stream(stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_buffered_stream(\n stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/basics/data_rows.ipynb b/examples/basics/data_rows.ipynb index 46e758ee9..a6c87e691 100644 --- a/examples/basics/data_rows.ipynb +++ b/examples/basics/data_rows.ipynb @@ -113,7 +113,7 @@ }, { "metadata": {}, - "source": "data_rows = []\n\n\ndef json_stream_handler(output: lb.JsonConverterOutput):\n data_row = json.loads(output.json_str)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_stream(converter=lb.JsonConverter(),\n stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_stream(\n converter=lb.JsonConverter(), stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)", + "source": "data_rows = []\n\n\ndef json_stream_handler(output: lb.BufferedJsonConverterOutput):\n data_row = json.loads(output.json)\n data_rows.append(data_row)\n\n\nif export_task.has_errors():\n export_task.get_buffered_stream(stream_type=lb.StreamType.ERRORS).start(\n 
stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_buffered_stream(\n stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)", "cell_type": "code", "outputs": [], "execution_count": null @@ -236,7 +236,7 @@ }, { "metadata": {}, - "source": "# Fetch a data row from the first dataset example\nts = dataset.export()\nts.wait_till_done()\nDATA_ROW_ID = [json.loads(output.json_str) for output in ts.get_stream()\n ][0][\"data_row\"][\"id\"]\nGLOBAL_KEY = [json.loads(output.json_str) for output in ts.get_stream()\n ][0][\"data_row\"][\"global_key\"]\n\nprint(f\"Pick either a data row id : {DATA_ROW_ID} or global key: {GLOBAL_KEY}\")", + "source": "# Fetch a data row from the first dataset example\nts = dataset.export()\nts.wait_till_done()\nDATA_ROW_ID = [json.loads(output.json) for output in ts.get_buffered_stream()\n ][0][\"data_row\"][\"id\"]\nGLOBAL_KEY = [json.loads(output.json) for output in ts.get_buffered_stream()\n ][0][\"data_row\"][\"global_key\"]\n\nprint(f\"Pick either a data row id : {DATA_ROW_ID} or global key: {GLOBAL_KEY}\")", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/exports/composite_mask_export.ipynb b/examples/exports/composite_mask_export.ipynb index ced50a4ea..60e2cb0c7 100644 --- a/examples/exports/composite_mask_export.ipynb +++ b/examples/exports/composite_mask_export.ipynb @@ -211,7 +211,7 @@ }, { "metadata": {}, - "source": "stream = export_task.get_stream()\n\nmask_tool_rgb_mapping = {}\n\nfor output in stream:\n # Parse the JSON string from the output\n output_json = json.loads(output.json_str)\n\n # Get the labels for the specified project ID or an empty list if the project ID is not found\n project_labels = (output_json[\"projects\"].get(PROJECT_ID,\n {}).get(\"labels\", []))\n\n # Iterate through each label\n for label in project_labels:\n # Get the list of annotations (objects) for the label\n annotations = 
label[\"annotations\"].get(\"objects\", [])\n\n # Iterate through each annotation\n for annotation in annotations:\n # Check if the annotation is of type \"ImageSegmentationMask\"\n if annotation.get(\"annotation_kind\") == \"ImageSegmentationMask\":\n # Add the color RGB information to the mapping dictionary\n mask_tool_rgb_mapping.setdefault(annotation[\"name\"], []).append(\n annotation[\"composite_mask\"][\"color_rgb\"])\n\nprint(mask_tool_rgb_mapping)", + "source": "stream = export_task.get_buffered_stream()\n\nmask_tool_rgb_mapping = {}\n\nfor output in stream:\n # Parse the JSON string from the output\n output_json = json.loads(output.json)\n\n # Get the labels for the specified project ID or an empty list if the project ID is not found\n project_labels = (output_json[\"projects\"].get(PROJECT_ID,\n {}).get(\"labels\", []))\n\n # Iterate through each label\n for label in project_labels:\n # Get the list of annotations (objects) for the label\n annotations = label[\"annotations\"].get(\"objects\", [])\n\n # Iterate through each annotation\n for annotation in annotations:\n # Check if the annotation is of type \"ImageSegmentationMask\"\n if annotation.get(\"annotation_kind\") == \"ImageSegmentationMask\":\n # Add the color RGB information to the mapping dictionary\n mask_tool_rgb_mapping.setdefault(annotation[\"name\"], []).append(\n annotation[\"composite_mask\"][\"color_rgb\"])\n\nprint(mask_tool_rgb_mapping)", "cell_type": "code", "outputs": [], "execution_count": null @@ -246,7 +246,7 @@ }, { "metadata": {}, - "source": "tools_frames_color = {}\nstream = export_task_video.get_stream()\n\n# Iterate over each output in the stream\nfor output in stream:\n output_json = json.loads(output.json_str)\n\n # Iterate over the labels in the specific project\n for dr in output_json[\"projects\"][VIDEO_PROJECT_ID][\"labels\"]:\n frames_data = dr[\"annotations\"][\"frames\"]\n\n # Iterate over each frame in the frames data\n for frame_key, frame_value in 
frames_data.items():\n\n # Iterate over each annotation in the frame\n for annotation_key, annotation_value in frame_value.items():\n if \"objects\" in annotation_key and annotation_value.values():\n\n # Iterate over each object in the annotation\n for object_key, object_value in annotation_value.items():\n if (object_value[\"annotation_kind\"] ==\n \"VideoSegmentationMask\"):\n # Update tools_frames_color with object information\n tools_frames_color.setdefault(\n object_value[\"name\"], []).append({\n frame_key:\n object_value[\"composite_mask\"]\n [\"color_rgb\"]\n })\n\nprint(tools_frames_color)", + "source": "tools_frames_color = {}\nstream = export_task_video.get_buffered_stream()\n\n# Iterate over each output in the stream\nfor output in stream:\n output_json = json.loads(output.json)\n\n # Iterate over the labels in the specific project\n for dr in output_json[\"projects\"][VIDEO_PROJECT_ID][\"labels\"]:\n frames_data = dr[\"annotations\"][\"frames\"]\n\n # Iterate over each frame in the frames data\n for frame_key, frame_value in frames_data.items():\n\n # Iterate over each annotation in the frame\n for annotation_key, annotation_value in frame_value.items():\n if \"objects\" in annotation_key and annotation_value.values():\n\n # Iterate over each object in the annotation\n for object_key, object_value in annotation_value.items():\n if (object_value[\"annotation_kind\"] ==\n \"VideoSegmentationMask\"):\n # Update tools_frames_color with object information\n tools_frames_color.setdefault(\n object_value[\"name\"], []).append({\n frame_key:\n object_value[\"composite_mask\"]\n [\"color_rgb\"]\n })\n\nprint(tools_frames_color)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/exports/export_data.ipynb b/examples/exports/export_data.ipynb index e0fa8f728..0054a2d4a 100644 --- a/examples/exports/export_data.ipynb +++ b/examples/exports/export_data.ipynb @@ -154,14 +154,14 @@ }, { "metadata": {}, - "source": "# Provide results 
with JSON converter\n# Returns streamed JSON output strings from export task results/errors, one by one\n\n\n# Callback used for JSON Converter\ndef json_stream_handler(output: lb.JsonConverterOutput):\n print(output.json_str)\n\n\nif export_task.has_errors():\n export_task.get_stream(converter=lb.JsonConverter(),\n stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_stream(\n converter=lb.JsonConverter(), stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)\n\nprint(\n \"file size: \",\n export_task.get_total_file_size(stream_type=lb.StreamType.RESULT),\n)\nprint(\n \"line count: \",\n export_task.get_total_lines(stream_type=lb.StreamType.RESULT),\n)", + "source": "# Provide results with JSON converter\n# Returns streamed JSON output strings from export task results/errors, one by one\n\n\n# Callback used for JSON Converter\ndef json_stream_handler(output: lb.BufferedJsonConverterOutput):\n print(output.json)\n\n\nif export_task.has_errors():\n export_task.get_buffered_stream(stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_buffered_stream(\n stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)\n\nprint(\n \"file size: \",\n export_task.get_total_file_size(stream_type=lb.StreamType.RESULT),\n)\nprint(\n \"line count: \",\n export_task.get_total_lines(stream_type=lb.StreamType.RESULT),\n)", "cell_type": "code", "outputs": [], "execution_count": null }, { "metadata": {}, - "source": "# Uncomment to get stream results as a written file\n\n# Provide results with file converter\n\n# if export_task.has_errors():\n# export_task.get_stream(\n# converter=lb.FileConverter(file_path=\"./errors.txt\"),\n# stream_type=lb.StreamType.ERRORS\n# ).start()\n\n# if export_task.has_result():\n# export_task.get_stream(\n# 
converter=lb.FileConverter(file_path=\"./result.txt\"),\n# stream_type=lb.StreamType.RESULT\n# ).start()", + "source": "# Uncomment to get stream results as a written file\n\n# Provide results as a written file\n\n# if export_task.has_errors():\n# with open(\"./errors.txt\", \"w\") as file:\n# export_task.get_buffered_stream(\n# stream_type=lb.StreamType.ERRORS\n# ).start(stream_handler=lambda output: file.write(output.json + \"\\n\"))\n\n# if export_task.has_result():\n# with open(\"./result.txt\", \"w\") as file:\n# export_task.get_buffered_stream(\n# stream_type=lb.StreamType.RESULT\n# ).start(stream_handler=lambda output: file.write(output.json + \"\\n\"))", "cell_type": "code", "outputs": [], "execution_count": null @@ -237,7 +237,7 @@ }, { "metadata": {}, - "source": "# Provide results with JSON converter\n# Returns streamed JSON output strings from export task results/errors, one by one\n\n\n# Callback used for JSON Converter\ndef json_stream_handler(output: lb.JsonConverterOutput):\n print(output.json_str)\n\n\nif export_task.has_errors():\n export_task.get_stream(converter=lb.JsonConverter(),\n stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_stream(\n converter=lb.JsonConverter(), stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)\n\nprint(\n \"file size: \",\n export_task.get_total_file_size(stream_type=lb.StreamType.RESULT),\n)\nprint(\n \"line count: \",\n export_task.get_total_lines(stream_type=lb.StreamType.RESULT),\n)", + "source": "# Provide results with JSON converter\n# Returns streamed JSON output strings from export task results/errors, one by one\n\n\n# Callback used for JSON Converter\ndef json_stream_handler(output: lb.BufferedJsonConverterOutput):\n print(output.json)\n\n\nif export_task.has_errors():\n export_task.get_buffered_stream(stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_buffered_stream(\n 
stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)\n\nprint(\n \"file size: \",\n export_task.get_total_file_size(stream_type=lb.StreamType.RESULT),\n)\nprint(\n \"line count: \",\n export_task.get_total_lines(stream_type=lb.StreamType.RESULT),\n)", "cell_type": "code", "outputs": [], "execution_count": null @@ -288,7 +288,7 @@ }, { "metadata": {}, - "source": "def json_stream_handler(output: lb.JsonConverterOutput):\n print(output.json_str)\n\n\nif export_task.has_errors():\n export_task.get_stream(converter=lb.JsonConverter(),\n stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_stream(\n converter=lb.JsonConverter(), stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)\n\nprint(\n \"file size: \",\n export_task.get_total_file_size(stream_type=lb.StreamType.RESULT),\n)\nprint(\n \"line count: \",\n export_task.get_total_lines(stream_type=lb.StreamType.RESULT),\n)", + "source": "def json_stream_handler(output: lb.BufferedJsonConverterOutput):\n print(output.json)\n\n\nif export_task.has_errors():\n export_task.get_buffered_stream(stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_buffered_stream(\n stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)\n\nprint(\n \"file size: \",\n export_task.get_total_file_size(stream_type=lb.StreamType.RESULT),\n)\nprint(\n \"line count: \",\n export_task.get_total_lines(stream_type=lb.StreamType.RESULT),\n)", "cell_type": "code", "outputs": [], "execution_count": null @@ -346,7 +346,7 @@ }, { "metadata": {}, - "source": "# Provide results with JSON converter\n# Returns streamed JSON output strings from export task results/errors, one by one\n\n\n# Callback used for JSON Converter\ndef json_stream_handler(output: lb.JsonConverterOutput):\n 
print(output.json_str)\n\n\nif export_task.has_errors():\n export_task.get_stream(converter=lb.JsonConverter(),\n stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_stream(\n converter=lb.JsonConverter(), stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)\n\nprint(\n \"file size: \",\n export_task.get_total_file_size(stream_type=lb.StreamType.RESULT),\n)\nprint(\n \"line count: \",\n export_task.get_total_lines(stream_type=lb.StreamType.RESULT),\n)", + "source": "# Provide results with JSON converter\n# Returns streamed JSON output strings from export task results/errors, one by one\n\n\n# Callback used for JSON Converter\ndef json_stream_handler(output: lb.BufferedJsonConverterOutput):\n print(output.json)\n\n\nif export_task.has_errors():\n export_task.get_buffered_stream(stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_buffered_stream(\n stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)\n\nprint(\n \"file size: \",\n export_task.get_total_file_size(stream_type=lb.StreamType.RESULT),\n)\nprint(\n \"line count: \",\n export_task.get_total_lines(stream_type=lb.StreamType.RESULT),\n)", "cell_type": "code", "outputs": [], "execution_count": null @@ -410,7 +410,7 @@ }, { "metadata": {}, - "source": "# Provide results with JSON converter\n# Returns streamed JSON output strings from export task results/errors, one by one\n\n\n# Callback used for JSON Converter\ndef json_stream_handler(output: lb.JsonConverterOutput):\n print(output.json_str)\n\n\nif export_task.has_errors():\n export_task.get_stream(converter=lb.JsonConverter(),\n stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_stream(\n converter=lb.JsonConverter(), 
stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)\n\nprint(\n \"file size: \",\n export_task.get_total_file_size(stream_type=lb.StreamType.RESULT),\n)\nprint(\n \"line count: \",\n export_task.get_total_lines(stream_type=lb.StreamType.RESULT),\n)", + "source": "# Provide results with JSON converter\n# Returns streamed JSON output strings from export task results/errors, one by one\n\n\n# Callback used for JSON Converter\ndef json_stream_handler(output: lb.BufferedJsonConverterOutput):\n print(output.json)\n\n\nif export_task.has_errors():\n export_task.get_buffered_stream(stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_buffered_stream(\n stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)\n\nprint(\n \"file size: \",\n export_task.get_total_file_size(stream_type=lb.StreamType.RESULT),\n)\nprint(\n \"line count: \",\n export_task.get_total_lines(stream_type=lb.StreamType.RESULT),\n)", "cell_type": "code", "outputs": [], "execution_count": null @@ -468,7 +468,7 @@ }, { "metadata": {}, - "source": "# Provide results with JSON converter\n# Returns streamed JSON output strings from export task results/errors, one by one\n\n\n# Callback used for JSON Converter\ndef json_stream_handler(output: lb.JsonConverterOutput):\n print(output.json_str)\n\n\nif export_task.has_errors():\n export_task.get_stream(converter=lb.JsonConverter(),\n stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_stream(\n converter=lb.JsonConverter(), stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)\n\nprint(\n \"file size: \",\n export_task.get_total_file_size(stream_type=lb.StreamType.RESULT),\n)\nprint(\n \"line count: \",\n export_task.get_total_lines(stream_type=lb.StreamType.RESULT),\n)", + "source": "# Provide 
results with JSON converter\n# Returns streamed JSON output strings from export task results/errors, one by one\n\n\n# Callback used for JSON Converter\ndef json_stream_handler(output: lb.BufferedJsonConverterOutput):\n print(output.json)\n\n\nif export_task.has_errors():\n export_task.get_buffered_stream(stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_buffered_stream(\n stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)\n\nprint(\n \"file size: \",\n export_task.get_total_file_size(stream_type=lb.StreamType.RESULT),\n)\nprint(\n \"line count: \",\n export_task.get_total_lines(stream_type=lb.StreamType.RESULT),\n)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/prediction_upload/pdf_predictions.ipynb b/examples/prediction_upload/pdf_predictions.ipynb index f1d2637e1..942d40e9e 100644 --- a/examples/prediction_upload/pdf_predictions.ipynb +++ b/examples/prediction_upload/pdf_predictions.ipynb @@ -256,7 +256,7 @@ }, { "metadata": {}, - "source": "client.enable_experimental = True\ntask = lb.DataRow.export(client=client, global_keys=[global_key])\ntask.wait_till_done()\nstream = task.get_stream()\n\ntext_layer = \"\"\nfor output in stream:\n output_json = json.loads(output.json_str)\n text_layer = output_json[\"media_attributes\"][\"text_layer_url\"]\nprint(text_layer)", + "source": "client.enable_experimental = True\ntask = lb.DataRow.export(client=client, global_keys=[global_key])\ntask.wait_till_done()\nstream = task.get_buffered_stream()\n\ntext_layer = \"\"\nfor output in stream:\n output_json = json.loads(output.json)\n text_layer = output_json[\"media_attributes\"][\"text_layer_url\"]\nprint(text_layer)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/project_configuration/queue_management.ipynb b/examples/project_configuration/queue_management.ipynb index 
3ee993dd0..0b62ea9d3 100644 --- a/examples/project_configuration/queue_management.ipynb +++ b/examples/project_configuration/queue_management.ipynb @@ -162,7 +162,7 @@ }, { "metadata": {}, - "source": "# Get data rows from project\ndata_rows = []\n\n\ndef json_stream_handler(output: lb.JsonConverterOutput):\n data_row = json.loads(output.json_str)\n data_rows.append(lb.GlobalKey(data_row[\"data_row\"][\"global_key\"])\n ) # Convert json data row into data row identifier object\n\n\nif export_task.has_errors():\n export_task.get_stream(converter=lb.JsonConverter(),\n stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_stream(\n converter=lb.JsonConverter(), stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)", + "source": "# Get data rows from project\ndata_rows = []\n\n\ndef json_stream_handler(output: lb.BufferedJsonConverterOutput):\n data_row = json.loads(output.json)\n data_rows.append(lb.GlobalKey(data_row[\"data_row\"][\"global_key\"])\n ) # Convert json data row into data row identifier object\n\n\nif export_task.has_errors():\n export_task.get_buffered_stream(stream_type=lb.StreamType.ERRORS).start(\n stream_handler=lambda error: print(error))\n\nif export_task.has_result():\n export_json = export_task.get_buffered_stream(\n stream_type=lb.StreamType.RESULT).start(\n stream_handler=json_stream_handler)", "cell_type": "code", "outputs": [], "execution_count": null