diff --git a/examples/annotation_import/audio.ipynb b/examples/annotation_import/audio.ipynb index c798be914..437130a9e 100644 --- a/examples/annotation_import/audio.ipynb +++ b/examples/annotation_import/audio.ipynb @@ -152,7 +152,7 @@ }, { "metadata": {}, - "source": "# Create one Labelbox dataset\n\nglobal_key = \"sample-audio-1.mp3\"\n\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/audio-sample-data/sample-audio-1.mp3\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(name=\"audio_annotation_import_demo_dataset\")\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)", + "source": "# Create one Labelbox dataset\n\nglobal_key = \"sample-audio-1.mp3\" + str(uuid.uuid4())\n\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/audio-sample-data/sample-audio-1.mp3\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(name=\"audio_annotation_import_demo_dataset\")\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/annotation_import/conversational.ipynb b/examples/annotation_import/conversational.ipynb index d62ae9ee8..fd691b9a2 100644 --- a/examples/annotation_import/conversational.ipynb +++ b/examples/annotation_import/conversational.ipynb @@ -168,7 +168,7 @@ }, { "metadata": {}, - "source": "# Create one Labelbox dataset\n\nglobal_key = \"conversation-1.json\"\n\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-developer-testing-assets/conversational_text/1000-conversations/conversation-1.json\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(\n name=\"conversational_annotation_import_demo_dataset\")\ntask = 
dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)", + "source": "# Create one Labelbox dataset\n\nglobal_key = \"conversation-1.json\" + str(uuid.uuid4())\n\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-developer-testing-assets/conversational_text/1000-conversations/conversation-1.json\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(\n name=\"conversational_annotation_import_demo_dataset\")\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/annotation_import/conversational_LLM.ipynb b/examples/annotation_import/conversational_LLM.ipynb index fa1fe02b0..a1870990e 100644 --- a/examples/annotation_import/conversational_LLM.ipynb +++ b/examples/annotation_import/conversational_LLM.ipynb @@ -187,7 +187,7 @@ }, { "metadata": {}, - "source": "global_key = \"pairwise_shooping_asset\"\n\n# Upload data rows\nconvo_data = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/conversational-sample-data/pairwise_shopping_2.json\",\n \"global_key\":\n global_key,\n}\n\n# Create a dataset\ndataset = client.create_dataset(name=\"pairwise_annotation_demo\")\n# Create a datarows\ntask = dataset.create_data_rows([convo_data])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", + "source": "global_key = \"pairwise_shooping_asset\" + str(uuid.uuid4())\n\n# Upload data rows\nconvo_data = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/conversational-sample-data/pairwise_shopping_2.json\",\n \"global_key\":\n global_key,\n}\n\n# Create a dataset\ndataset = client.create_dataset(name=\"pairwise_annotation_demo\")\n# Create a datarows\ntask = 
dataset.create_data_rows([convo_data])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/annotation_import/dicom.ipynb b/examples/annotation_import/dicom.ipynb index cd4c88497..3f6aa0326 100644 --- a/examples/annotation_import/dicom.ipynb +++ b/examples/annotation_import/dicom.ipynb @@ -119,7 +119,7 @@ }, { "metadata": {}, - "source": "global_key = \"sample-dicom-1.dcm\"\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/dicom-sample-data/sample-dicom-1.dcm\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(name=\"dicom_demo_dataset\")\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors :\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", + "source": "global_key = \"sample-dicom-1.dcm\" + str(uuid.uuid4())\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/dicom-sample-data/sample-dicom-1.dcm\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(name=\"dicom_demo_dataset\")\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors :\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/annotation_import/html.ipynb b/examples/annotation_import/html.ipynb index 7ef731bce..567482878 100644 --- a/examples/annotation_import/html.ipynb +++ b/examples/annotation_import/html.ipynb @@ -159,7 +159,7 @@ }, { "metadata": {}, - "source": "# Create one Labelbox dataset\n\nglobal_key = \"sample_html_1.html\"\n\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/html_sample_data/sample_html_1.html\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(\n name=\"html_annotation_import_demo_dataset\",\n iam_integration=\n None, # 
Removing this argument will default to the organziation's default iam integration\n)\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)", + "source": "# Create one Labelbox dataset\n\nglobal_key = \"sample_html_1.html\" + str(uuid.uuid4())\n\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/html_sample_data/sample_html_1.html\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(\n name=\"html_annotation_import_demo_dataset\",\n iam_integration=\n None, # Removing this argument will default to the organziation's default iam integration\n)\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/annotation_import/image.ipynb b/examples/annotation_import/image.ipynb index b8c6cd2a6..90ecf2123 100644 --- a/examples/annotation_import/image.ipynb +++ b/examples/annotation_import/image.ipynb @@ -288,7 +288,7 @@ }, { "metadata": {}, - "source": "# send a sample image as batch to the project\nglobal_key = \"2560px-Kitano_Street_Kobe01s5s4110.jpeg\"\n\ntest_img_url = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(name=\"image-demo-dataset\")\ntask = dataset.create_data_rows([test_img_url])\ntask.wait_till_done()\n\nprint(f\"Failed data rows: {task.failed_data_rows}\")\nprint(f\"Errors: {task.errors}\")\n\nif task.errors:\n for error in task.errors:\n if (\"Duplicate global key\" in error[\"message\"] and\n dataset.row_count == 0):\n # If the global key already exists in the workspace the dataset will be created empty, so we can delete it.\n print(f\"Deleting empty dataset: {dataset}\")\n 
dataset.delete()", + "source": "# send a sample image as batch to the project\nglobal_key = \"2560px-Kitano_Street_Kobe01s5s4110.jpeg\" + str(uuid.uuid4())\n\ntest_img_url = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(name=\"image-demo-dataset\")\ntask = dataset.create_data_rows([test_img_url])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/annotation_import/pdf.ipynb b/examples/annotation_import/pdf.ipynb index 5c0742ba5..1a9f1fc50 100644 --- a/examples/annotation_import/pdf.ipynb +++ b/examples/annotation_import/pdf.ipynb @@ -205,7 +205,7 @@ }, { "metadata": {}, - "source": "global_key = \"0801.3483_doc.pdf\"\nimg_url = {\n \"row_data\": {\n \"pdf_url\":\n \"https://storage.googleapis.com/labelbox-datasets/arxiv-pdf/data/99-word-token-pdfs/0801.3483.pdf\"\n },\n \"global_key\": global_key,\n}\n\ndataset = client.create_dataset(name=\"pdf_demo_dataset\")\ntask = dataset.create_data_rows([img_url])\ntask.wait_till_done()\nprint(f\"Failed data rows: {task.failed_data_rows}\")\nprint(f\"Errors: {task.errors}\")\n\nif task.errors:\n for error in task.errors:\n if (\"Duplicate global key\" in error[\"message\"] and\n dataset.row_count == 0):\n # If the global key already exists in the workspace the dataset will be created empty, so we can delete it.\n print(f\"Deleting empty dataset: {dataset}\")\n dataset.delete()", + "source": "global_key = \"0801.3483_doc.pdf\" + str(uuid.uuid4())\nimg_url = {\n \"row_data\": {\n \"pdf_url\":\n \"https://storage.googleapis.com/labelbox-datasets/arxiv-pdf/data/99-word-token-pdfs/0801.3483.pdf\"\n },\n \"global_key\": global_key,\n}\n\ndataset = client.create_dataset(name=\"pdf_demo_dataset\")\ntask = 
dataset.create_data_rows([img_url])\ntask.wait_till_done()\nprint(f\"Failed data rows: {task.failed_data_rows}\")\nprint(f\"Errors: {task.errors}\")\n\nif task.errors:\n for error in task.errors:\n if (\"Duplicate global key\" in error[\"message\"] and\n dataset.row_count == 0):\n # If the global key already exists in the workspace the dataset will be created empty, so we can delete it.\n print(f\"Deleting empty dataset: {dataset}\")\n dataset.delete()", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/annotation_import/text.ipynb b/examples/annotation_import/text.ipynb index 1d4bb99d6..c682be2ed 100644 --- a/examples/annotation_import/text.ipynb +++ b/examples/annotation_import/text.ipynb @@ -168,7 +168,7 @@ }, { "metadata": {}, - "source": "# You can now include ohter fields like attachments, media type and metadata in the data row creation step: https://docs.labelbox.com/reference/text-file\nglobal_key = \"lorem-ipsum.txt\"\ntext_asset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-sample-datasets/nlp/lorem-ipsum.txt\",\n \"global_key\":\n global_key,\n \"media_type\":\n \"TEXT\",\n \"attachments\": [{\n \"type\":\n \"TEXT_URL\",\n \"value\":\n \"https://storage.googleapis.com/labelbox-sample-datasets/Docs/text_attachment.txt\",\n }],\n}\n\ndataset = client.create_dataset(\n name=\"text_annotation_import_demo_dataset\",\n iam_integration=\n None, # Removing this argument will default to the organziation's default iam integration\n)\ntask = dataset.create_data_rows([text_asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", + "source": "# You can now include ohter fields like attachments, media type and metadata in the data row creation step: https://docs.labelbox.com/reference/text-file\nglobal_key = \"lorem-ipsum.txt\" + str(uuid.uuid4())\ntext_asset = {\n \"row_data\":\n 
\"https://storage.googleapis.com/labelbox-sample-datasets/nlp/lorem-ipsum.txt\",\n \"global_key\":\n global_key,\n \"media_type\":\n \"TEXT\",\n \"attachments\": [{\n \"type\":\n \"TEXT_URL\",\n \"value\":\n \"https://storage.googleapis.com/labelbox-sample-datasets/Docs/text_attachment.txt\",\n }],\n}\n\ndataset = client.create_dataset(\n name=\"text_annotation_import_demo_dataset\",\n iam_integration=\n None, # Removing this argument will default to the organziation's default iam integration\n)\ntask = dataset.create_data_rows([text_asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/annotation_import/tiled.ipynb b/examples/annotation_import/tiled.ipynb index 19cb28d4e..a5c0ea969 100644 --- a/examples/annotation_import/tiled.ipynb +++ b/examples/annotation_import/tiled.ipynb @@ -195,7 +195,7 @@ }, { "metadata": {}, - "source": "top_left_bound = lb_types.Point(x=-99.21052827588443, y=19.400498983095076)\nbottom_right_bound = lb_types.Point(x=-99.20534818927473, y=19.39533555271248)\n\nepsg = lb_types.EPSG.EPSG4326\nbounds = lb_types.TiledBounds(epsg=epsg,\n bounds=[top_left_bound, bottom_right_bound])\nglobal_key = \"mexico_city\"\n\ntile_layer = lb_types.TileLayer(\n url=\n \"https://s3-us-west-1.amazonaws.com/lb-tiler-layers/mexico_city/{z}/{x}/{y}.png\"\n)\n\ntiled_image_data = lb_types.TiledImageData(tile_layer=tile_layer,\n tile_bounds=bounds,\n zoom_levels=[17, 23])\n\nasset = {\n \"row_data\": tiled_image_data.asdict(),\n \"global_key\": global_key,\n \"media_type\": \"TMS_GEO\",\n}\n\ndataset = client.create_dataset(name=\"geo_demo_dataset\")\ntask = dataset.create_data_rows([asset])\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", + "source": "top_left_bound = lb_types.Point(x=-99.21052827588443, y=19.400498983095076)\nbottom_right_bound = 
lb_types.Point(x=-99.20534818927473, y=19.39533555271248)\n\nepsg = lb_types.EPSG.EPSG4326\nbounds = lb_types.TiledBounds(epsg=epsg,\n bounds=[top_left_bound, bottom_right_bound])\nglobal_key = \"mexico_city\" + str(uuid.uuid4())\n\ntile_layer = lb_types.TileLayer(\n url=\n \"https://s3-us-west-1.amazonaws.com/lb-tiler-layers/mexico_city/{z}/{x}/{y}.png\"\n)\n\ntiled_image_data = lb_types.TiledImageData(tile_layer=tile_layer,\n tile_bounds=bounds,\n zoom_levels=[17, 23])\n\nasset = {\n \"row_data\": tiled_image_data.asdict(),\n \"global_key\": global_key,\n \"media_type\": \"TMS_GEO\",\n}\n\ndataset = client.create_dataset(name=\"geo_demo_dataset\")\ntask = dataset.create_data_rows([asset])\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/annotation_import/video.ipynb b/examples/annotation_import/video.ipynb index 726e22738..8a9369c21 100644 --- a/examples/annotation_import/video.ipynb +++ b/examples/annotation_import/video.ipynb @@ -253,7 +253,7 @@ }, { "metadata": {}, - "source": "global_key = \"sample-video-jellyfish.mp4\"\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/video-sample-data/sample-video-2.mp4\",\n \"global_key\":\n global_key,\n \"media_type\":\n \"VIDEO\",\n}\n\ndataset = client.create_dataset(\n name=\"video_demo_dataset\",\n iam_integration=\n None, # If this argument is removed, labelbox will use the default integration for your organization.\n)\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(f\"Failed data rows: {task.failed_data_rows}\")\nprint(f\"Errors: {task.errors}\")\n\nif task.errors:\n for error in task.errors:\n if (\"Duplicate global key\" in error[\"message\"] and\n dataset.row_count == 0):\n # If the global key already exists in the workspace the dataset will be created empty, so we can delete it.\n print(f\"Deleting empty dataset: {dataset}\")\n 
dataset.delete()", + "source": "global_key = \"sample-video-jellyfish.mp4\" + str(uuid.uuid4())\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/video-sample-data/sample-video-2.mp4\",\n \"global_key\":\n global_key,\n \"media_type\":\n \"VIDEO\",\n}\n\ndataset = client.create_dataset(\n name=\"video_demo_dataset\",\n iam_integration=\n None, # If this argument is removed, labelbox will use the default integration for your organization.\n)\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(f\"Failed data rows: {task.failed_data_rows}\")\nprint(f\"Errors: {task.errors}\")", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/basics/data_row_metadata.ipynb b/examples/basics/data_row_metadata.ipynb index fa5970ce2..8a63a0792 100644 --- a/examples/basics/data_row_metadata.ipynb +++ b/examples/basics/data_row_metadata.ipynb @@ -218,7 +218,7 @@ }, { "metadata": {}, - "source": "# A simple example of uploading data rows with metadata\ndataset = client.create_dataset(\n name=\"Simple Data Rows import with metadata example\")\nglobal_key = \"s_basic.jpg\"\ndata_row = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-sample-datasets/Docs/basic.jpg\",\n \"global_key\":\n global_key,\n}\n# This line works with dictionaries as well as schemas and fields created with DataRowMetadataField\ndata_row[\"metadata_fields\"] = custom_metadata_fields + [\n split_metadata_field,\n capture_datetime_field_dict,\n tag_metadata_field,\n]\n\ntask = dataset.create_data_rows([data_row])\ntask.wait_till_done()\nresult_task = task.result\nprint(result_task)", + "source": "# A simple example of uploading data rows with metadata\ndataset = client.create_dataset(\n name=\"Simple Data Rows import with metadata example\")\nglobal_key = \"s_basic.jpg\" + str(uuid4())\ndata_row = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-sample-datasets/Docs/basic.jpg\",\n \"global_key\":\n global_key,\n}\n# 
This line works with dictionaries as well as schemas and fields created with DataRowMetadataField\ndata_row[\"metadata_fields\"] = custom_metadata_fields + [\n split_metadata_field,\n capture_datetime_field_dict,\n tag_metadata_field,\n]\n\ntask = dataset.create_data_rows([data_row])\ntask.wait_till_done()\nresult_task = task.result\nprint(result_task)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/model_experiments/custom_metrics_demo.ipynb b/examples/model_experiments/custom_metrics_demo.ipynb index 8e0412f40..ed8516d2a 100644 --- a/examples/model_experiments/custom_metrics_demo.ipynb +++ b/examples/model_experiments/custom_metrics_demo.ipynb @@ -219,7 +219,7 @@ }, { "metadata": {}, - "source": "# send a sample image as batch to the project\nglobal_key = \"2560px-Kitano_Street_Kobe01s5s4110.jpeg\"\ntest_img_urls = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(name=\"Custom metrics demo\",\n iam_integration=None)\ntask = dataset.create_data_rows([test_img_urls])\n\nprint(f\"Failed data rows: {task.failed_data_rows}\")\nprint(f\"Errors: {task.errors}\")\n\nif task.errors:\n for error in task.errors:\n if (\"Duplicate global key\" in error[\"message\"] and\n dataset.row_count == 0):\n # If the global key already exists in the workspace the dataset will be created empty, so we can delete it.\n print(f\"Deleting empty dataset: {dataset}\")\n dataset.delete()", + "source": "# send a sample image as batch to the project\nglobal_key = \"2560px-Kitano_Street_Kobe01s5s4110.jpeg\" + str(uuid.uuid4())\ntest_img_urls = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(name=\"Custom metrics demo\",\n iam_integration=None)\ntask = 
dataset.create_data_rows([test_img_urls])\n\nprint(f\"Failed data rows: {task.failed_data_rows}\")\nprint(f\"Errors: {task.errors}\")\n\nif task.errors:\n for error in task.errors:\n if (\"Duplicate global key\" in error[\"message\"] and\n dataset.row_count == 0):\n # If the global key already exists in the workspace the dataset will be created empty, so we can delete it.\n print(f\"Deleting empty dataset: {dataset}\")\n dataset.delete()", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/model_experiments/model_predictions_to_project.ipynb b/examples/model_experiments/model_predictions_to_project.ipynb index a1560feec..ee86ff1b2 100644 --- a/examples/model_experiments/model_predictions_to_project.ipynb +++ b/examples/model_experiments/model_predictions_to_project.ipynb @@ -128,7 +128,7 @@ }, { "metadata": {}, - "source": "# send a sample image as data row for a dataset\nglobal_key = \"2560px-Kitano_Street_Kobe01s5s4110\"\n\ntest_img_url = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(name=\"foundry-demo-dataset\")\ntask = dataset.create_data_rows([test_img_url])\ntask.wait_till_done()\n\nprint(f\"Errors: {task.errors}\")\nprint(f\"Failed data rows: {task.failed_data_rows}\")\n\nif task.errors:\n for error in task.errors:\n if (\"Duplicate global key\" in error[\"message\"] and\n dataset.row_count == 0):\n # If the global key already exists in the workspace the dataset will be created empty, so we can delete it.\n print(f\"Deleting empty dataset: {dataset}\")\n dataset.delete()", + "source": "# send a sample image as data row for a dataset\nglobal_key = \"2560px-Kitano_Street_Kobe01s5s4110\" + str(uuid.uuid4())\n\ntest_img_url = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg\",\n \"global_key\":\n 
global_key,\n}\n\ndataset = client.create_dataset(name=\"foundry-demo-dataset\")\ntask = dataset.create_data_rows([test_img_url])\ntask.wait_till_done()\n\nprint(f\"Errors: {task.errors}\")\nprint(f\"Failed data rows: {task.failed_data_rows}\")\n\nif task.errors:\n for error in task.errors:\n if (\"Duplicate global key\" in error[\"message\"] and\n dataset.row_count == 0):\n # If the global key already exists in the workspace the dataset will be created empty, so we can delete it.\n print(f\"Deleting empty dataset: {dataset}\")\n dataset.delete()", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/prediction_upload/conversational_LLM_predictions.ipynb b/examples/prediction_upload/conversational_LLM_predictions.ipynb index be9878401..7d0b889ad 100644 --- a/examples/prediction_upload/conversational_LLM_predictions.ipynb +++ b/examples/prediction_upload/conversational_LLM_predictions.ipynb @@ -188,7 +188,7 @@ }, { "metadata": {}, - "source": "global_key = \"pairwise_shooping_asset\"\nconvo_data = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/conversational-sample-data/pairwise_shopping_2.json\",\n \"global_key\":\n global_key,\n}\n# Create a dataset\ndataset = client.create_dataset(name=\"pairwise_prediction_demo\")\n# Create a datarows\ntask = dataset.create_data_rows([convo_data])\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", + "source": "global_key = \"pairwise_shooping_asset\" + str(uuid.uuid4())\nconvo_data = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/conversational-sample-data/pairwise_shopping_2.json\",\n \"global_key\":\n global_key,\n}\n# Create a dataset\ndataset = client.create_dataset(name=\"pairwise_prediction_demo\")\n# Create a datarows\ntask = dataset.create_data_rows([convo_data])\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", "cell_type": "code", "outputs": [], "execution_count": null 
diff --git a/examples/prediction_upload/conversational_predictions.ipynb b/examples/prediction_upload/conversational_predictions.ipynb index b6a649d56..1b6da1ffc 100644 --- a/examples/prediction_upload/conversational_predictions.ipynb +++ b/examples/prediction_upload/conversational_predictions.ipynb @@ -146,7 +146,7 @@ }, { "metadata": {}, - "source": "# Create one Labelbox dataset\n\nglobal_key = \"conversation-1.json\"\n\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-developer-testing-assets/conversational_text/1000-conversations/conversation-1.json\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(\n name=\"conversational_annotation_import_demo_dataset\")\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)", + "source": "# Create one Labelbox dataset\n\nglobal_key = \"conversation-1.json\" + str(uuid.uuid4())\n\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-developer-testing-assets/conversational_text/1000-conversations/conversation-1.json\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(\n name=\"conversational_annotation_import_demo_dataset\")\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/prediction_upload/geospatial_predictions.ipynb b/examples/prediction_upload/geospatial_predictions.ipynb index e3c1212f3..d9035b969 100644 --- a/examples/prediction_upload/geospatial_predictions.ipynb +++ b/examples/prediction_upload/geospatial_predictions.ipynb @@ -168,7 +168,7 @@ }, { "metadata": {}, - "source": "top_left_bound = lb_types.Point(x=-99.21052827588443, y=19.400498983095076)\nbottom_right_bound = lb_types.Point(x=-99.20534818927473, y=19.39533555271248)\n\nepsg = 
lb_types.EPSG.EPSG4326\nbounds = lb_types.TiledBounds(epsg=epsg,\n                               bounds=[top_left_bound, bottom_right_bound])\nglobal_key = \"mexico_city\"\n\ntile_layer = lb_types.TileLayer(\n    url=\n    \"https://s3-us-west-1.amazonaws.com/lb-tiler-layers/mexico_city/{z}/{x}/{y}.png\"\n)\n\ntiled_image_data = lb_types.TiledImageData(tile_layer=tile_layer,\n                                           tile_bounds=bounds,\n                                           zoom_levels=[17, 23])\n\nasset = {\n    \"row_data\": tiled_image_data.asdict(),\n    \"global_key\": global_key,\n    \"media_type\": \"TMS_GEO\",\n}\n\ndataset = client.create_dataset(name=\"geo_demo_dataset\")\ntask = dataset.create_data_rows([asset])\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", +    "source": "top_left_bound = lb_types.Point(x=-99.21052827588443, y=19.400498983095076)\nbottom_right_bound = lb_types.Point(x=-99.20534818927473, y=19.39533555271248)\n\nepsg = lb_types.EPSG.EPSG4326\nbounds = lb_types.TiledBounds(epsg=epsg,\n                               bounds=[top_left_bound, bottom_right_bound])\nglobal_key = \"mexico_city\" + str(uuid.uuid4())\n\ntile_layer = lb_types.TileLayer(\n    url=\n    \"https://s3-us-west-1.amazonaws.com/lb-tiler-layers/mexico_city/{z}/{x}/{y}.png\"\n)\n\ntiled_image_data = lb_types.TiledImageData(tile_layer=tile_layer,\n                                           tile_bounds=bounds,\n                                           zoom_levels=[17, 23])\n\nasset = {\n    \"row_data\": tiled_image_data.asdict(),\n    \"global_key\": global_key,\n    \"media_type\": \"TMS_GEO\",\n}\n\ndataset = client.create_dataset(name=\"geo_demo_dataset\")\ntask = dataset.create_data_rows([asset])\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/prediction_upload/html_predictions.ipynb b/examples/prediction_upload/html_predictions.ipynb index 14e5510a1..f78f256ea 100644 --- a/examples/prediction_upload/html_predictions.ipynb +++ b/examples/prediction_upload/html_predictions.ipynb @@ -131,7 +131,7 @@ }, { "metadata": {}, -    "source": "# send a sample image as batch 
to the project\nglobal_key = \"sample_html_2.html\"\n\ntest_img_url = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/html_sample_data/sample_html_2.html\",\n \"global_key\":\n global_key,\n}\ndataset = client.create_dataset(\n name=\"html prediction demo dataset\",\n iam_integration=\n None, # Removing this argument will default to the organziation's default iam integration\n)\ntask = dataset.create_data_rows([test_img_url])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", + "source": "# send a sample image as batch to the project\nglobal_key = \"sample_html_2.html\" + str(uuid.uuid4())\n\ntest_img_url = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/html_sample_data/sample_html_2.html\",\n \"global_key\":\n global_key,\n}\ndataset = client.create_dataset(\n name=\"html prediction demo dataset\",\n iam_integration=\n None, # Removing this argument will default to the organziation's default iam integration\n)\ntask = dataset.create_data_rows([test_img_url])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/prediction_upload/image_predictions.ipynb b/examples/prediction_upload/image_predictions.ipynb index b28c9f78f..69add64e3 100644 --- a/examples/prediction_upload/image_predictions.ipynb +++ b/examples/prediction_upload/image_predictions.ipynb @@ -266,7 +266,7 @@ }, { "metadata": {}, - "source": "# send a sample image as batch to the project\nglobal_key = \"2560px-Kitano_Street_Kobe01s.jpeg\"\ntest_img_url = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg\",\n \"global_key\":\n global_key,\n}\ndataset = client.create_dataset(name=\"image_prediction_demo\")\ntask = dataset.create_data_rows([test_img_url])\nprint(\"Errors:\", 
task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", + "source": "# send a sample image as batch to the project\nglobal_key = \"2560px-Kitano_Street_Kobe01s.jpeg\" + str(uuid.uuid4())\ntest_img_url = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg\",\n \"global_key\":\n global_key,\n}\ndataset = client.create_dataset(name=\"image_prediction_demo\")\ntask = dataset.create_data_rows([test_img_url])\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/prediction_upload/pdf_predictions.ipynb b/examples/prediction_upload/pdf_predictions.ipynb index 82d9f05cb..f1d2637e1 100644 --- a/examples/prediction_upload/pdf_predictions.ipynb +++ b/examples/prediction_upload/pdf_predictions.ipynb @@ -180,7 +180,7 @@ }, { "metadata": {}, - "source": "global_key = \"0801.3483.pdf\"\nimg_url = {\n \"row_data\": {\n \"pdf_url\":\n \"https://storage.googleapis.com/labelbox-datasets/arxiv-pdf/data/99-word-token-pdfs/0801.3483.pdf\"\n },\n \"global_key\": global_key,\n}\n\ndataset = client.create_dataset(name=\"pdf_demo_dataset\")\ntask = dataset.create_data_rows([img_url])\ntask.wait_till_done()\nprint(f\"Failed data rows: {task.failed_data_rows}\")\nprint(f\"Errors: {task.errors}\")\n\nif task.errors:\n for error in task.errors:\n if (\"Duplicate global key\" in error[\"message\"] and\n dataset.row_count == 0):\n # If the global key already exists in the workspace the dataset will be created empty, so we can delete it.\n print(f\"Deleting empty dataset: {dataset}\")\n dataset.delete()", + "source": "global_key = \"0801.3483.pdf\" + str(uuid.uuid4())\nimg_url = {\n \"row_data\": {\n \"pdf_url\":\n \"https://storage.googleapis.com/labelbox-datasets/arxiv-pdf/data/99-word-token-pdfs/0801.3483.pdf\"\n },\n \"global_key\": global_key,\n}\n\ndataset = 
client.create_dataset(name=\"pdf_demo_dataset\")\ntask = dataset.create_data_rows([img_url])\ntask.wait_till_done()\nprint(f\"Failed data rows: {task.failed_data_rows}\")\nprint(f\"Errors: {task.errors}\")\n\nif task.errors:\n for error in task.errors:\n if (\"Duplicate global key\" in error[\"message\"] and\n dataset.row_count == 0):\n # If the global key already exists in the workspace the dataset will be created empty, so we can delete it.\n print(f\"Deleting empty dataset: {dataset}\")\n dataset.delete()", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/prediction_upload/text_predictions.ipynb b/examples/prediction_upload/text_predictions.ipynb index 8943ca17b..7e4cd048e 100644 --- a/examples/prediction_upload/text_predictions.ipynb +++ b/examples/prediction_upload/text_predictions.ipynb @@ -140,7 +140,7 @@ }, { "metadata": {}, - "source": "# send a sample image as batch to the project\nglobal_key = \"lorem-ipsum.txt\"\ntest_img_url = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-sample-datasets/nlp/lorem-ipsum.txt\",\n \"global_key\":\n global_key,\n}\ndataset = client.create_dataset(\n name=\"text prediction demo dataset\",\n iam_integration=\n None, # Removing this argument will default to the organziation's default iam integration\n)\ntask = dataset.create_data_rows([test_img_url])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", + "source": "# send a sample image as batch to the project\nglobal_key = \"lorem-ipsum.txt\" + str(uuid.uuid4())\ntest_img_url = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-sample-datasets/nlp/lorem-ipsum.txt\",\n \"global_key\":\n global_key,\n}\ndataset = client.create_dataset(\n name=\"text prediction demo dataset\",\n iam_integration=\n None, # Removing this argument will default to the organziation's default iam integration\n)\ntask = 
dataset.create_data_rows([test_img_url])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)", "cell_type": "code", "outputs": [], "execution_count": null diff --git a/examples/prediction_upload/video_predictions.ipynb b/examples/prediction_upload/video_predictions.ipynb index b4a7a5cd4..63fe579bd 100644 --- a/examples/prediction_upload/video_predictions.ipynb +++ b/examples/prediction_upload/video_predictions.ipynb @@ -161,7 +161,7 @@ }, { "metadata": {}, - "source": "# send a sample image as batch to the project\nglobal_key = \"sample-video-2.mp4\"\ntest_img_url = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/video-sample-data/sample-video-2.mp4\",\n \"global_key\":\n global_key,\n}\ndataset = client.create_dataset(\n name=\"Video prediction demo\",\n iam_integration=\n None, # Removing this argument will default to the organziation's default iam integration\n)\ntask = dataset.create_data_rows([test_img_url])\ntask.wait_till_done()\nprint(\"Errors: \", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)", + "source": "# send a sample image as batch to the project\nglobal_key = \"sample-video-2.mp4\" + str(uuid.uuid4())\ntest_img_url = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/video-sample-data/sample-video-2.mp4\",\n \"global_key\":\n global_key,\n}\ndataset = client.create_dataset(\n name=\"Video prediction demo\",\n iam_integration=\n None, # Removing this argument will default to the organziation's default iam integration\n)\ntask = dataset.create_data_rows([test_img_url])\ntask.wait_till_done()\nprint(\"Errors: \", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)", "cell_type": "code", "outputs": [], "execution_count": null