diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index a8e27bc..815f404 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -11,6 +11,19 @@ on: env: DJANGO_SETTINGS_MODULE: eudr_backend.settings + EE_ACCOUNT_NAME: ${{ secrets.EE_ACCOUNT_NAME }} + AGSTACK_API_EMAIL: ${{ secrets.AGSTACK_API_EMAIL }} + AGSTACK_API_PASSWORD: ${{ secrets.AGSTACK_API_PASSWORD }} + EMAIL_HOST_USER: ${{ secrets.EMAIL_HOST_USER }} + EMAIL_HOST_PASSWORD: ${{ secrets.EMAIL_HOST_PASSWORD }} + EMAIL_HOST_DEFAULT_USER: ${{ secrets.EMAIL_HOST_DEFAULT_USER }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_STORAGE_BUCKET_NAME: ${{ secrets.AWS_STORAGE_BUCKET_NAME }} + AWS_S3_REGION_NAME: ${{ secrets.AWS_S3_REGION_NAME }} + WHISP_API_KEY: ${{ secrets.WHISP_API_KEY }} + SERVER_USER: ${{ secrets.SERVER_USER }} + SERVER_HOST: ${{ secrets.SERVER_HOST }} jobs: test: @@ -47,6 +60,7 @@ jobs: echo "EE_ACCOUNT_NAME=$EE_ACCOUNT_NAME" >> .env echo "SERVER_USER=$SERVER_USER" >> .env echo "SERVER_HOST=$SERVER_HOST" >> .env + echo "WHISP_API_KEY=$WHISP_API_KEY" >> .env - name: Create ee creds json file run: | diff --git a/eudr_backend/async_tasks.py b/eudr_backend/async_tasks.py index 5a19e16..12dc2a9 100644 --- a/eudr_backend/async_tasks.py +++ b/eudr_backend/async_tasks.py @@ -5,6 +5,7 @@ from eudr_backend.models import EUDRFarmModel, EUDRUploadedFilesModel, WhispAPISetting from eudr_backend.serializers import EUDRFarmModelSerializer from eudr_backend.utils import flatten_geojson, format_geojson_data, transform_db_data_to_geojson +from decouple import config # Define an async function @@ -27,7 +28,7 @@ async def async_create_farm_data(data, file_id, isSyncing=False, hasCreatedFiles return errors, created_data else: err, analysis_results = await perform_analysis(data) - print(analysis_results) + # print(analysis_results) if err: # delete the file if there are errors 
EUDRUploadedFilesModel.objects.get(id=file_id).delete() @@ -59,8 +60,13 @@ async def get_existing_record(data): async def perform_analysis(data, hasCreatedFiles=[]): + api_key = config("WHISP_API_KEY") + if not api_key: + raise ValueError("WHISP_API_KEY environment variable not set.") + + # print(f"Using API Key: {api_key}") url = "https://whisp.openforis.org/api/submit/geojson" - headers = {"X-API-KEY": "7f7aaa3a-e99e-4c53-b79c-d06143ef8c1f", + headers = {"X-API-KEY": api_key, "Content-Type": "application/json"} settings = await sync_to_async(WhispAPISetting.objects.first)() chunk_size = settings.chunk_size if settings else 500 @@ -92,7 +98,9 @@ async def perform_analysis(data, hasCreatedFiles=[]): async def save_farm_data(data, file_id, analysis_results=None): + # print("analysis results",analysis_results) formatted_data = format_geojson_data(data, analysis_results, file_id) + # print("formatted data",formatted_data) saved_records = [] for item in formatted_data: diff --git a/eudr_backend/settings.py b/eudr_backend/settings.py index d77bb14..d314efe 100644 --- a/eudr_backend/settings.py +++ b/eudr_backend/settings.py @@ -208,6 +208,7 @@ def initialize_earth_engine(): AGSTACK_EMAIL = config('AGSTACK_API_EMAIL') AGSTACK_PASSWORD = config('AGSTACK_API_PASSWORD') +WHISP_API_KEY = config('WHISP_API_KEY') # email credentials # settings.py diff --git a/eudr_backend/tasks.py b/eudr_backend/tasks.py index f365243..49cf2bf 100644 --- a/eudr_backend/tasks.py +++ b/eudr_backend/tasks.py @@ -18,10 +18,11 @@ def get_access_token(): response = requests.post(login_url, json=payload) response.raise_for_status() # Raise an error for bad responses data = response.json() + # print("login successfully",data) return data['access_token'] -@background(schedule=60) # Schedule task to run every 5 minutes +# @background(schedule=60) # Schedule task to run every 5 minutes def update_geoid(user_id): access_token = get_access_token() headers = { @@ -32,10 +33,13 @@ def update_geoid(user_id): 
user_files = EUDRUploadedFilesModel.objects.filter(uploaded_by=user_id) file_ids = user_files.values_list('id', flat=True) + # print("fileids",file_ids) + # Filter farms based on these file IDs and geoid being null farms = EUDRFarmModel.objects.filter( geoid__isnull=True, file_id__in=file_ids) for farm in farms: + # print("Raw polygon value:", farm.polygon) # check if polygon has only one ring if len(farm.polygon) != 1: continue @@ -43,18 +47,25 @@ def update_geoid(user_id): reversed_coords = [[(lat, lon) for lat, lon in ring] for ring in farm.polygon] -# Create a Shapely Polygon + # Create a Shapely Polygon polygon = Polygon(reversed_coords[0]) + # print("converted polygon",polygon ) + # Convert to WKT format wkt_format = wkt.dumps(polygon) + # print("WKT format",wkt_format) + response = requests.post( f'{AG_BASE_URL}/register-field-boundary', json={"wkt": wkt_format}, headers=headers ) + # print("API response status:", response.status_code) + # print("API response text:", response.text) data = response.json() + # print("geo id data", data) if response.status_code == 200: farm.geoid = data.get("Geo Id") farm.save() diff --git a/eudr_backend/utils.py b/eudr_backend/utils.py index cbc6188..1f930c0 100644 --- a/eudr_backend/utils.py +++ b/eudr_backend/utils.py @@ -140,7 +140,166 @@ def flatten_geojson(geojson): return geojson +# def format_geojson_data(geojson, analysis, file_id=None): +# # Ensure the GeoJSON contains features +# geojson = json.loads(geojson) if isinstance(geojson, str) else geojson +# features = geojson.get('features', []) +# if not features: +# return [] + +# formatted_data_list = [] +# for i, feature in enumerate(features): +# properties = feature.get('properties', {}) +# geometry = feature.get('geometry', {}) + +# # Determine if the geometry is a Polygon and extract coordinates +# is_polygon = geometry.get('type') == 'Polygon' or geometry.get( +# 'type') == 'MultiPolygon' +# coordinates = geometry.get('coordinates', []) + +# # make union of 
coordinates if it is a MultiPolygon +# if geometry.get('type') == 'MultiPolygon': +# coordinates = [] +# for polygon in geometry.get('coordinates', []): +# coordinates.extend(polygon) +# else: +# coordinates = geometry.get('coordinates', []) + +# latitude = coordinates[1] if not is_polygon and len( +# coordinates) > 1 else properties.get('Centroid_lat', 0.0) +# longitude = coordinates[0] if not is_polygon and len( +# coordinates) > 0 else properties.get('Centroid_lon', 0.0) +# formatted_data = { +# "remote_id": properties.get("remote_id"), +# "farmer_name": properties.get("farmer_name"), +# "farm_size": float(properties.get("farm_size", properties.get('Plot_area_ha', 0))), +# "collection_site": properties.get("collection_site"), +# "agent_name": properties.get("agent_name"), +# "farm_village": properties.get("farm_village"), +# "farm_district": properties.get("farm_district", properties.get('Admin_Level_1')), +# "latitude": latitude, +# "longitude": longitude, +# "polygon": coordinates, +# "polygon_type": geometry.get('type'), +# "geoid": properties.get("geoid"), +# "file_id": file_id, +# "analysis": { +# "is_in_protected_areas": False, +# "is_in_water_body": properties.get('In_waterbody'), +# "forest_change_loss_after_2020": analysis[i].get('GFC_loss_after_2020'), +# "fire_after_2020": analysis[i].get('MODIS_fire_after_2020'), +# "radd_after_2020": analysis[i].get('RADD_after_2020'), +# "tmf_deforestation_after_2020": analysis[i].get('TMF_def_after_2020'), +# "tmf_degradation_after_2020": analysis[i].get('TMF_deg_after_2020'), +# "tmf_disturbed": analysis[i].get('TMF_disturbed'), +# "tree_cover_loss": analysis[i].get('Indicator_1_treecover'), +# "commodities": analysis[i].get('Indicator_2_commodities'), +# "disturbance_before_2020": analysis[i].get('Indicator_3_disturbance_before_2020'), +# "disturbance_after_2020": analysis[i].get('Indicator_4_disturbance_after_2020'), +# "eudr_risk_level": analysis[i].get('EUDR_risk') +# } +# } +# 
formatted_data_list.append(formatted_data) + +# return formatted_data_list + + def format_geojson_data(geojson, analysis, file_id=None): + """ + Format GeoJSON data with proper field mapping and transformations + """ + + def transform_indicator_value(value): + """Transform indicator yes/no values to boolean""" + if value == 'yes': + return True + elif value == 'no': + return False + else: + return False # Default to False instead of None + + def transform_numeric_value(value, keep_zero=True): + """Transform numeric values, optionally keeping 0 values""" + if value is None: + return 0 if keep_zero else None + if isinstance(value, (int, float)): + return value + return 0 if keep_zero else None + + # Mapping of commodity to corresponding risk key + COMMODITY_RISK_MAP = { + "Coffee": "risk_pcrop", + "Cocoa": "risk_pcrop", + "Rubber": "risk_acrop", + "Oil palm": "risk_acrop", + "Soy": "risk_acrop", + "Livestock": "risk_livestock", + "Timber": "risk_timber" + } + + + def extract_risk_level_by_commodity(analysis_result, commodity): + try: + properties = analysis_result[0]['properties'] + commodity = properties.get("commodity", "Coffee") # Default to 'Coffee' if missing + risk_key = COMMODITY_RISK_MAP.get(commodity) + + if risk_key and risk_key in properties: + return properties[risk_key] + else: + print(f"No risk key found for commodity: {commodity}") + return None + + except (IndexError, KeyError, TypeError) as e: + print(f"Error extracting risk level for {commodity}: {e}") + return None + + + # def derive_eudr_risk_level(properties): + # """Derive EUDR risk level from individual risk fields""" + # # risk_fields = ['risk_pcrop', 'risk_acrop', 'risk_timber'] + # # risk_values = [properties.get(field) for field in risk_fields if properties.get(field)] + + # risk_values= properties.get('risk_pcrop') + # # if not risk_values: + # # return 'low' # Default to 'low' instead of None + + # print("risk value",risk_values) + + # # Priority order: high > medium > low > 
more_info_needed + # if 'high' in risk_values: + # return 'high' + # elif 'medium' in risk_values: + # return 'medium' + # elif 'low' in risk_values: + # return 'low' + # elif 'more_info_needed' in risk_values: + # return 'more_info_needed' + # # else: + # # return 'low' # Default fallback + + def extract_risk_levels(analysis_result): + try: + properties = analysis_result[0]['properties'] + risk_keys = ['risk_pcrop', 'risk_acrop', 'risk_timber'] + + risk_values = {} + for key in risk_keys: + if key in properties: + risk_values[key] = properties[key] + else: + # Optionally skip or handle missing keys + risk_values[key] = None # or simply skip adding it + + return risk_values # Dictionary of actual values or None if not found + + except (IndexError, KeyError, TypeError) as e: + print(f"Error extracting risk levels: {e}") + return None + + + risk_info = extract_risk_levels(analysis) + # Ensure the GeoJSON contains features geojson = json.loads(geojson) if isinstance(geojson, str) else geojson features = geojson.get('features', []) @@ -152,12 +311,19 @@ def format_geojson_data(geojson, analysis, file_id=None): properties = feature.get('properties', {}) geometry = feature.get('geometry', {}) + risk_info_commodity = extract_risk_level_by_commodity( analysis, commodity=properties.get("commodity") or "Coffee",) + # print(risk_info_commodity) + + # # Debug: Print available property keys for first feature + # if i == 0: + # print(f"Available property keys: {list(properties.keys())}") + # print(f"Analysis array length: {len(analysis) if analysis else 0}") + # Determine if the geometry is a Polygon and extract coordinates - is_polygon = geometry.get('type') == 'Polygon' or geometry.get( - 'type') == 'MultiPolygon' + is_polygon = geometry.get('type') == 'Polygon' or geometry.get('type') == 'MultiPolygon' coordinates = geometry.get('coordinates', []) - # make union of coordinates if it is a MultiPolygon + # Handle MultiPolygon coordinates if geometry.get('type') == 
'MultiPolygon': coordinates = [] for polygon in geometry.get('coordinates', []): @@ -165,14 +331,38 @@ def format_geojson_data(geojson, analysis, file_id=None): else: coordinates = geometry.get('coordinates', []) - latitude = coordinates[1] if not is_polygon and len( - coordinates) > 1 else properties.get('Centroid_lat', 0.0) - longitude = coordinates[0] if not is_polygon and len( - coordinates) > 0 else properties.get('Centroid_lon', 0.0) + # Extract latitude and longitude + latitude = coordinates[1] if not is_polygon and len(coordinates) > 1 else properties.get('Centroid_lat', 0.0) + longitude = coordinates[0] if not is_polygon and len(coordinates) > 0 else properties.get('Centroid_lon', 0.0) + + # Get analysis data - prioritize analysis array, fallback to properties + # feature_analysis = {} + # if analysis and i < len(analysis) and analysis[i]: + # feature_analysis = analysis[i] + # # Merge properties into feature_analysis for complete data access + # merged_data = {**properties, **feature_analysis} + # else: + # merged_data = properties + feature_analysis = {} + if analysis and i < len(analysis) and analysis[i]: + feature_analysis = analysis[i] + + # Debug: Print analysis data for first feature + # if i == 0 and feature_analysis: + # print(f"Analysis data keys: {list(feature_analysis.keys())}") + # print(f"Sample analysis values: {dict(list(feature_analysis.items())[:5])}") + + # Helper function to get value from merged data + # def get_field_value(field_name): + # return merged_data.get(field_name) + def get_field_value(field_name): + return feature_analysis.get(field_name, properties.get(field_name)) + formatted_data = { "remote_id": properties.get("remote_id"), + "commodity":properties.get("commodity") or "Coffee", "farmer_name": properties.get("farmer_name"), - "farm_size": float(properties.get("farm_size", properties.get('Plot_area_ha', 0))), + "farm_size": float(properties.get("farm_size", properties.get('Area', properties.get('Plot_area_ha', 0)))), 
"collection_site": properties.get("collection_site"), "agent_name": properties.get("agent_name"), "farm_village": properties.get("farm_village"), @@ -184,19 +374,83 @@ def format_geojson_data(geojson, analysis, file_id=None): "geoid": properties.get("geoid"), "file_id": file_id, "analysis": { - "is_in_protected_areas": False, - "is_in_water_body": properties.get('In_waterbody'), - "forest_change_loss_after_2020": analysis[i].get('GFC_loss_after_2020'), - "fire_after_2020": analysis[i].get('MODIS_fire_after_2020'), - "radd_after_2020": analysis[i].get('RADD_after_2020'), - "tmf_deforestation_after_2020": analysis[i].get('TMF_def_after_2020'), - "tmf_degradation_after_2020": analysis[i].get('TMF_deg_after_2020'), - "tmf_disturbed": analysis[i].get('TMF_disturbed'), - "tree_cover_loss": analysis[i].get('Indicator_1_treecover'), - "commodities": analysis[i].get('Indicator_2_commodities'), - "disturbance_before_2020": analysis[i].get('Indicator_3_disturbance_before_2020'), - "disturbance_after_2020": analysis[i].get('Indicator_4_disturbance_after_2020'), - "eudr_risk_level": analysis[i].get('EUDR_risk') + # Protected areas - check multiple possible fields + "is_in_protected_areas": bool( + get_field_value('protected_area') or + get_field_value('Protected_Area') or + get_field_value('IFL_2020') or + get_field_value('European_Primary_Forest') + ), + + # Water body mapping + "is_in_water_body": bool(get_field_value('In_waterbody')), + + # Forest change loss after 2020 - sum multiple sources + "forest_change_loss_after_2020": transform_numeric_value( + sum(filter(None, [ + get_field_value('GFC_loss_after_2020') or 0, + get_field_value('TMF_def_after_2020') or 0, + get_field_value('TMF_deg_after_2020') or 0 + ])) + ), + + # Fire after 2020 - use available fire data + "fire_after_2020": transform_numeric_value( + get_field_value('MODIS_fire_after_2020') or + get_field_value('ESA_fire_after_2020') or 0 + ), + + # RADD alerts after 2020 + "radd_after_2020": 
transform_numeric_value( + get_field_value('RADD_after_2020') or 0 + ), + + # TMF deforestation after 2020 + "tmf_deforestation_after_2020": transform_numeric_value( + get_field_value('TMF_def_after_2020') or 0 + ), + + # TMF degradation after 2020 + "tmf_degradation_after_2020": transform_numeric_value( + get_field_value('TMF_deg_after_2020') or 0 + ), + + # Tree cover loss before 2020 + "tree_cover_loss": transform_numeric_value( + get_field_value('GFC_loss_before_2020') or + get_field_value('TMF_def_before_2020') or 0 + ), + + # TMF disturbed - invert TMF_undist or check disturbance indicators + "tmf_disturbed": ( + not bool(get_field_value('TMF_undist')) if get_field_value('TMF_undist') is not None + else bool(get_field_value('TMF_def_after_2020') or get_field_value('TMF_deg_after_2020')) + ), + + # Commodities - from indicator field + "commodities": transform_indicator_value( + get_field_value('Ind_02_commodities') + ), + + # Disturbance before 2020 + "disturbance_before_2020": transform_indicator_value( + get_field_value('Ind_03_disturbance_before_2020') + ), + + # Disturbance after 2020 + "disturbance_after_2020": transform_indicator_value( + get_field_value('Ind_04_disturbance_after_2020') + ), + + # EUDR risk level - use available risk data + "eudr_risk_level": ( + # get_field_value('EUDR_risk') or + # derive_eudr_risk_level(merged_data) + # derive_eudr_risk_level(properties) or + # derive_eudr_risk_level(feature_analysis) + # risk_info['risk_pcrop'] + risk_info_commodity + ) } } formatted_data_list.append(formatted_data) @@ -204,6 +458,7 @@ def format_geojson_data(geojson, analysis, file_id=None): return formatted_data_list + def is_valid_polygon(polygon): # Check if the polygon is a list and has at least 3 points, each with exactly 2 coordinates try: diff --git a/eudr_backend/validators.py b/eudr_backend/validators.py index 7ad3ee4..448c365 100644 --- a/eudr_backend/validators.py +++ b/eudr_backend/validators.py @@ -10,7 +10,8 @@ 'farm_village', 
'latitude', 'longitude', - 'polygon' + 'polygon', + 'commodity' ] OPTIONAL_FIELDS = [ @@ -24,6 +25,7 @@ ] GEOJSON_REQUIRED_FIELDS = ['geometry', + 'commodity', 'farmer_name', 'farm_size', 'collection_site', @@ -124,6 +126,7 @@ def validate_geojson(data: dict) -> bool: 'farm_size': (int, float), 'latitude': (int, float), 'longitude': (int, float), + 'commodity': str } for prop, prop_type in required_properties.items(): if not isinstance(properties.get(prop), prop_type): diff --git a/eudr_backend/views.py b/eudr_backend/views.py index a4e1a63..f94fa54 100644 --- a/eudr_backend/views.py +++ b/eudr_backend/views.py @@ -557,6 +557,7 @@ def create_farm_data(request): file_name = file.name.split('.')[0] # Custom function to read data from file if needed raw_data = extract_data_from_file(file, data_format) + # print("raw data",raw_data) else: file_name = "uploaded_data" @@ -567,6 +568,7 @@ def create_farm_data(request): errors = validate_geojson(raw_data) elif data_format == 'csv': errors = validate_csv(raw_data) + # print("errors",errors) else: return Response({'error': 'Unsupported format'}, status=status.HTTP_400_BAD_REQUEST) @@ -577,6 +579,7 @@ def create_farm_data(request): if data_format == 'csv': raw_data = transform_csv_to_json(raw_data) + # print("raw data converted to json",raw_data) # Combine file_name and format for database entry file_data = { @@ -606,10 +609,11 @@ def create_farm_data(request): # Custom function to handle failed file entries handle_failed_file_entry(file_serializer, file, request.user) return Response({'error': 'File serialization failed'}, status=status.HTTP_400_BAD_REQUEST) - + # print("username",request.user.username,request.user.is_authenticated ) # Proceed with other operations... 
- update_geoid(repeat=60, - user_id=request.user.username if request.user.is_authenticated else "admin") + # update_geoid(repeat=60, + # user_id=request.user.username if request.user.is_authenticated else "admin") + update_geoid(user_id=request.user.username if request.user.is_authenticated else "admin") store_file_in_s3(file, request.user, file_name, True) if file else None return Response({'message': 'File/data processed successfully', 'file_id': file_id}, status=status.HTTP_201_CREATED) @@ -1388,6 +1392,9 @@ def retrieve_all_synced_farm_data_by_cs(request, pk): @permission_classes([IsAuthenticated]) def retrieve_collection_sites(request): data = EUDRCollectionSiteModel.objects.all().order_by("-updated_at") + + # Corrected line: Call .count() to get the actual number + # print("collection site data", data.count()) serializer = EUDRCollectionSiteModelSerializer(data, many=True) diff --git a/my_eudr_app/templates/validator.html b/my_eudr_app/templates/validator.html index 926d903..8bf3c28 100644 --- a/my_eudr_app/templates/validator.html +++ b/my_eudr_app/templates/validator.html @@ -219,10 +219,10 @@
Filter Plots List by Date
> Farmer Name - Size (Ha) - Site - Village - District + Size (Ha) + Site + Village + District diff --git a/my_eudr_app/test_integrations.py b/my_eudr_app/test_integrations.py index a8f9e89..c582971 100644 --- a/my_eudr_app/test_integrations.py +++ b/my_eudr_app/test_integrations.py @@ -80,6 +80,7 @@ def test_full_farm_data_flow(self): "type": "Feature", "properties": { "remote_id": "1b8313c1-c584-4e6b-8a1c-3e9fd962798b", + "commodity": "COFFEE", "farmer_name": "sjee", "member_id": "", "collection_site": "fhfh", @@ -101,6 +102,7 @@ def test_full_farm_data_flow(self): "type": "Feature", "properties": { "remote_id": "8317a3ca-c1c2-4990-98e4-055fbd8e4e19", + "commodity": "COFFEE", "farmer_name": "shdh", "member_id": "", "collection_site": "fhfh", diff --git a/my_eudr_app/tests.py b/my_eudr_app/tests.py index 15fd161..4039203 100644 --- a/my_eudr_app/tests.py +++ b/my_eudr_app/tests.py @@ -237,6 +237,7 @@ def test_create_farm_data(self): data = {"type": "FeatureCollection", "features": [{ "type": "Feature", "properties": { + "commodity": "COFFEE", "farmer_name": "sjee", "member_id": "", "collection_site": "fhfh", @@ -254,6 +255,7 @@ def test_create_farm_data(self): }, { "type": "Feature", "properties": { + "commodity": "COFFEE", "farmer_name": "shdh", "member_id": "", "collection_site": "fhfh", @@ -273,6 +275,8 @@ def test_create_farm_data(self): } }]} response = self.client.post(url, data, format='json') + print("response code",response.status_code) + print("response data",response.data) self.file.id = response.data['file_id'] self.assertEqual(response.status_code, status.HTTP_201_CREATED) diff --git a/staticfiles/assets/js/custom.js b/staticfiles/assets/js/custom.js index 29936dc..433c853 100644 --- a/staticfiles/assets/js/custom.js +++ b/staticfiles/assets/js/custom.js @@ -464,16 +464,24 @@ if (response) { return resp.json(); }) .then((data) => { + // const farmsWithAnalysis = data.filter(farm => farm.analysis); + // console.log("famr data",data) + // 
console.log("analysis",data[0].analysis?.risk_timber) + // data.forEach((farm, index) => { + // const risk = farm.analysis?.risk_timber ?? "not provided"; + // console.log(`Farm #${index} | Agent: ${farm.agent_name} | risk_timber: ${risk}`); + // }); + farmData = data; filteredFarms = data.map((farm) => { farm.updated_at = new Date(farm.updated_at).toLocaleString(); return farm; }); - // order filteredFarms by analysis.eudr_risk_level + // order filteredFarms by analysis?.risk_timber filteredFarms.sort((a, b) => { - if (a.analysis.eudr_risk_level === "low") { + if (a.analysis?.risk_timber === "low") { return -1; - } else if (a.analysis.eudr_risk_level === "medium") { + } else if (a.analysis?.risk_timber === "medium") { return 0; } else { return 1; } @@ -484,13 +492,13 @@ if (response) { document.querySelector("#total_farms").innerText = data.length; // check where eudr_risk_level is high and calculate the percentage const lowRiskFarms = data.filter( - (farm) => farm.analysis.eudr_risk_level === "low" + (farm) => farm.analysis?.eudr_risk_level === "low" ); const highRiskFarms = data.filter( - (farm) => farm.analysis.eudr_risk_level === "high" + (farm) => farm.analysis?.eudr_risk_level === "high" ); const moreInfoNeededFarms = data.filter( - (farm) => farm.analysis.eudr_risk_level === "more_info_needed" + (farm) => farm.analysis?.eudr_risk_level === "more_info_needed" ); const lowPercentage = ( @@ -770,6 +778,8 @@ collectionSiteDropdown?.addEventListener("change", (e) => { farmsContainer.innerHTML = ""; + console.log("filtered farms",filteredFarms); + generateData(filteredFarms, farmsContainer); }); @@ -783,7 +793,7 @@ document selectedRiskLevel === "" ? farmData : farmData.filter( - (farm) => farm.analysis.eudr_risk_level === selectedRiskLevel + (farm) => farm.analysis?.risk_timber === selectedRiskLevel ); farmsContainer.innerHTML = ""; @@ -1086,7 +1096,7 @@ function generateData(farmData, farmsContainer) { }

-

${ - farmData[i].analysis.eudr_risk_level + typeof farmData[i].analysis?.eudr_risk_level === "string" + ? farmData[i].analysis.eudr_risk_level .replace(/_/g, " ") - .replace(/\b\w/g, (char) => char.toUpperCase()) || "-" + .replace(/\b\w/g, (char) => char.toUpperCase()) + : "-" }