From 2d3898b1a7a614f9b4372e1643bccc6d8310ba72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=B5=20Ho=C3=A0ng=20Ph=C3=BAc?= Date: Tue, 11 Feb 2025 19:09:05 +0700 Subject: [PATCH 1/8] chore: add function to check for non-version changes in package.json during version bump --- .../workflows/check-and-bump-versions.yaml | 35 +++++++++++++++++-- 1 file changed, 32 insertions(+), 3 deletions(-) diff --git a/.github/workflows/check-and-bump-versions.yaml b/.github/workflows/check-and-bump-versions.yaml index b8292ab5a6..cf558c6d70 100644 --- a/.github/workflows/check-and-bump-versions.yaml +++ b/.github/workflows/check-and-bump-versions.yaml @@ -261,6 +261,24 @@ jobs: echo "• New checksum (${#NEW_CHECKSUM} chars): $NEW_CHECKSUM" echo "• Match status: $([ "$OLD_CHECKSUM" = "$NEW_CHECKSUM" ] && echo "identical" || echo "different")" + # Add function to check if only version changed in package.json + check_package_json_changes() { + if [ "${{ github.event_name }}" = "pull_request" ]; then + DIFF=$(git diff "${{ github.event.pull_request.base.sha }}" "${{ github.event.pull_request.head.sha }}" -- package.json) + else + DIFF=$(git diff HEAD~1 HEAD -- package.json) + fi + + # Count number of changed lines that aren't the version field + NON_VERSION_CHANGES=$(echo "$DIFF" | grep -v '"version":' | grep '^[+-]' | wc -l) + + # If there are no non-version changes, return success (0) + if [ "$NON_VERSION_CHANGES" -eq 0 ]; then + return 0 + fi + return 1 + } + # Compare checksums and create PR if needed if [ "$NEW_CHECKSUM" != "$OLD_CHECKSUM" ]; then # Check for new package or source changes @@ -276,9 +294,20 @@ fi if [ -n "$SOURCE_FILES_CHANGED" ]; then - echo "Source files changed:" - echo "$SOURCE_FILES_CHANGED" - SHOULD_BUMP=true + # If only package.json changed, check if it's only version changes + if [ "$(echo "$SOURCE_FILES_CHANGED" | wc -l)" -eq 1 ] && [ "$(echo "$SOURCE_FILES_CHANGED" | grep "package.json$")" ]; then + if check_package_json_changes; then + echo "Only version 
field changed in package.json, skipping version bump" + SHOULD_BUMP=false + else + echo "Package.json has meaningful changes" + SHOULD_BUMP=true + fi + else + echo "Source files changed:" + echo "$SOURCE_FILES_CHANGED" + SHOULD_BUMP=true + fi else echo "No source files changed" # Update checksum even if we don't bump version, but only if not on default branch From caac156565b29960fc8628f1c81f0614fdade4f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=B5=20Ho=C3=A0ng=20Ph=C3=BAc?= Date: Tue, 11 Feb 2025 19:31:23 +0700 Subject: [PATCH 2/8] chore: update jotai and related dependencies to latest versions --- apps/external/package.json | 4 +- apps/mira/package.json | 2 +- apps/nova/package.json | 2 +- apps/web/package.json | 2 +- packages/supabase/src/next/server.ts | 3 +- pnpm-lock.yaml | 85 ++++++++++++++++------------ 6 files changed, 56 insertions(+), 42 deletions(-) diff --git a/apps/external/package.json b/apps/external/package.json index 4fa6e63fc9..8667480bb3 100644 --- a/apps/external/package.json +++ b/apps/external/package.json @@ -10,9 +10,9 @@ }, "dependencies": { "@tutur3u/ai": "^0.0.9", - "@tutur3u/eslint-config": "^0.1.1", + "@tutur3u/eslint-config": "^0.1.2", "@tutur3u/supabase": "^0.0.3", - "@tutur3u/types": "^0.1.1", + "@tutur3u/types": "^0.1.3", "@tutur3u/typescript-config": "^0.1.0", "@tutur3u/ui": "^0.0.3", "next": "15.1.6", diff --git a/apps/mira/package.json b/apps/mira/package.json index 59918ed7a7..e7a6c5ef4a 100644 --- a/apps/mira/package.json +++ b/apps/mira/package.json @@ -108,7 +108,7 @@ "framer-motion": "^12.4.2", "highlight.js": "^11.11.1", "html2canvas": "^1.4.1", - "jotai": "^2.11.3", + "jotai": "^2.12.0", "juice": "^11.0.0", "lodash": "^4.17.21", "lowlight": "^3.3.0", diff --git a/apps/nova/package.json b/apps/nova/package.json index 9de8df6461..10a0dcf1d8 100644 --- a/apps/nova/package.json +++ b/apps/nova/package.json @@ -108,7 +108,7 @@ "framer-motion": "^12.4.2", "highlight.js": "^11.11.1", "html2canvas": "^1.4.1", - "jotai": 
"^2.11.3", + "jotai": "^2.12.0", "juice": "^11.0.0", "lodash": "^4.17.21", "lowlight": "^3.3.0", diff --git a/apps/web/package.json b/apps/web/package.json index 43946ee0e4..e04ba911e4 100644 --- a/apps/web/package.json +++ b/apps/web/package.json @@ -116,7 +116,7 @@ "framer-motion": "^12.4.2", "highlight.js": "^11.11.1", "html2canvas": "^1.4.1", - "jotai": "^2.11.3", + "jotai": "^2.12.0", "juice": "^11.0.0", "lodash": "^4.17.21", "lowlight": "^3.3.0", diff --git a/packages/supabase/src/next/server.ts b/packages/supabase/src/next/server.ts index 82dfcded35..611cc7da66 100644 --- a/packages/supabase/src/next/server.ts +++ b/packages/supabase/src/next/server.ts @@ -1,9 +1,10 @@ import { SupabaseCookie, checkEnvVariables } from './common'; import { createServerClient } from '@supabase/ssr'; import { Database } from '@tutur3u/types/supabase'; +import type { ReadonlyRequestCookies } from 'next/dist/server/web/spec-extension/adapters/request-cookies'; import { cookies } from 'next/headers'; -function createCookieHandler(cookieStore: any) { +function createCookieHandler(cookieStore: ReadonlyRequestCookies) { return { getAll() { return cookieStore.getAll(); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 45b85920b6..e69ec67b71 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -57,14 +57,14 @@ importers: specifier: ^0.0.9 version: 0.0.9(@opentelemetry/api@1.9.0)(jiti@2.4.2) '@tutur3u/eslint-config': - specifier: ^0.1.1 - version: 0.1.1 + specifier: ^0.1.2 + version: 0.1.2 '@tutur3u/supabase': specifier: ^0.0.3 version: 0.0.3(@opentelemetry/api@1.9.0)(jiti@2.4.2) '@tutur3u/types': - specifier: ^0.1.1 - version: 0.1.1(jiti@2.4.2) + specifier: ^0.1.3 + version: 0.1.3(jiti@2.4.2) '@tutur3u/typescript-config': specifier: ^0.1.0 version: 0.1.0 @@ -320,10 +320,10 @@ importers: version: 2.7.21 '@vercel/analytics': specifier: ^1.5.0 - version: 
1.5.0(next@15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) + version: 1.5.0(next@15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) '@vercel/speed-insights': specifier: ^1.2.0 - version: 1.2.0(next@15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) + version: 1.2.0(next@15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) babel-plugin-react-compiler: specifier: 19.0.0-beta-55955c9-20241229 version: 19.0.0-beta-55955c9-20241229 @@ -352,8 +352,8 @@ importers: specifier: ^1.4.1 version: 1.4.1 jotai: - specifier: ^2.11.3 - version: 2.11.3(@types/react@19.0.8)(react@19.0.0) + specifier: ^2.12.0 + version: 2.12.0(@types/react@19.0.8)(react@19.0.0) juice: specifier: ^11.0.0 version: 11.0.0 @@ -377,10 +377,10 @@ importers: version: 1.0.0 next: specifier: ^15.1.7 - version: 15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0) + version: 15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0) next-intl: specifier: ^3.26.3 - version: 3.26.3(next@15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) + version: 3.26.3(next@15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) next-themes: specifier: ^0.4.4 version: 
0.4.4(react-dom@19.0.0(react@19.0.0))(react@19.0.0) @@ -786,10 +786,10 @@ importers: version: 2.7.21 '@vercel/analytics': specifier: ^1.5.0 - version: 1.5.0(next@15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) + version: 1.5.0(next@15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) '@vercel/speed-insights': specifier: ^1.2.0 - version: 1.2.0(next@15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) + version: 1.2.0(next@15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) babel-plugin-react-compiler: specifier: 19.0.0-beta-55955c9-20241229 version: 19.0.0-beta-55955c9-20241229 @@ -818,8 +818,8 @@ importers: specifier: ^1.4.1 version: 1.4.1 jotai: - specifier: ^2.11.3 - version: 2.11.3(@types/react@19.0.8)(react@19.0.0) + specifier: ^2.12.0 + version: 2.12.0(@types/react@19.0.8)(react@19.0.0) juice: specifier: ^11.0.0 version: 11.0.0 @@ -843,10 +843,10 @@ importers: version: 1.0.0 next: specifier: ^15.1.7 - version: 15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0) + version: 15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0) next-intl: specifier: ^3.26.3 - version: 3.26.3(next@15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) + version: 
3.26.3(next@15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) next-themes: specifier: ^0.4.4 version: 0.4.4(react-dom@19.0.0(react@19.0.0))(react@19.0.0) @@ -1516,8 +1516,8 @@ importers: specifier: ^1.4.1 version: 1.4.1 jotai: - specifier: ^2.11.3 - version: 2.11.3(@types/react@19.0.8)(react@19.0.0) + specifier: ^2.12.0 + version: 2.12.0(@types/react@19.0.8)(react@19.0.0) juice: specifier: ^11.0.0 version: 11.0.0 @@ -5003,8 +5003,8 @@ packages: '@tutur3u/ai@0.0.9': resolution: {integrity: sha512-IwdXN5ms1ZPsP0abKM2B5LvqC8YGn65H2cpelnwO4tsYRDM0Y+jLLs8559AiFUJ39qI2HVdwq8yra/qnpzo3HA==} - '@tutur3u/eslint-config@0.1.1': - resolution: {integrity: sha512-h/cybM4W9LnXNkBD93gwvdw4Bp2cRHPMuldihZ9WM63y5kxxPLszEWdJzpEC7AnmnFce4zX4h9fWjlA72RbaBA==} + '@tutur3u/eslint-config@0.1.2': + resolution: {integrity: sha512-5FjEpJlEXrQJX37lVfeFm2TpWYj2ffEmgTkCC3Iat3++ifG6eb96p0S0ak8HLc4TQnghOu9sqXklHdPTW/farQ==} '@tutur3u/supabase@0.0.3': resolution: {integrity: sha512-FucnY3k5rf8eA81W27P/wRq4Mpe7b0wcluwGuH2dDWw8C34MbxlcRhe4jF0gkEkHPcZvtcInOPk8acr57pclMA==} @@ -5012,6 +5012,9 @@ packages: '@tutur3u/types@0.1.1': resolution: {integrity: sha512-m45JJNEicEaq0P/49IqHHPeMNHb0B34Ez7wiHywpkeb9Sxx1oZNQphAa2bHjWIaqUvqK0I/0kVFlN/SlqfOT7w==} + '@tutur3u/types@0.1.3': + resolution: {integrity: sha512-puz1ZUdLTSiL5ONuQvkPsahvBgrP6oX8cTlR8gSsmhhJacXKOHzgY5ASBK0OP5KFd1LR4W0V8FLUjTb4gWjbIg==} + '@tutur3u/typescript-config@0.1.0': resolution: {integrity: sha512-tyjyaEg1r0rRTBcoHJEmIEV0+14R/ISV/oVKj0h/jUyC7breM39uECnpuKtPxbRvdQcmTHm51eZapW/H0Rvy8Q==} @@ -6640,8 +6643,8 @@ packages: flatted@3.3.2: resolution: {integrity: sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA==} - for-each@0.3.4: - resolution: {integrity: sha512-kKaIINnFpzW6ffJNDjjyjrk21BkDx38c0xa/klsT8VzLCaMEefv4ZTacrcVR4DmgTeBra++jMDAfS/tS799YDw==} + for-each@0.3.5: + resolution: {integrity: 
sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==} engines: {node: '>= 0.4'} foreground-child@3.3.0: @@ -7225,8 +7228,8 @@ packages: jose@5.9.6: resolution: {integrity: sha512-AMlnetc9+CV9asI19zHmrgS/WYsWUwCn2R7RzlbJWD7F9eWYUTGyBmU9o6PxngtLGOiDGPRu+Uc4fhKzbpteZQ==} - jotai@2.11.3: - resolution: {integrity: sha512-B/PsewAQ0UOS5e2+TTWegUPQ3SCLPCjPY24LYUjfn2EorGlluTA2dFjVLgF1+xHLjK9Jit3y5mKHyMG3Xq/GZg==} + jotai@2.12.0: + resolution: {integrity: sha512-j5B4NmUw8gbuN7AG4NufWw00rfpm6hexL2CVhKD7juoP2YyD9FEUV5ar921JMvadyrxQhU1NpuKUL3QfsAlVpA==} engines: {node: '>=12.20.0'} peerDependencies: '@types/react': '>=17.0.0' @@ -9520,8 +9523,8 @@ packages: resolution: {integrity: sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==} engines: {node: '>=18'} - whatwg-url@14.1.0: - resolution: {integrity: sha512-jlf/foYIKywAt3x/XWKZ/3rz8OSJPiWktjmk891alJUEjiVxKX9LEO92qH3hv4aJ0mN3MWPvGMCy8jQi95xK4w==} + whatwg-url@14.1.1: + resolution: {integrity: sha512-mDGf9diDad/giZ/Sm9Xi2YcyzaFpbdLpJPr+E9fSkyQ7KpQD4SdFcugkRQYzhmfI4KeV4Qpnn2sKPdo+kmsgRQ==} engines: {node: '>=18'} whatwg-url@5.0.0: @@ -12783,7 +12786,7 @@ snapshots: - supports-color - utf-8-validate - '@tutur3u/eslint-config@0.1.1': {} + '@tutur3u/eslint-config@0.1.2': {} '@tutur3u/supabase@0.0.3(@opentelemetry/api@1.9.0)(jiti@2.4.2)': dependencies: @@ -12817,6 +12820,16 @@ snapshots: - jiti - supports-color + '@tutur3u/types@0.1.3(jiti@2.4.2)': + dependencies: + eslint: 9.20.0(jiti@2.4.2) + react: 19.0.0 + react-dom: 19.0.0(react@19.0.0) + zod: 3.24.1 + transitivePeerDependencies: + - jiti + - supports-color + '@tutur3u/typescript-config@0.1.0': {} '@tutur3u/ui@0.0.3(@types/react-dom@19.0.3(@types/react@19.0.8))(@types/react@19.0.8)(jiti@2.4.2)': @@ -14030,7 +14043,7 @@ snapshots: data-urls@5.0.0: dependencies: whatwg-mimetype: 4.0.0 - whatwg-url: 14.1.0 + whatwg-url: 14.1.1 data-view-buffer@1.0.2: dependencies: @@ -14776,7 +14789,7 @@ 
snapshots: flatted@3.3.2: {} - for-each@0.3.4: + for-each@0.3.5: dependencies: is-callable: 1.2.7 @@ -15427,7 +15440,7 @@ snapshots: jose@5.9.6: {} - jotai@2.11.3(@types/react@19.0.8)(react@19.0.0): + jotai@2.12.0(@types/react@19.0.8)(react@19.0.0): optionalDependencies: '@types/react': 19.0.8 react: 19.0.0 @@ -15460,7 +15473,7 @@ snapshots: webidl-conversions: 7.0.0 whatwg-encoding: 3.1.1 whatwg-mimetype: 4.0.0 - whatwg-url: 14.1.0 + whatwg-url: 14.1.1 ws: 8.18.0 xml-name-validator: 5.0.0 optionalDependencies: @@ -17914,7 +17927,7 @@ snapshots: typed-array-byte-length@1.0.3: dependencies: call-bind: 1.0.8 - for-each: 0.3.4 + for-each: 0.3.5 gopd: 1.2.0 has-proto: 1.2.0 is-typed-array: 1.1.15 @@ -17923,7 +17936,7 @@ snapshots: dependencies: available-typed-arrays: 1.0.7 call-bind: 1.0.8 - for-each: 0.3.4 + for-each: 0.3.5 gopd: 1.2.0 has-proto: 1.2.0 is-typed-array: 1.1.15 @@ -17932,7 +17945,7 @@ snapshots: typed-array-length@1.0.7: dependencies: call-bind: 1.0.8 - for-each: 0.3.4 + for-each: 0.3.5 gopd: 1.2.0 is-typed-array: 1.1.15 possible-typed-array-names: 1.1.0 @@ -18264,7 +18277,7 @@ snapshots: whatwg-mimetype@4.0.0: {} - whatwg-url@14.1.0: + whatwg-url@14.1.1: dependencies: tr46: 5.0.0 webidl-conversions: 7.0.0 @@ -18310,7 +18323,7 @@ snapshots: available-typed-arrays: 1.0.7 call-bind: 1.0.8 call-bound: 1.0.3 - for-each: 0.3.4 + for-each: 0.3.5 gopd: 1.2.0 has-tostringtag: 1.0.2 From bd987815d7b1d4ef22cc46bffbdc40fc113c2e95 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=B5=20Ho=C3=A0ng=20Ph=C3=BAc?= Date: Tue, 11 Feb 2025 23:17:56 +0700 Subject: [PATCH 3/8] refactor(crawlers): simplify data mapping and update link generation --- .../(dashboard)/[wsId]/crawlers/(default)/page.tsx | 8 +------- .../app/[locale]/(dashboard)/[wsId]/crawlers/columns.tsx | 2 +- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/(default)/page.tsx 
b/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/(default)/page.tsx index 53409e465f..d7e5a2c3cf 100644 --- a/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/(default)/page.tsx +++ b/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/(default)/page.tsx @@ -2,7 +2,6 @@ import { getColumns } from '../columns'; import ModelForm from '../form'; import { CustomDataTable } from '@/components/custom-data-table'; import { createClient } from '@tutur3u/supabase/next/server'; -import type { WorkspaceCrawler } from '@tutur3u/types/db'; import FeatureSummary from '@tutur3u/ui/custom/feature-summary'; import { Separator } from '@tutur3u/ui/separator'; import { getTranslations } from 'next-intl/server'; @@ -31,11 +30,6 @@ export default async function WorkspaceCrawlersPage({ const { locale, wsId } = await params; const { data, count } = await getData(wsId, await searchParams); - const crawlers = data.map((m) => ({ - ...m, - href: `/${wsId}/crawlers/${m.id}`, - })) as WorkspaceCrawler[]; - return ( <> ( From ee689e2244a9813bee1a66bfd868b5eb116f50e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=B5=20Ho=C3=A0ng=20Ph=C3=BAc?= Date: Wed, 12 Feb 2025 01:43:29 +0700 Subject: [PATCH 4/8] chore: remove unused ThemeProvider component --- apps/nova/src/components/theme-provider.tsx | 11 ----------- 1 file changed, 11 deletions(-) delete mode 100644 apps/nova/src/components/theme-provider.tsx diff --git a/apps/nova/src/components/theme-provider.tsx b/apps/nova/src/components/theme-provider.tsx deleted file mode 100644 index 7d128e0007..0000000000 --- a/apps/nova/src/components/theme-provider.tsx +++ /dev/null @@ -1,11 +0,0 @@ -'use client'; - -import { - ThemeProvider as NextThemesProvider, - ThemeProviderProps, -} from 'next-themes'; -import * as React from 'react'; - -export function ThemeProvider({ children, ...props }: ThemeProviderProps) { - return {children}; -} From f151587f79278889551a723c23a6c027cd775084 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?V=C3=B5=20Ho=C3=A0ng=20Ph=C3=BAc?= Date: Wed, 12 Feb 2025 02:08:52 +0700 Subject: [PATCH 5/8] refactor(providers): simplify theme options in ThemeProvider --- apps/mira/src/components/providers.tsx | 20 +------------------- apps/rewise/src/components/providers.tsx | 20 +------------------- apps/web/src/components/providers.tsx | 20 +------------------- 3 files changed, 3 insertions(+), 57 deletions(-) diff --git a/apps/mira/src/components/providers.tsx b/apps/mira/src/components/providers.tsx index 144b162322..0fa48f1f33 100644 --- a/apps/mira/src/components/providers.tsx +++ b/apps/mira/src/components/providers.tsx @@ -6,25 +6,7 @@ export function Providers({ children }: { children: ReactNode }) { return ( diff --git a/apps/rewise/src/components/providers.tsx b/apps/rewise/src/components/providers.tsx index 10be897cb4..6821e764e9 100644 --- a/apps/rewise/src/components/providers.tsx +++ b/apps/rewise/src/components/providers.tsx @@ -7,25 +7,7 @@ export function Providers({ children }: { children: ReactNode }) { diff --git a/apps/web/src/components/providers.tsx b/apps/web/src/components/providers.tsx index 144b162322..0fa48f1f33 100644 --- a/apps/web/src/components/providers.tsx +++ b/apps/web/src/components/providers.tsx @@ -6,25 +6,7 @@ export function Providers({ children }: { children: ReactNode }) { return ( From 8b378173e1b704b64aef98e500dd711eb0f2e567 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=B5=20Ho=C3=A0ng=20Ph=C3=BAc?= Date: Wed, 12 Feb 2025 02:11:45 +0700 Subject: [PATCH 6/8] feat(crawlers): add crawling functionality with API integration and UI components --- .../20250210132933_new_migration.sql | 151 +++++++++- apps/web/.env.example | 5 + .../crawlers/[crawlerId]/crawl-button.tsx | 48 +++ .../crawlers/[crawlerId]/crawler-content.tsx | 193 ++++++++++++ .../[wsId]/crawlers/[crawlerId]/page.tsx | 42 ++- .../[wsId]/crawlers/[crawlerId]/utils.ts | 52 ++++ .../api/v1/workspaces/[wsId]/crawl/route.ts | 275 ++++++++++++++++++ 
.../[wsId]/crawlers/status/route.ts | 53 ++++ packages/types/src/supabase.ts | 45 +++ pnpm-lock.yaml | 16 +- turbo.json | 1 + 11 files changed, 857 insertions(+), 24 deletions(-) create mode 100644 apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/crawl-button.tsx create mode 100644 apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/crawler-content.tsx create mode 100644 apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/utils.ts create mode 100644 apps/web/src/app/api/v1/workspaces/[wsId]/crawl/route.ts create mode 100644 apps/web/src/app/api/v1/workspaces/[wsId]/crawlers/status/route.ts diff --git a/apps/db/supabase/migrations/20250210132933_new_migration.sql b/apps/db/supabase/migrations/20250210132933_new_migration.sql index 6866b4fb2a..9b210d70df 100644 --- a/apps/db/supabase/migrations/20250210132933_new_migration.sql +++ b/apps/db/supabase/migrations/20250210132933_new_migration.sql @@ -124,4 +124,153 @@ create policy "Allow workspace members to have full permissions" on "public"."wo (ws.id = workspace_crawlers.ws_id) ) ) -); \ No newline at end of file +); + +create table "public"."crawled_url_next_urls" ( + "origin_id" uuid not null default gen_random_uuid(), + "url" text not null, + "skipped" boolean not null, + "created_at" timestamp with time zone not null default now() +); + +create table "public"."crawled_urls" ( + "id" uuid not null default gen_random_uuid(), + "url" text not null, + "html" text, + "markdown" text, + "created_at" timestamp with time zone not null default now() +); + +CREATE UNIQUE INDEX crawled_url_next_urls_pkey ON public.crawled_url_next_urls USING btree (origin_id, url); + +CREATE UNIQUE INDEX crawled_urls_pkey ON public.crawled_urls USING btree (id); + +alter table + "public"."crawled_url_next_urls" +add + constraint "crawled_url_next_urls_pkey" PRIMARY KEY using index "crawled_url_next_urls_pkey"; + +alter table + "public"."crawled_urls" +add + constraint "crawled_urls_pkey" PRIMARY 
KEY using index "crawled_urls_pkey"; + +grant delete on table "public"."crawled_url_next_urls" to "anon"; + +grant +insert + on table "public"."crawled_url_next_urls" to "anon"; + +grant references on table "public"."crawled_url_next_urls" to "anon"; + +grant +select + on table "public"."crawled_url_next_urls" to "anon"; + +grant trigger on table "public"."crawled_url_next_urls" to "anon"; + +grant truncate on table "public"."crawled_url_next_urls" to "anon"; + +grant +update + on table "public"."crawled_url_next_urls" to "anon"; + +grant delete on table "public"."crawled_url_next_urls" to "authenticated"; + +grant +insert + on table "public"."crawled_url_next_urls" to "authenticated"; + +grant references on table "public"."crawled_url_next_urls" to "authenticated"; + +grant +select + on table "public"."crawled_url_next_urls" to "authenticated"; + +grant trigger on table "public"."crawled_url_next_urls" to "authenticated"; + +grant truncate on table "public"."crawled_url_next_urls" to "authenticated"; + +grant +update + on table "public"."crawled_url_next_urls" to "authenticated"; + +grant delete on table "public"."crawled_url_next_urls" to "service_role"; + +grant +insert + on table "public"."crawled_url_next_urls" to "service_role"; + +grant references on table "public"."crawled_url_next_urls" to "service_role"; + +grant +select + on table "public"."crawled_url_next_urls" to "service_role"; + +grant trigger on table "public"."crawled_url_next_urls" to "service_role"; + +grant truncate on table "public"."crawled_url_next_urls" to "service_role"; + +grant +update + on table "public"."crawled_url_next_urls" to "service_role"; + +grant delete on table "public"."crawled_urls" to "anon"; + +grant +insert + on table "public"."crawled_urls" to "anon"; + +grant references on table "public"."crawled_urls" to "anon"; + +grant +select + on table "public"."crawled_urls" to "anon"; + +grant trigger on table "public"."crawled_urls" to "anon"; + +grant truncate on table 
"public"."crawled_urls" to "anon"; + +grant +update + on table "public"."crawled_urls" to "anon"; + +grant delete on table "public"."crawled_urls" to "authenticated"; + +grant +insert + on table "public"."crawled_urls" to "authenticated"; + +grant references on table "public"."crawled_urls" to "authenticated"; + +grant +select + on table "public"."crawled_urls" to "authenticated"; + +grant trigger on table "public"."crawled_urls" to "authenticated"; + +grant truncate on table "public"."crawled_urls" to "authenticated"; + +grant +update + on table "public"."crawled_urls" to "authenticated"; + +grant delete on table "public"."crawled_urls" to "service_role"; + +grant +insert + on table "public"."crawled_urls" to "service_role"; + +grant references on table "public"."crawled_urls" to "service_role"; + +grant +select + on table "public"."crawled_urls" to "service_role"; + +grant trigger on table "public"."crawled_urls" to "service_role"; + +grant truncate on table "public"."crawled_urls" to "service_role"; + +grant +update + on table "public"."crawled_urls" to "service_role"; \ No newline at end of file diff --git a/apps/web/.env.example b/apps/web/.env.example index 2333b5a50b..b09a36d2fa 100644 --- a/apps/web/.env.example +++ b/apps/web/.env.example @@ -28,3 +28,8 @@ MODAL_TOKEN_SECRET=YOUR_MODAL_TOKEN_SECRET CF_ACCOUNT_ID=YOUR_CF_ACCOUNT_ID CF_API_TOKEN=YOUR_CF_API_TOKEN + +# Infrastructure Credentials +SCRAPER_URL=YOUR_SCRAPER_URL +PROXY_API_KEY=YOUR_PROXY_API_KEY +NEXT_PUBLIC_PROXY_API_KEY=YOUR_NEXT_PUBLIC_PROXY_API_KEY \ No newline at end of file diff --git a/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/crawl-button.tsx b/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/crawl-button.tsx new file mode 100644 index 0000000000..57623bcbe5 --- /dev/null +++ b/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/crawl-button.tsx @@ -0,0 +1,48 @@ +'use client'; + +import { Button } from '@tutur3u/ui/button'; +import { 
useState } from 'react'; + +export default function CrawlButton({ + wsId, + url, + onSuccess, +}: { + wsId: string; + url: string; + onSuccess?: () => void; +}) { + const [isLoading, setIsLoading] = useState(false); + + return ( + + ); +} diff --git a/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/crawler-content.tsx b/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/crawler-content.tsx new file mode 100644 index 0000000000..0d9593dc0b --- /dev/null +++ b/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/crawler-content.tsx @@ -0,0 +1,193 @@ +'use client'; + +import CrawlButton from './crawl-button'; +import { formatHTML, unescapeMarkdownString } from './utils'; +import { MemoizedReactMarkdown } from '@/components/markdown'; +import { + Card, + CardContent, + CardDescription, + CardHeader, + CardTitle, +} from '@tutur3u/ui/card'; +import { Tabs, TabsContent, TabsList, TabsTrigger } from '@tutur3u/ui/tabs'; +import { formatDistance } from 'date-fns'; +import { useEffect, useState } from 'react'; + +interface CrawledUrl { + created_at: string; + html: string | null; + id: string; + markdown: string | null; + url: string; +} + +interface RelatedUrl { + created_at: string; + origin_id: string; + skipped: boolean; + url: string; +} + +export function CrawlerContent({ + initialCrawledUrl, + initialRelatedUrls, + wsId, + url, +}: { + initialCrawledUrl: CrawledUrl | null; + initialRelatedUrls: RelatedUrl[]; + wsId: string; + url: string; +}) { + const [crawledUrl, setCrawledUrl] = useState(initialCrawledUrl); + const [relatedUrls, setRelatedUrls] = useState(initialRelatedUrls); + const [isRefreshing, setIsRefreshing] = useState(false); + + const refreshData = async () => { + try { + setIsRefreshing(true); + const res = await fetch( + `/api/v1/workspaces/${wsId}/crawlers/status?url=${encodeURIComponent(url)}` + ); + if (!res.ok) return; + + const data = await res.json(); + setCrawledUrl(data.crawledUrl); + 
setRelatedUrls(data.relatedUrls || []); + } finally { + setIsRefreshing(false); + } + }; + + useEffect(() => { + if (crawledUrl) { + const interval = setInterval(refreshData, 5000); + return () => clearInterval(interval); + } + }, [crawledUrl]); + + return ( + <> + + +
+
+ Crawl Status + + {crawledUrl + ? `Last crawled ${formatDistance(new Date(crawledUrl.created_at), new Date(), { addSuffix: true })}` + : 'Not yet crawled'} + +
+ {isRefreshing && ( +
+ )} +
+ + {!crawledUrl && ( + + + + )} + + + {crawledUrl && ( + + + Crawled Content + + View the crawled content and discovered URLs + + + + + + Markdown + HTML + + URLs ({relatedUrls.length}) + + + +
+ {crawledUrl.markdown ? ( + + {unescapeMarkdownString( + JSON.parse(crawledUrl.markdown)?.text_content + )} + + ) : ( +

+ No markdown content available +

+ )} +
+
+ +
+ {crawledUrl.html ? ( +
+                      {formatHTML(crawledUrl.html)}
+                    
+ ) : ( +

+ No HTML content available +

+ )} +
+
+ +
+ {relatedUrls.length > 0 ? ( +
+ {relatedUrls.map((relatedUrl) => ( +
+
+
+ + {relatedUrl.url} + + + {relatedUrl.skipped ? 'Skipped' : 'Kept'} + +
+ {!relatedUrl.skipped && ( + + )} +
+
+ ))} +
+ ) : ( +
+

+ No URLs discovered +

+
+ )} +
+
+
+
+
+ )} + + ); +} diff --git a/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/page.tsx b/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/page.tsx index ffa94dcdb8..22abb9870d 100644 --- a/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/page.tsx +++ b/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/page.tsx @@ -1,5 +1,5 @@ +import { CrawlerContent } from './crawler-content'; import { createClient } from '@tutur3u/supabase/next/server'; -import { Card, CardDescription, CardHeader, CardTitle } from '@tutur3u/ui/card'; import { notFound } from 'next/navigation'; interface Props { @@ -11,33 +11,45 @@ interface Props { } export default async function DatasetDetailsPage({ params }: Props) { - const { crawlerId } = await params; - + const { wsId, crawlerId } = await params; const supabase = await createClient(); - const { data } = await supabase + + const { data: crawler } = await supabase .from('workspace_crawlers') .select('*') .eq('id', crawlerId) .single(); - if (!data) notFound(); + if (!crawler) notFound(); + + const { data: crawledUrl } = await supabase + .from('crawled_urls') + .select('*') + .eq('url', crawler.url) + .maybeSingle(); + + const { data: relatedUrls } = !crawledUrl + ? { data: [] } + : await supabase + .from('crawled_url_next_urls') + .select('*') + .eq('origin_id', crawledUrl.id) + .order('created_at', { ascending: false }); return ( -
+
-

{data.url}

+

{crawler.url}

- - - HTML Elements - - {data.html_ids?.length || 0} elements configured - - - +
); } diff --git a/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/utils.ts b/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/utils.ts new file mode 100644 index 0000000000..5de4a1ca5c --- /dev/null +++ b/apps/web/src/app/[locale]/(dashboard)/[wsId]/crawlers/[crawlerId]/utils.ts @@ -0,0 +1,52 @@ +export function unescapeMarkdownString(str: string | null): string { + if (!str) return ''; + + const escapeMap: Record = { + '\\n': '\n', + '\\"': '"', + '\\t': '\t', + '\\r': '\r', + '\\\\': '\\', + "\\'": "'", + }; + + return Object.entries(escapeMap).reduce( + (acc, [escaped, unescaped]) => + acc.replace(new RegExp(escaped, 'g'), unescaped), + str + ); +} + +export function formatHTML(html: string | null): string { + if (!html) return ''; + + const indent = (level: number) => ' '.repeat(level); + let formatted = ''; + let depth = 0; + let inContent = false; + + for (let i = 0; i < html.length; i++) { + const char = html[i]; + + if (char === '<' && html[i + 1] !== '/') { + if (inContent) { + formatted += '\n' + indent(depth); + inContent = false; + } + depth++; + formatted += '\n' + indent(depth - 1) + char; + } else if (char === '<' && html[i + 1] === '/') { + depth--; + formatted += '\n' + indent(depth) + char; + } else if (char === '>') { + formatted += char; + if (html[i + 1] && html[i + 1] !== '<') { + inContent = true; + } + } else { + formatted += char; + } + } + + return formatted.trim(); +} diff --git a/apps/web/src/app/api/v1/workspaces/[wsId]/crawl/route.ts b/apps/web/src/app/api/v1/workspaces/[wsId]/crawl/route.ts new file mode 100644 index 0000000000..99ac76576e --- /dev/null +++ b/apps/web/src/app/api/v1/workspaces/[wsId]/crawl/route.ts @@ -0,0 +1,275 @@ +import { createAdminClient, createClient } from '@tutur3u/supabase/next/server'; +import { headers } from 'next/headers'; +import { NextRequest, NextResponse } from 'next/server'; + +interface Params { + params: Promise<{ + wsId: string; + }>; +} + +export async function 
POST(req: NextRequest, { params }: Params) { + const { wsId } = await params; + + const body = (await req.json()) as { url?: string | null }; + const { url } = body; + + if (!url) { + return NextResponse.json( + { message: 'Missing required parameter: url' }, + { status: 400 } + ); + } + + const apiKey = (await headers()).get('API_KEY'); + return apiKey + ? getDataWithApiKey(req, { url, wsId, apiKey }) + : getDataFromSession(req, { url, wsId }); +} + +async function getDataWithApiKey( + _: NextRequest, + { + url, + wsId, + apiKey, + }: { + url: string; + wsId: string; + apiKey: string; + } +) { + const sbAdmin = await createAdminClient(); + + const apiCheckQuery = sbAdmin + .from('workspace_api_keys') + .select('id') + .eq('ws_id', wsId) + .eq('value', apiKey) + .single(); + + const secretCheckQuery = sbAdmin + .from('workspace_secrets') + .select('*', { count: 'exact' }) + .eq('ws_id', wsId); + + const crawledUrlQuery = sbAdmin + .from('crawled_urls') + .select('*', { count: 'exact' }) + .eq('url', url) + .maybeSingle(); + + const [apiCheck, secretCheck, crawledUrlCheck] = await Promise.all([ + apiCheckQuery, + secretCheckQuery, + crawledUrlQuery, + ]); + + const { error: workspaceError } = apiCheck; + const { count: secretCount, error: secretError } = secretCheck; + const { count: crawledCount, error: crawledUrlError } = crawledUrlCheck; + + if (secretCount === 0) { + return NextResponse.json( + { message: 'Crawling is disabled for this workspace' }, + { status: 403 } + ); + } + + if (crawledCount !== 0) { + return NextResponse.json( + { message: 'URL already crawled' }, + { status: 400 } + ); + } + + if (workspaceError || secretError || crawledUrlError) { + console.log(workspaceError || secretError || crawledUrlError); + return NextResponse.json( + { message: 'Error fetching workspace crawlers' }, + { status: 500 } + ); + } + + // make POST request to SCRAPER_URL with ?url=${url} + const res = await fetch( + 
`${process.env.SCRAPER_URL}?url=${encodeURIComponent(url)}`, + { + headers: { + 'Content-Type': 'application/json', + }, + } + ); + + if (!res.ok) { + return NextResponse.json( + { message: 'Failed to crawl', status: res.status }, + { status: res.status } + ); + } + + const data = await res.json(); + + const { data: crawledUrl, error: crawledError } = await sbAdmin + .from('crawled_urls') + .insert({ + url, + html: data.html, + markdown: data.markdown, + }) + .select('id') + .single(); + + if (crawledError) { + console.error('Error inserting crawled URL:', crawledError); + return NextResponse.json( + { message: 'Error inserting crawled URL' }, + { status: 500 } + ); + } + + const { error: nextUrlsError } = await sbAdmin + .from('crawled_url_next_urls') + .insert([ + ...data.kept.map((nextUrl: string) => ({ + origin_id: crawledUrl.id, + url: nextUrl, + skipped: false, + })), + ...data.skipped.map((nextUrl: string) => ({ + origin_id: crawledUrl.id, + url: nextUrl, + skipped: true, + })), + ]); + + if (nextUrlsError) { + console.error('Error inserting next URLs:', nextUrlsError); + return NextResponse.json( + { message: 'Error inserting next URLs' }, + { status: 500 } + ); + } + + return NextResponse.json({ success: true }); +} + +async function getDataFromSession( + _: NextRequest, + { url, wsId }: { url: string; wsId: string } +) { + const supabase = await createClient(); + const sbAdmin = await createAdminClient(); + + const workspaceCheckQuery = supabase + .from('workspaces') + .select('*') + .eq('id', wsId); + + const secretCheckQuery = sbAdmin + .from('workspace_secrets') + .select('*', { count: 'exact' }) + .eq('ws_id', wsId) + .eq('name', 'ALLOW_CRAWLERS') + .eq('value', 'true') + .maybeSingle(); + const crawledUrlQuery = sbAdmin + .from('crawled_urls') + .select('*', { count: 'exact' }) + .eq('url', url) + .maybeSingle(); + + const [workspaceCheck, secretCheck, crawledUrlCheck] = await Promise.all([ + workspaceCheckQuery, + secretCheckQuery, + 
crawledUrlQuery, + ]); + + const { error: workspaceError } = workspaceCheck; + const { count: secretCount, error: secretError } = secretCheck; + const { count: crawledCount, error: crawledUrlError } = crawledUrlCheck; + + if (secretCount === 0) { + return NextResponse.json( + { message: 'Crawling is disabled for this workspace' }, + { status: 403 } + ); + } + + if (crawledCount !== 0) { + return NextResponse.json( + { message: 'URL already crawled' }, + { status: 400 } + ); + } + + if (workspaceError || secretError || crawledUrlError) { + console.log(workspaceError || secretError || crawledUrlError); + return NextResponse.json( + { message: 'Error fetching workspace crawlers' }, + { status: 500 } + ); + } + + // make POST request to SCRAPER_URL with ?url=${url} + const res = await fetch( + `${process.env.SCRAPER_URL}?url=${encodeURIComponent(url)}`, + { + headers: { + 'Content-Type': 'application/json', + }, + } + ); + + if (!res.ok) { + return NextResponse.json( + { message: 'Failed to crawl', status: res.status }, + { status: res.status } + ); + } + + const data = await res.json(); + + const { data: crawledUrl, error: crawledError } = await sbAdmin + .from('crawled_urls') + .insert({ + url, + html: data.html, + markdown: data.markdown, + }) + .select('id') + .single(); + + if (crawledError) { + console.error('Error inserting crawled URL:', crawledError); + return NextResponse.json( + { message: 'Error inserting crawled URL' }, + { status: 500 } + ); + } + + const { error: nextUrlsError } = await sbAdmin + .from('crawled_url_next_urls') + .insert([ + ...data.kept.map((nextUrl: string) => ({ + origin_id: crawledUrl.id, + url: nextUrl, + skipped: false, + })), + ...data.skipped.map((nextUrl: string) => ({ + origin_id: crawledUrl.id, + url: nextUrl, + skipped: true, + })), + ]); + + if (nextUrlsError) { + console.error('Error inserting next URLs:', nextUrlsError); + return NextResponse.json( + { message: 'Error inserting next URLs' }, + { status: 500 } + ); + } + + 
return NextResponse.json({ success: true }); +} diff --git a/apps/web/src/app/api/v1/workspaces/[wsId]/crawlers/status/route.ts b/apps/web/src/app/api/v1/workspaces/[wsId]/crawlers/status/route.ts new file mode 100644 index 0000000000..2f6e158d95 --- /dev/null +++ b/apps/web/src/app/api/v1/workspaces/[wsId]/crawlers/status/route.ts @@ -0,0 +1,53 @@ +import { createClient } from '@tutur3u/supabase/next/server'; +import { NextResponse } from 'next/server'; + +export async function GET(request: Request) { + const { searchParams } = new URL(request.url); + const url = searchParams.get('url'); + + if (!url) { + return NextResponse.json( + { message: 'Missing required parameter: url' }, + { status: 400 } + ); + } + + const supabase = await createClient(); + + const crawledUrlQuery = supabase + .from('crawled_urls') + .select('*') + .eq('url', url) + .maybeSingle(); + + const { data: crawledUrl, error: crawledUrlError } = await crawledUrlQuery; + + if (crawledUrlError) { + return NextResponse.json( + { message: 'Error fetching crawled URL' }, + { status: 500 } + ); + } + + if (!crawledUrl) { + return NextResponse.json({ crawledUrl: null, relatedUrls: [] }); + } + + const { data: relatedUrls, error: relatedUrlsError } = await supabase + .from('crawled_url_next_urls') + .select('*') + .eq('origin_id', crawledUrl.id) + .order('created_at', { ascending: false }); + + if (relatedUrlsError) { + return NextResponse.json( + { message: 'Error fetching related URLs' }, + { status: 500 } + ); + } + + return NextResponse.json({ + crawledUrl, + relatedUrls: relatedUrls || [], + }); +} diff --git a/packages/types/src/supabase.ts b/packages/types/src/supabase.ts index a578c012a2..7c0febb5b7 100644 --- a/packages/types/src/supabase.ts +++ b/packages/types/src/supabase.ts @@ -511,6 +511,51 @@ export type Database = { }, ]; }; + crawled_url_next_urls: { + Row: { + created_at: string; + origin_id: string; + skipped: boolean; + url: string; + }; + Insert: { + created_at?: string; + 
origin_id?: string; + skipped: boolean; + url: string; + }; + Update: { + created_at?: string; + origin_id?: string; + skipped?: boolean; + url?: string; + }; + Relationships: []; + }; + crawled_urls: { + Row: { + created_at: string; + html: string | null; + id: string; + markdown: string | null; + url: string; + }; + Insert: { + created_at?: string; + html?: string | null; + id?: string; + markdown?: string | null; + url: string; + }; + Update: { + created_at?: string; + html?: string | null; + id?: string; + markdown?: string | null; + url?: string; + }; + Relationships: []; + }; credit_wallets: { Row: { limit: number; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e69ec67b71..f9f8ddf61e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -320,10 +320,10 @@ importers: version: 2.7.21 '@vercel/analytics': specifier: ^1.5.0 - version: 1.5.0(next@15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) + version: 1.5.0(next@15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) '@vercel/speed-insights': specifier: ^1.2.0 - version: 1.2.0(next@15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) + version: 1.2.0(next@15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) babel-plugin-react-compiler: specifier: 19.0.0-beta-55955c9-20241229 version: 19.0.0-beta-55955c9-20241229 @@ -377,10 +377,10 @@ importers: version: 1.0.0 next: specifier: ^15.1.7 - version: 15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0) + version: 
15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0) next-intl: specifier: ^3.26.3 - version: 3.26.3(next@15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) + version: 3.26.3(next@15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) next-themes: specifier: ^0.4.4 version: 0.4.4(react-dom@19.0.0(react@19.0.0))(react@19.0.0) @@ -786,10 +786,10 @@ importers: version: 2.7.21 '@vercel/analytics': specifier: ^1.5.0 - version: 1.5.0(next@15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) + version: 1.5.0(next@15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) '@vercel/speed-insights': specifier: ^1.2.0 - version: 1.2.0(next@15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) + version: 1.2.0(next@15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) babel-plugin-react-compiler: specifier: 19.0.0-beta-55955c9-20241229 version: 19.0.0-beta-55955c9-20241229 @@ -843,10 +843,10 @@ importers: version: 1.0.0 next: specifier: ^15.1.7 - version: 15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0) + version: 15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0) 
next-intl: specifier: ^3.26.3 - version: 3.26.3(next@15.1.7(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) + version: 3.26.3(next@15.1.7(@babel/core@7.26.8)(@opentelemetry/api@1.9.0)(babel-plugin-react-compiler@19.0.0-beta-55955c9-20241229)(react-dom@19.0.0(react@19.0.0))(react@19.0.0))(react@19.0.0) next-themes: specifier: ^0.4.4 version: 0.4.4(react-dom@19.0.0(react@19.0.0))(react@19.0.0) diff --git a/turbo.json b/turbo.json index 76da2c9b67..0adc7033a1 100644 --- a/turbo.json +++ b/turbo.json @@ -24,6 +24,7 @@ "CF_API_TOKEN", "NEXT_PUBLIC_PROXY_API_KEY", "PROXY_API_KEY", + "SCRAPER_URL", "BASE_URL", "API_URL", "ANALYZE", From 26fa4306270d5ec7e7d17e55074740d0658c228a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?V=C3=B5=20Ho=C3=A0ng=20Ph=C3=BAc?= Date: Wed, 12 Feb 2025 02:15:02 +0700 Subject: [PATCH 7/8] refactor(layout): update ThemeProvider to use next-themes with enhanced options --- .../src/app/[locale]/(dashboard)/[wsId]/layout.tsx | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/apps/nova/src/app/[locale]/(dashboard)/[wsId]/layout.tsx b/apps/nova/src/app/[locale]/(dashboard)/[wsId]/layout.tsx index acf31c2161..867214b7ae 100644 --- a/apps/nova/src/app/[locale]/(dashboard)/[wsId]/layout.tsx +++ b/apps/nova/src/app/[locale]/(dashboard)/[wsId]/layout.tsx @@ -1,7 +1,7 @@ import { Sidebar } from '@/components/layout/sidebar'; -import { ThemeProvider } from '@/components/theme-provider'; import { Toaster } from '@tutur3u/ui/toaster'; import type { Metadata } from 'next'; +import { ThemeProvider } from 'next-themes'; import { Inter } from 'next/font/google'; const inter = Inter({ subsets: ['latin'] }); @@ -18,7 +18,13 @@ export default function RootLayout({ }) { return (
- +
{children}
From b72ee1327ea39e002cf306ec6ad1646e7ea40684 Mon Sep 17 00:00:00 2001 From: Skora Date: Wed, 12 Feb 2025 02:30:50 +0700 Subject: [PATCH 8/8] Update .github/workflows/check-and-bump-versions.yaml Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com> --- .github/workflows/check-and-bump-versions.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-and-bump-versions.yaml b/.github/workflows/check-and-bump-versions.yaml index cf558c6d70..1cba867821 100644 --- a/.github/workflows/check-and-bump-versions.yaml +++ b/.github/workflows/check-and-bump-versions.yaml @@ -273,7 +273,7 @@ jobs: NON_VERSION_CHANGES=$(echo "$DIFF" | grep -v '"version":' | grep '^[+-]' | wc -l) # If there are no non-version changes, return true (1) - if [ "$NON_VERSION_CHANGES" -eq 0; then + if [ "$NON_VERSION_CHANGES" -eq 0 ]; then return 0 fi return 1