diff --git a/.gitignore b/.gitignore index 933849d..87bd37b 100644 --- a/.gitignore +++ b/.gitignore @@ -38,4 +38,5 @@ tasks.json tasks/ .roo .taskmasterconfig -scripts \ No newline at end of file +scripts +projects-data-node1 \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json deleted file mode 100644 index abc3d2d..0000000 --- a/.vscode/launch.json +++ /dev/null @@ -1,68 +0,0 @@ -{ - // Use IntelliSense to learn about possible attributes. - // Hover to view descriptions of existing attributes. - // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 - "version": "0.2.0", - "configurations": [ - { - "name": "Node Custom", - "type": "go", - "request": "launch", - "mode": "auto", - "program": "${workspaceFolder}", - "args": [ - "serve", - "--data=test-instance", - "--db=test.db" - ], - "env": { - "CHAINLAUNCH_USER": "admin", - "CHAINLAUNCH_PASSWORD": "admin123", - "JAVA_HOME": "/opt/homebrew/Cellar/openjdk/23.0.2" - } - }, - { - "name": "Node 1", - "type": "go", - "request": "launch", - "mode": "auto", - "program": "${workspaceFolder}", - "args": [ - "serve", - "--port=8100", - "--db=./data/chainlaunch.db", - ], - "env": { - "CHAINLAUNCH_USER": "admin", - "CHAINLAUNCH_PASSWORD": "admin", - "JAVA_HOME": "/opt/homebrew/opt/openjdk@21" - } - }, - { - "name": "Node 2", - "type": "go", - "request": "launch", - "mode": "auto", - "program": "${workspaceFolder}", - "args": [ - "serve", - "--port=8102", - "--db=./data/node2.db", - ], - "env": { - "CHAINLAUNCH_USER": "admin", - "CHAINLAUNCH_PASSWORD": "admin", - "JAVA_HOME": "/opt/homebrew/opt/openjdk@21" - } - } - ], - "compounds": [ - { - "name": "Launch All Nodes", - "configurations": [ - "Node 1", - "Node 2" - ] - } - ] -} \ No newline at end of file diff --git a/cmd/serve/serve.go b/cmd/serve/serve.go index 6b9e050..1561d69 100644 --- a/cmd/serve/serve.go +++ b/cmd/serve/serve.go @@ -31,6 +31,12 @@ import ( metricscommon "github.com/chainlaunch/chainlaunch/pkg/metrics/common" "github.com/chainlaunch/chainlaunch/pkg/monitoring" nodeTypes "github.com/chainlaunch/chainlaunch/pkg/nodes/types" + "github.com/chainlaunch/chainlaunch/pkg/scai/ai" + "github.com/chainlaunch/chainlaunch/pkg/scai/boilerplates" + "github.com/chainlaunch/chainlaunch/pkg/scai/dirs" + "github.com/chainlaunch/chainlaunch/pkg/scai/files" + "github.com/chainlaunch/chainlaunch/pkg/scai/projectrunner" + "github.com/chainlaunch/chainlaunch/pkg/scai/projects" "github.com/chainlaunch/chainlaunch/pkg/audit" "github.com/chainlaunch/chainlaunch/pkg/chainlaunchdeploy" @@ -48,10 +54,7 @@ import ( "github.com/go-chi/chi/v5" "github.com/go-chi/chi/v5/middleware" "github.com/go-chi/cors" - "github.com/golang-migrate/migrate/v4" - "github.com/golang-migrate/migrate/v4/database/sqlite3" _ "github.com/golang-migrate/migrate/v4/source/file" - "github.com/golang-migrate/migrate/v4/source/iofs" _ "github.com/mattn/go-sqlite3" "github.com/spf13/cobra" httpSwagger "github.com/swaggo/http-swagger" @@ -283,7 +286,7 @@ func ensureKeyExists(filename string, dataPath string) (string, error) { } // setupServer configures and returns the HTTP server -func setupServer(queries *db.Queries, authService *auth.AuthService, views embed.FS, dev bool, dbPath string, dataPath string) *chi.Mux { +func (c *serveCmd) setupServer(queries *db.Queries, authService *auth.AuthService, views embed.FS, dev bool, dbPath string, dataPath string, projectsDir string) *chi.Mux { // Initialize services keyManagementService, err := service.NewKeyManagementService(queries) if err 
!= nil { @@ -488,6 +491,51 @@ func setupServer(queries *db.Queries, authService *auth.AuthService, views embed notificationHandler := notificationhttp.NewNotificationHandler(notificationService) authHandler := auth.NewHandler(authService) auditHandler := audit.NewHandler(auditService, logger) + + // AI handlers + + var aiClient ai.AIClient + switch c.aiProvider { + case "anthropic", "claude": + if c.anthropicKey == "" { + log.Fatal("ANTHROPIC_API_KEY is not set and --anthropic-key not provided") + } + aiClient = ai.NewClaudeAdapter(c.anthropicKey) + case "openai": + if c.openaiKey == "" { + log.Fatal("OPENAI_API_KEY is not set and --openai-key not provided") + } + aiClient = ai.NewOpenAIAdapter(c.openaiKey) + default: + log.Fatalf("Unknown AI provider: %s", c.aiProvider) + } + + chatService := ai.NewChatService(queries) + openAIchatService := ai.NewOpenAIChatServiceWithClient(aiClient, logger, chatService, queries, projectsDir, c.aiModel) + + // Re-initialize projectsService + runner := projectrunner.NewRunner(queries) + projectsService, err := projects.NewProjectsService(queries, runner, projectsDir, organizationService, keyManagementService, networksService) + if err != nil { + log.Fatalf("Failed to create projects service: %v", err) + } + + // Register directory, file, and project handlers + dirsService := dirs.NewDirsService(projectsDir) + dirsHandler := dirs.NewDirsHandler(dirsService, projectsService) + filesService := files.NewFilesService() + filesHandler := files.NewFilesHandler(filesService, projectsService) + + // Create the project runner and inject into ProjectsService + projectsHandler := projects.NewProjectsHandler(projectsService, projectsDir) + + boilerplateService, err := boilerplates.NewBoilerplateService(queries) + if err != nil { + log.Fatalf("Failed to create boilerplate service: %v", err) + } + // Register AI API Gateway routes + aiHandler := ai.NewAIHandler(openAIchatService, chatService, projectsService, boilerplateService) + // Setup router r := chi.NewRouter() @@ -544,6 +592,16 @@ func setupServer(queries *db.Queries, authService *auth.AuthService, views embed // Register smart contract deployment routes scHandler.RegisterRoutes(r) + + // Mount directory management routes + dirsHandler.RegisterRoutes(r) + // Mount file management routes + filesHandler.RegisterRoutes(r) + // Mount project management routes + projectsHandler.RegisterRoutes(r) + // Mount AI/ML routes + aiHandler.RegisterRoutes(r) + }) }) r.Get("/api/swagger/*", httpSwagger.Handler( @@ -573,32 +631,6 @@ func setupServer(queries *db.Queries, authService *auth.AuthService, views embed return r } -func runMigrations(database *sql.DB, migrationsFS embed.FS) error { - driver, err := sqlite3.WithInstance(database, &sqlite3.Config{}) - if err != nil { - return fmt.Errorf("could not create sqlite driver: %v", err) - } - - // Use embedded migrations instead of file system - d, err := iofs.New(migrationsFS, "pkg/db/migrations") - if err != nil { - return fmt.Errorf("could not create iofs driver: %v", err) - } - - m, err := migrate.NewWithInstance( - "iofs", d, - "sqlite3", driver, - ) - if err != nil { - return fmt.Errorf("could not create migrate instance: %v", err) - } - if err := m.Up(); err != nil && err != migrate.ErrNoChange { - return fmt.Errorf("could not run migrations: %v", err) - } - - return nil -} - type serveCmd struct { logger *logger.Logger configCMD config.ConfigCMD @@ -609,8 +641,14 @@ type serveCmd struct { tlsKeyFile string dataPath string dev bool + projectsDir string queries *db.Queries + 
+ openaiKey string + anthropicKey string + aiProvider string + aiModel string } // validate validates the serve command configuration @@ -672,7 +710,7 @@ func (c *serveCmd) preRun() error { log.Fatalf("Failed to open database: %v", err) } // Run migrations - if err := runMigrations(database, c.configCMD.MigrationsFS); err != nil { + if err := db.RunMigrations(database); err != nil { log.Fatalf("Failed to run migrations: %v", err) } @@ -756,7 +794,7 @@ func (c *serveCmd) run() error { } // Setup and start HTTP server - router := setupServer(c.queries, authService, c.configCMD.Views, c.dev, c.dbPath, c.dataPath) + router := c.setupServer(c.queries, authService, c.configCMD.Views, c.dev, c.dbPath, c.dataPath, c.projectsDir) // Start HTTP server in a goroutine httpServer := &http.Server{ @@ -822,6 +860,9 @@ For example: cmd.Flags().StringVar(&serveCmd.tlsCertFile, "tls-cert", "", "Path to TLS certificate file for HTTP server (required)") cmd.Flags().StringVar(&serveCmd.tlsKeyFile, "tls-key", "", "Path to TLS key file for HTTP server (required)") + // Add projects directory flag + cmd.Flags().StringVar(&serveCmd.projectsDir, "projects", "projects-data", "Path to projects directory") + // Update the default data path to use the OS-specific user config directory defaultDataPath := "" if configDir, err := os.UserConfigDir(); err == nil { @@ -837,5 +878,58 @@ For example: // Add development mode flag cmd.Flags().BoolVar(&serveCmd.dev, "dev", false, "Run in development mode") + // Add new flags + cmd.Flags().StringVar(&serveCmd.openaiKey, "openai-key", os.Getenv("OPENAI_API_KEY"), "OpenAI API key (or set OPENAI_API_KEY env var)") + cmd.Flags().StringVar(&serveCmd.anthropicKey, "anthropic-key", os.Getenv("ANTHROPIC_API_KEY"), "Anthropic API key (or set ANTHROPIC_API_KEY env var)") + cmd.Flags().StringVar(&serveCmd.aiProvider, "ai-provider", "openai", "AI provider to use: openai or anthropic") + cmd.Flags().StringVar(&serveCmd.aiModel, "ai-model", "gpt-4o", "AI model to use (e.g. 
gpt-4o, claude-3-opus-20240229)") + return cmd } diff --git a/docs/docs.go b/docs/docs.go index cec06de..53c2f0f 100644 --- a/docs/docs.go +++ b/docs/docs.go @@ -1,4 +1,4 @@ -// Package docs Code generated by swaggo/swag at 2025-05-29 13:52:30.560832 +0200 CEST m=+5.013867459. DO NOT EDIT +// Package docs Code generated by swaggo/swag at 2025-06-11 10:19:10.404083 +0200 CEST m=+3.413638834.
DO NOT EDIT package docs import "github.com/swaggo/swag" @@ -9,21 +9,1525 @@ const docTemplate = `{ "info": { "description": "{{escape .Description}}", "title": "{{.Title}}", - "termsOfService": "http://swagger.io/terms/", - "contact": { - "name": "API Support", - "url": "http://chainlaunch.dev/support", - "email": "support@chainlaunch.dev" + "contact": {}, + "version": "{{.Version}}" + }, + "host": "{{.Host}}", + "basePath": "{{.BasePath}}", + "paths": { + "/api/v1/ai/boilerplates": { + "get": { + "description": "Returns a list of available boilerplates filtered by network platform", + "produces": [ + "application/json" + ], + "tags": [ + "ai" + ], + "summary": "Get available boilerplates", + "parameters": [ + { + "type": "integer", + "description": "Network ID to filter boilerplates by platform", + "name": "network_id", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/ai.Boilerplate" + } + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/ai/generate": { + "post": { + "description": "Generates code based on the provided prompt and project context", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "ai" + ], + "summary": "Generate code", + "parameters": [ + { + "description": "Generation request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/ai.GenerateRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/ai.GenerateResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/ai/models": { + "get": { + "description": "Returns a list of available AI models for code generation", + "produces": [ + "application/json" + ], + "tags": [ + "ai" + ], + "summary": "Get available AI models", + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/ai.Model" + } + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/ai/{projectId}/conversations": { + "get": { + "description": "Returns a list of all chat conversations associated with a specific project", + "produces": [ + "application/json" + ], + "tags": [ + "ai" + ], + "summary": "Get all conversations for a project", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/ai.ConversationResponse" + } + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": 
"Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/ai/{projectId}/conversations/{conversationId}": { + "get": { + "description": "Get all messages in a conversation", + "produces": [ + "application/json" + ], + "tags": [ + "ai" + ], + "summary": "Get conversation messages", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "type": "integer", + "description": "Conversation ID", + "name": "conversationId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/ai.Message" + } + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/ai/{projectId}/conversations/{conversationId}/export": { + "get": { + "description": "Get detailed information about a conversation including all messages and metadata", + "produces": [ + "application/json" + ], + "tags": [ + "ai" + ], + "summary": "Get conversation detail", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "type": "integer", + "description": "Conversation ID", + "name": "conversationId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/ai.ConversationDetail" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects": { + "get": { + "description": "Get a list of all projects", + "produces": [ + "application/json" + ], + "tags": [ + "projects" + ], + "summary": "List all projects", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/projects.ListProjectsResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + }, + "post": { + "description": "Create a new project, scaffold its directory, and store it in the DB", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "projects" + ], + "summary": "Create a project", + "parameters": [ + { + "description": "Project info", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/projects.CreateProjectRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/projects.CreateProjectResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { 
+ "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}": { + "get": { + "description": "Get details of a project by its ID", + "produces": [ + "application/json" + ], + "tags": [ + "projects" + ], + "summary": "Get a project by ID", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/projects.Project" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/commits": { + "get": { + "description": "Get a paginated list of commits for a project, including added/removed/modified files", + "produces": [ + "application/json" + ], + "tags": [ + "projects" + ], + "summary": "List project commits with file changes", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "integer", + "description": "Page number (default 1)", + "name": "page", + "in": "query" + }, + { + "type": "integer", + "description": "Page size (default 20)", + "name": "pageSize", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/projects.CommitsListResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/commits/{commitHash}": { + "get": { + "description": "Get details for a single commit, including file changes", + "produces": [ + "application/json" + ], + "tags": [ + "projects" + ], + "summary": "Get commit details", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Commit hash", + "name": "commitHash", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/projects.CommitDetailAPI" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/diff": { + "get": { + "description": "Get the diff of a file between two commits", + "produces": [ + "text/plain" + ], 
+ "tags": [ + "projects" + ], + "summary": "Get file diff between two commits", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "File path (relative to project root)", + "name": "file", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "From commit hash", + "name": "from", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "To commit hash", + "name": "to", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "Diff", + "schema": { + "type": "string" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/file_at_commit": { + "get": { + "description": "Get the contents of a file at a specific commit hash", + "produces": [ + "text/plain" + ], + "tags": [ + "projects" + ], + "summary": "Get file contents at a specific commit", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "File path (relative to project root)", + "name": "file", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Commit hash", + "name": "commit", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "File contents", + "schema": { + "type": "string" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/logs": { + "get": { + "description": "Stream or return the logs for the project's running container", + "produces": [ + "text/plain" + ], + "tags": [ + "projects" + ], + "summary": "Get logs for a project server", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Logs", + "schema": { + "type": "string" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/logs/stream": { + "get": { + "description": "Stream logs for the project's running container using SSE", + "produces": [ + "text/event-stream" + ], + "tags": [ + "projects" + ], + "summary": "Stream real-time logs for a project server", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "SSE stream of logs", + "schema": { + "type": "string" + } + }, + "400": { + "description": "Bad Request", + "schema": { 
+ "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/start": { + "post": { + "description": "Start the server process for a given project using its boilerplate", + "produces": [ + "application/json" + ], + "tags": [ + "projects" + ], + "summary": "Start the server for a project", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/stop": { + "post": { + "description": "Stop the server process for a given project", + "produces": [ + "application/json" + ], + "tags": [ + "projects" + ], + "summary": "Stop the server for a project", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{projectId}/dirs/create": { + "post": { + "description": "Create a new directory in a project", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "directories" + ], + "summary": "Create a directory", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "description": "Directory create info", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/dirs.CreateDirRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/dirs.CreateDirResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + 
"$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{projectId}/dirs/delete": { + "delete": { + "description": "Delete a directory in a project", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "directories" + ], + "summary": "Delete a directory", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Project name", + "name": "project", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Directory to delete, relative to project root", + "name": "dir", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/dirs.DeleteDirResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{projectId}/dirs/list": { + "get": { + "description": "List files and directories in a given project and directory. 
Large directories (e.g., node_modules) are summarized/skipped.", + "produces": [ + "application/json" + ], + "tags": [ + "directories" + ], + "summary": "List files and directories", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Directory to list, relative to project root", + "name": "dir", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/dirs.ListEntriesResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{projectId}/files/delete": { + "delete": { + "description": "Delete a file in a project", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "files" + ], + "summary": "Delete a file", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "File path relative to project root", + "name": "path", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/files.DeleteFileResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{projectId}/files/entries": { + "get": { + "description": "List the full directory tree for a project, excluding large/ignored folders (e.g., node_modules, .git)", + "produces": [ + "application/json" + ], + "tags": [ + "files" + ], + "summary": "List full project directory tree", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/files.DirectoryTreeNode" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": 
"#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } }, - "license": { - "name": "Apache 2.0", - "url": "http://www.apache.org/licenses/LICENSE-2.0.html" + "/api/v1/projects/{projectId}/files/list": { + "get": { + "description": "List files in a given project and directory", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "files" + ], + "summary": "List files", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Directory to list, relative to project root", + "name": "dir", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/files.ListFilesResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{projectId}/files/read": { + "get": { + "description": "Get the contents of a file in a project", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "files" + ], + "summary": "Read file contents", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "File path relative to project root", + "name": "path", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/files.ReadFileResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": 
"#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{projectId}/files/write": { + "post": { + "description": "Write or modify the contents of a file in a project", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "files" + ], + "summary": "Write file contents", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "description": "File write info", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/files.WriteFileRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/files.WriteFileResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } }, - "version": "{{.Version}}" - }, - "host": "{{.Host}}", - "basePath": "{{.BasePath}}", - "paths": { "/audit/logs": { "get": { "description": "Retrieves a paginated list of audit logs with optional filters", @@ -7368,9 +8872,119 @@ const docTemplate = `{ } } } - } - }, - "definitions": { + } + }, + "definitions": { + "ai.Boilerplate": { + "type": "object", + "properties": { + "description": { + "type": "string" + }, + "name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "platform": { + "type": "string" + } + } + }, + "ai.ConversationDetail": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "messages": { + "type": "array", + "items": { + "$ref": "#/definitions/ai.Message" + } + }, + "projectId": { + "type": "integer" + }, + "startedAt": { + "type": "string" + } + } + }, + "ai.ConversationResponse": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "projectId": { + "type": "integer" + }, + "startedAt": { + "type": "string" + } + } + }, + "ai.GenerateRequest": { + "type": "object", + "properties": { + "projectId": { + "type": "integer" + }, + "prompt": { + "type": "string" + } + } + }, + "ai.GenerateResponse": { + "type": "object", + "properties": { + "code": { + "type": "string" + } + } + }, + "ai.Message": { + "type": "object", + "properties": { + "content": { + "type": "string" + }, + "conversationId": { + "type": "integer" + }, + "createdAt": { + "type": "string" + }, + "id": { + "type": "integer" + }, + "sender": { + "type": "string" + }, + "toolCalls": { + "type": "array", + "items": { + "$ref": "#/definitions/db.ToolCall" + } + } + } + }, + "ai.Model": { + "type": "object", + "properties": { + 
"description": { + "type": "string" + }, + "maxTokens": { + "type": "integer" + }, + "name": { + "type": "string" + } + } + }, "audit.Event": { "type": "object", "properties": { @@ -7993,17 +9607,7 @@ const docTemplate = `{ } }, "chainlaunchdeploy.DeployChaincodeByDefinitionRequest": { - "type": "object", - "properties": { - "container_port": { - "description": "Container port to use (optional, default 7052)", - "type": "string" - }, - "host_port": { - "description": "Host port to use (optional)", - "type": "string" - } - } + "type": "object" }, "chainlaunchdeploy.DeploymentResult": { "type": "object", @@ -8371,103 +9975,274 @@ const docTemplate = `{ } } }, - "chainlaunchdeploy.ListChaincodesResponse": { + "chainlaunchdeploy.ListChaincodesResponse": { + "type": "object", + "properties": { + "chaincodes": { + "type": "array", + "items": { + "$ref": "#/definitions/chainlaunchdeploy.ChaincodeResponse" + } + } + } + }, + "chainlaunchdeploy.PeerStatus": { + "type": "object", + "properties": { + "definition_id": { + "type": "integer" + }, + "id": { + "type": "integer" + }, + "last_updated": { + "description": "ISO8601", + "type": "string" + }, + "peer_id": { + "type": "integer" + }, + "status": { + "type": "string" + } + } + }, + "chainlaunchdeploy.UpdateChaincodeDefinitionRequest": { + "type": "object", + "properties": { + "chaincode_address": { + "description": "Chaincode address", + "type": "string" + }, + "docker_image": { + "description": "Docker image\nrequired: true", + "type": "string" + }, + "endorsement_policy": { + "description": "Endorsement policy", + "type": "string" + }, + "sequence": { + "description": "Sequence\nrequired: true", + "type": "integer" + }, + "version": { + "description": "Version\nrequired: true", + "type": "string" + } + } + }, + "common.QueryResult": { + "type": "object", + "properties": { + "data": { + "type": "object", + "properties": { + "result": { + "type": "array", + "items": { + "type": "object", + "properties": { + "metric": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "value": { + "description": "For instant queries", + "type": "array", + "items": {} + }, + "values": { + "description": "For range queries (matrix)", + "type": "array", + "items": { + "type": "array", + "items": {} + } + } + } + } + }, + "resultType": { + "type": "string" + } + } + }, + "status": { + "type": "string" + } + } + }, + "db.ToolCall": { + "type": "object", + "properties": { + "arguments": { + "type": "string" + }, + "createdAt": { + "type": "string" + }, + "error": { + "$ref": "#/definitions/sql.NullString" + }, + "id": { + "type": "integer" + }, + "messageId": { + "type": "integer" + }, + "result": { + "$ref": "#/definitions/sql.NullString" + }, + "toolName": { + "type": "string" + } + } + }, + "dirs.CreateDirRequest": { + "type": "object", + "properties": { + "dir": { + "type": "string", + "example": "newdir" + }, + "project": { + "type": "string", + "example": "myproject" + } + } + }, + "dirs.CreateDirResponse": { + "type": "object", + "properties": { + "status": { + "type": "string", + "example": "created" + } + } + }, + "dirs.DeleteDirResponse": { + "type": "object", + "properties": { + "status": { + "type": "string", + "example": "deleted" + } + } + }, + "dirs.ListEntriesResponse": { + "description": "Unified response for listing files and directories in a directory", + "type": "object", + "properties": { + "directories": { + "type": "array", + "items": { + "type": "string" + }, + "example": [ + "[\"src\"", + "\"docs\"]" + ] + }, + 
"files": { + "type": "array", + "items": { + "type": "string" + }, + "example": [ + "[\"main.go\"", + "\"README.md\"]" + ] + }, + "skipped": { + "type": "array", + "items": { + "type": "string" + }, + "example": [ + "[\"node_modules\"]" + ] + } + } + }, + "files.DeleteFileResponse": { + "type": "object", + "properties": { + "status": { + "type": "string", + "example": "deleted" + } + } + }, + "files.DirectoryTreeNode": { + "type": "object", + "properties": { + "children": { + "type": "array", + "items": { + "$ref": "#/definitions/files.DirectoryTreeNode" + } + }, + "isDir": { + "type": "boolean" + }, + "name": { + "type": "string" + }, + "path": { + "type": "string" + } + } + }, + "files.ListFilesResponse": { "type": "object", "properties": { - "chaincodes": { + "files": { "type": "array", "items": { - "$ref": "#/definitions/chainlaunchdeploy.ChaincodeResponse" - } + "type": "string" + }, + "example": [ + "[\"main.go\"", + "\"README.md\"]" + ] } } }, - "chainlaunchdeploy.PeerStatus": { + "files.ReadFileResponse": { "type": "object", "properties": { - "definition_id": { - "type": "integer" - }, - "id": { - "type": "integer" - }, - "last_updated": { - "description": "ISO8601", - "type": "string" - }, - "peer_id": { - "type": "integer" - }, - "status": { - "type": "string" + "content": { + "type": "string", + "example": "file contents" } } }, - "chainlaunchdeploy.UpdateChaincodeDefinitionRequest": { + "files.WriteFileRequest": { "type": "object", "properties": { - "chaincode_address": { - "description": "Chaincode address", - "type": "string" - }, - "docker_image": { - "description": "Docker image\nrequired: true", - "type": "string" - }, - "endorsement_policy": { - "description": "Endorsement policy", - "type": "string" + "content": { + "type": "string", + "example": "new file contents" }, - "sequence": { - "description": "Sequence\nrequired: true", - "type": "integer" + "path": { + "type": "string", + "example": "main.go" }, - "version": { - "description": "Version\nrequired: true", - "type": "string" + "project": { + "type": "string", + "example": "myproject" } } }, - "common.QueryResult": { + "files.WriteFileResponse": { "type": "object", "properties": { - "data": { - "type": "object", - "properties": { - "result": { - "type": "array", - "items": { - "type": "object", - "properties": { - "metric": { - "type": "object", - "additionalProperties": { - "type": "string" - } - }, - "value": { - "description": "For instant queries", - "type": "array", - "items": {} - }, - "values": { - "description": "For range queries (matrix)", - "type": "array", - "items": { - "type": "array", - "items": {} - } - } - } - } - }, - "resultType": { - "type": "string" - } - } - }, "status": { - "type": "string" + "type": "string", + "example": "written" } } }, @@ -10856,6 +12631,200 @@ const docTemplate = `{ } } }, + "projects.CommitDetailAPI": { + "type": "object", + "properties": { + "added": { + "type": "array", + "items": { + "type": "string" + } + }, + "author": { + "type": "string" + }, + "hash": { + "type": "string" + }, + "message": { + "type": "string" + }, + "modified": { + "type": "array", + "items": { + "type": "string" + } + }, + "parent": { + "type": "string" + }, + "removed": { + "type": "array", + "items": { + "type": "string" + } + }, + "timestamp": { + "type": "string" + } + } + }, + "projects.CommitWithFileChangesAPI": { + "type": "object", + "properties": { + "added": { + "type": "array", + "items": { + "type": "string" + } + }, + "author": { + "type": "string" + }, + "hash": { + "type": 
"string" + }, + "message": { + "type": "string" + }, + "modified": { + "type": "array", + "items": { + "type": "string" + } + }, + "parent": { + "type": "string" + }, + "removed": { + "type": "array", + "items": { + "type": "string" + } + }, + "timestamp": { + "type": "string" + } + } + }, + "projects.CommitsListResponse": { + "type": "object", + "properties": { + "commits": { + "type": "array", + "items": { + "$ref": "#/definitions/projects.CommitWithFileChangesAPI" + } + } + } + }, + "projects.CreateProjectRequest": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "boilerplate": { + "type": "string", + "example": "go-basic" + }, + "description": { + "type": "string", + "example": "A sample project" + }, + "name": { + "type": "string", + "example": "myproject" + }, + "networkId": { + "type": "integer", + "example": 1 + } + } + }, + "projects.CreateProjectResponse": { + "type": "object", + "properties": { + "boilerplate": { + "type": "string", + "example": "go-basic" + }, + "containerPort": { + "type": "integer" + }, + "description": { + "type": "string", + "example": "A sample project" + }, + "id": { + "type": "integer", + "example": 1 + }, + "name": { + "type": "string", + "example": "myproject" + }, + "networkId": { + "type": "integer" + }, + "slug": { + "type": "string", + "example": "myproject-abc12" + } + } + }, + "projects.ListProjectsResponse": { + "type": "object", + "properties": { + "projects": { + "type": "array", + "items": { + "$ref": "#/definitions/projects.Project" + } + } + } + }, + "projects.Project": { + "type": "object", + "properties": { + "boilerplate": { + "type": "string", + "example": "go-basic" + }, + "containerPort": { + "type": "integer" + }, + "description": { + "type": "string", + "example": "A sample project" + }, + "id": { + "type": "integer", + "example": 1 + }, + "lastStartedAt": { + "type": "string" + }, + "lastStoppedAt": { + "type": "string" + }, + "name": { + "type": "string", + "example": "myproject" + }, + "networkId": { + "type": "integer" + }, + "slug": { + "type": "string", + "example": "myproject-abc12" + }, + "status": { + "type": "string", + "example": "running" + } + } + }, "registry.PluginMetadata": { "type": "object", "properties": { @@ -11334,6 +13303,18 @@ const docTemplate = `{ } } }, + "sql.NullString": { + "type": "object", + "properties": { + "string": { + "type": "string" + }, + "valid": { + "description": "Valid is true if String is not NULL", + "type": "boolean" + } + } + }, "time.Duration": { "type": "integer", "enum": [ @@ -12029,97 +14010,17 @@ const docTemplate = `{ "KeyUsageDecipherOnly" ] } - }, - "securityDefinitions": { - "BasicAuth": { - "type": "basic" - }, - "CookieAuth": { - "type": "apiKey", - "name": "session_id", - "in": "cookie" - } - }, - "tags": [ - { - "description": "Audit management operations", - "name": "Audit" - }, - { - "description": "User authentication and authorization operations", - "name": "Authentication" - }, - { - "description": "Backup schedule configuration and management", - "name": "Backup Schedules" - }, - { - "description": "Backup target location configuration and management", - "name": "Backup Targets" - }, - { - "description": "Backup management operations", - "name": "Backups" - }, - { - "description": "Hyperledger Besu network management operations", - "name": "Besu Networks" - }, - { - "description": "Hyperledger Fabric network management operations", - "name": "Fabric Networks" - }, - { - "description": "Cryptographic key management operations", - "name": "Keys" - 
}, - { - "description": "Metrics management operations", - "name": "Metrics" - }, - { - "description": "Network node management operations", - "name": "Nodes" - }, - { - "description": "System notification configuration and management", - "name": "Notifications" - }, - { - "description": "Organization management operations", - "name": "Organizations" - }, - { - "description": "Plugin management operations", - "name": "Plugins" - }, - { - "description": "Key provider management operations", - "name": "Providers" - }, - { - "description": "Settings management operations", - "name": "Settings" - }, - { - "description": "Smart contract management operations", - "name": "SmartContracts" - }, - { - "description": "User account management operations", - "name": "Users" - } - ] + } }` // SwaggerInfo holds exported Swagger Info so clients can modify it var SwaggerInfo = &swag.Spec{ - Version: "1.0", - Host: "localhost:8100", - BasePath: "/api/v1", - Schemes: []string{"http", "https"}, - Title: "ChainLaunch API", - Description: "ChainLaunch API provides services for managing blockchain networks and cryptographic keys", + Version: "", + Host: "", + BasePath: "", + Schemes: []string{}, + Title: "", + Description: "", InfoInstanceName: "swagger", SwaggerTemplate: docTemplate, LeftDelim: "{{", diff --git a/docs/swagger.json b/docs/swagger.json index 19f0cbe..a721a1b 100644 --- a/docs/swagger.json +++ b/docs/swagger.json @@ -1,27 +1,1522 @@ { - "schemes": [ - "http", - "https" - ], "swagger": "2.0", "info": { - "description": "ChainLaunch API provides services for managing blockchain networks and cryptographic keys", - "title": "ChainLaunch API", - "termsOfService": "http://swagger.io/terms/", - "contact": { - "name": "API Support", - "url": "http://chainlaunch.dev/support", - "email": "support@chainlaunch.dev" - }, - "license": { - "name": "Apache 2.0", - "url": "http://www.apache.org/licenses/LICENSE-2.0.html" - }, - "version": "1.0" + "contact": {} }, - "host": "localhost:8100", - "basePath": "/api/v1", "paths": { + "/api/v1/ai/boilerplates": { + "get": { + "description": "Returns a list of available boilerplates filtered by network platform", + "produces": [ + "application/json" + ], + "tags": [ + "ai" + ], + "summary": "Get available boilerplates", + "parameters": [ + { + "type": "integer", + "description": "Network ID to filter boilerplates by platform", + "name": "network_id", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/ai.Boilerplate" + } + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/ai/generate": { + "post": { + "description": "Generates code based on the provided prompt and project context", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "ai" + ], + "summary": "Generate code", + "parameters": [ + { + "description": "Generation request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/ai.GenerateRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/ai.GenerateResponse" + } + }, + "400": { 
+ "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/ai/models": { + "get": { + "description": "Returns a list of available AI models for code generation", + "produces": [ + "application/json" + ], + "tags": [ + "ai" + ], + "summary": "Get available AI models", + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/ai.Model" + } + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/ai/{projectId}/conversations": { + "get": { + "description": "Returns a list of all chat conversations associated with a specific project", + "produces": [ + "application/json" + ], + "tags": [ + "ai" + ], + "summary": "Get all conversations for a project", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/ai.ConversationResponse" + } + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/ai/{projectId}/conversations/{conversationId}": { + "get": { + "description": "Get all messages in a conversation", + "produces": [ + "application/json" + ], + "tags": [ + "ai" + ], + "summary": "Get conversation messages", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "type": "integer", + "description": "Conversation ID", + "name": "conversationId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/ai.Message" + } + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/ai/{projectId}/conversations/{conversationId}/export": { + "get": { + "description": "Get detailed information about a conversation including all messages and metadata", + "produces": [ + "application/json" + ], + "tags": [ + "ai" + ], + "summary": "Get conversation detail", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "type": "integer", + "description": "Conversation ID", + "name": "conversationId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/ai.ConversationDetail" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": 
"#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects": { + "get": { + "description": "Get a list of all projects", + "produces": [ + "application/json" + ], + "tags": [ + "projects" + ], + "summary": "List all projects", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/projects.ListProjectsResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + }, + "post": { + "description": "Create a new project, scaffold its directory, and store it in the DB", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "projects" + ], + "summary": "Create a project", + "parameters": [ + { + "description": "Project info", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/projects.CreateProjectRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/projects.CreateProjectResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}": { + "get": { + "description": "Get details of a project by its ID", + "produces": [ + "application/json" + ], + "tags": [ + "projects" + ], + "summary": "Get a project by ID", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/projects.Project" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/commits": { + "get": { + "description": "Get a paginated list of commits for a project, including added/removed/modified files", + "produces": [ + "application/json" + ], + "tags": [ + "projects" + ], + "summary": "List project commits with file changes", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "integer", + "description": "Page number (default 1)", + "name": "page", + "in": "query" + }, + { + "type": "integer", + "description": "Page size (default 20)", + "name": "pageSize", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + 
"schema": { + "$ref": "#/definitions/projects.CommitsListResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/commits/{commitHash}": { + "get": { + "description": "Get details for a single commit, including file changes", + "produces": [ + "application/json" + ], + "tags": [ + "projects" + ], + "summary": "Get commit details", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Commit hash", + "name": "commitHash", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/projects.CommitDetailAPI" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/diff": { + "get": { + "description": "Get the diff of a file between two commits", + "produces": [ + "text/plain" + ], + "tags": [ + "projects" + ], + "summary": "Get file diff between two commits", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "File path (relative to project root)", + "name": "file", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "From commit hash", + "name": "from", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "To commit hash", + "name": "to", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "Diff", + "schema": { + "type": "string" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/file_at_commit": { + "get": { + "description": "Get the contents of a file at a specific commit hash", + "produces": [ + "text/plain" + ], + "tags": [ + "projects" + ], + "summary": "Get file contents at a specific commit", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "File path (relative to project root)", + "name": "file", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Commit hash", + "name": "commit", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "File contents", + "schema": { + "type": "string" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": 
"#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/logs": { + "get": { + "description": "Stream or return the logs for the project's running container", + "produces": [ + "text/plain" + ], + "tags": [ + "projects" + ], + "summary": "Get logs for a project server", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Logs", + "schema": { + "type": "string" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/logs/stream": { + "get": { + "description": "Stream logs for the project's running container using SSE", + "produces": [ + "text/event-stream" + ], + "tags": [ + "projects" + ], + "summary": "Stream real-time logs for a project server", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "SSE stream of logs", + "schema": { + "type": "string" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/start": { + "post": { + "description": "Start the server process for a given project using its boilerplate", + "produces": [ + "application/json" + ], + "tags": [ + "projects" + ], + "summary": "Start the server for a project", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{id}/stop": { + "post": { + "description": "Stop the server process for a given project", + "produces": [ + "application/json" + ], + "tags": [ + "projects" + ], + "summary": "Stop the server for a project", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": 
"Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{projectId}/dirs/create": { + "post": { + "description": "Create a new directory in a project", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "directories" + ], + "summary": "Create a directory", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "description": "Directory create info", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/dirs.CreateDirRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/dirs.CreateDirResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{projectId}/dirs/delete": { + "delete": { + "description": "Delete a directory in a project", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "directories" + ], + "summary": "Delete a directory", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Project name", + "name": "project", + "in": "query", + "required": true + }, + { + "type": "string", + "description": "Directory to delete, relative to project root", + "name": "dir", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/dirs.DeleteDirResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{projectId}/dirs/list": { + "get": { + "description": "List files and directories in a given project and directory. 
Large directories (e.g., node_modules) are summarized/skipped.", + "produces": [ + "application/json" + ], + "tags": [ + "directories" + ], + "summary": "List files and directories", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Directory to list, relative to project root", + "name": "dir", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/dirs.ListEntriesResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{projectId}/files/delete": { + "delete": { + "description": "Delete a file in a project", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "files" + ], + "summary": "Delete a file", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "File path relative to project root", + "name": "path", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/files.DeleteFileResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{projectId}/files/entries": { + "get": { + "description": "List the full directory tree for a project, excluding large/ignored folders (e.g., node_modules, .git)", + "produces": [ + "application/json" + ], + "tags": [ + "files" + ], + "summary": "List full project directory tree", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/files.DirectoryTreeNode" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": 
"#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{projectId}/files/list": { + "get": { + "description": "List files in a given project and directory", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "files" + ], + "summary": "List files", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Directory to list, relative to project root", + "name": "dir", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/files.ListFilesResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{projectId}/files/read": { + "get": { + "description": "Get the contents of a file in a project", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "files" + ], + "summary": "Read file contents", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "File path relative to project root", + "name": "path", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/files.ReadFileResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": 
"#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, + "/api/v1/projects/{projectId}/files/write": { + "post": { + "description": "Write or modify the contents of a file in a project", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "files" + ], + "summary": "Write file contents", + "parameters": [ + { + "type": "integer", + "description": "Project ID", + "name": "projectId", + "in": "path", + "required": true + }, + { + "description": "File write info", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/files.WriteFileRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/files.WriteFileResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "401": { + "description": "Unauthorized", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "403": { + "description": "Forbidden", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "404": { + "description": "Not Found", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "409": { + "description": "Conflict", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "422": { + "description": "Unprocessable Entity", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "$ref": "#/definitions/response.ErrorResponse" + } + } + } + } + }, "/audit/logs": { "get": { "description": "Retrieves a paginated list of audit logs with optional filters", @@ -7366,9 +8861,119 @@ } } } - } - }, - "definitions": { + } + }, + "definitions": { + "ai.Boilerplate": { + "type": "object", + "properties": { + "description": { + "type": "string" + }, + "name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "platform": { + "type": "string" + } + } + }, + "ai.ConversationDetail": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "messages": { + "type": "array", + "items": { + "$ref": "#/definitions/ai.Message" + } + }, + "projectId": { + "type": "integer" + }, + "startedAt": { + "type": "string" + } + } + }, + "ai.ConversationResponse": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "projectId": { + "type": "integer" + }, + "startedAt": { + "type": "string" + } + } + }, + "ai.GenerateRequest": { + "type": "object", + "properties": { + "projectId": { + "type": "integer" + }, + "prompt": { + "type": "string" + } + } + }, + "ai.GenerateResponse": { + "type": "object", + "properties": { + "code": { + "type": "string" + } + } + }, + "ai.Message": { + "type": "object", + "properties": { + "content": { + "type": "string" + }, + "conversationId": { + "type": "integer" + }, + "createdAt": { + "type": "string" + }, + "id": { + "type": "integer" + }, + "sender": { + "type": "string" + }, + "toolCalls": { + "type": "array", + "items": { + "$ref": "#/definitions/db.ToolCall" + } + } + } + }, + "ai.Model": { + "type": "object", + "properties": { + "description": { + "type": "string" + }, + "maxTokens": { + "type": "integer" + }, + "name": { + "type": "string" + } + } + }, "audit.Event": { "type": "object", "properties": { @@ -7991,17 +9596,7 @@ } }, 
"chainlaunchdeploy.DeployChaincodeByDefinitionRequest": { - "type": "object", - "properties": { - "container_port": { - "description": "Container port to use (optional, default 7052)", - "type": "string" - }, - "host_port": { - "description": "Host port to use (optional)", - "type": "string" - } - } + "type": "object" }, "chainlaunchdeploy.DeploymentResult": { "type": "object", @@ -8369,103 +9964,274 @@ } } }, - "chainlaunchdeploy.ListChaincodesResponse": { + "chainlaunchdeploy.ListChaincodesResponse": { + "type": "object", + "properties": { + "chaincodes": { + "type": "array", + "items": { + "$ref": "#/definitions/chainlaunchdeploy.ChaincodeResponse" + } + } + } + }, + "chainlaunchdeploy.PeerStatus": { + "type": "object", + "properties": { + "definition_id": { + "type": "integer" + }, + "id": { + "type": "integer" + }, + "last_updated": { + "description": "ISO8601", + "type": "string" + }, + "peer_id": { + "type": "integer" + }, + "status": { + "type": "string" + } + } + }, + "chainlaunchdeploy.UpdateChaincodeDefinitionRequest": { + "type": "object", + "properties": { + "chaincode_address": { + "description": "Chaincode address", + "type": "string" + }, + "docker_image": { + "description": "Docker image\nrequired: true", + "type": "string" + }, + "endorsement_policy": { + "description": "Endorsement policy", + "type": "string" + }, + "sequence": { + "description": "Sequence\nrequired: true", + "type": "integer" + }, + "version": { + "description": "Version\nrequired: true", + "type": "string" + } + } + }, + "common.QueryResult": { + "type": "object", + "properties": { + "data": { + "type": "object", + "properties": { + "result": { + "type": "array", + "items": { + "type": "object", + "properties": { + "metric": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "value": { + "description": "For instant queries", + "type": "array", + "items": {} + }, + "values": { + "description": "For range queries (matrix)", + "type": "array", + "items": { + "type": "array", + "items": {} + } + } + } + } + }, + "resultType": { + "type": "string" + } + } + }, + "status": { + "type": "string" + } + } + }, + "db.ToolCall": { + "type": "object", + "properties": { + "arguments": { + "type": "string" + }, + "createdAt": { + "type": "string" + }, + "error": { + "$ref": "#/definitions/sql.NullString" + }, + "id": { + "type": "integer" + }, + "messageId": { + "type": "integer" + }, + "result": { + "$ref": "#/definitions/sql.NullString" + }, + "toolName": { + "type": "string" + } + } + }, + "dirs.CreateDirRequest": { + "type": "object", + "properties": { + "dir": { + "type": "string", + "example": "newdir" + }, + "project": { + "type": "string", + "example": "myproject" + } + } + }, + "dirs.CreateDirResponse": { + "type": "object", + "properties": { + "status": { + "type": "string", + "example": "created" + } + } + }, + "dirs.DeleteDirResponse": { + "type": "object", + "properties": { + "status": { + "type": "string", + "example": "deleted" + } + } + }, + "dirs.ListEntriesResponse": { + "description": "Unified response for listing files and directories in a directory", + "type": "object", + "properties": { + "directories": { + "type": "array", + "items": { + "type": "string" + }, + "example": [ + "[\"src\"", + "\"docs\"]" + ] + }, + "files": { + "type": "array", + "items": { + "type": "string" + }, + "example": [ + "[\"main.go\"", + "\"README.md\"]" + ] + }, + "skipped": { + "type": "array", + "items": { + "type": "string" + }, + "example": [ + "[\"node_modules\"]" + ] + } + } + }, + 
"files.DeleteFileResponse": { + "type": "object", + "properties": { + "status": { + "type": "string", + "example": "deleted" + } + } + }, + "files.DirectoryTreeNode": { + "type": "object", + "properties": { + "children": { + "type": "array", + "items": { + "$ref": "#/definitions/files.DirectoryTreeNode" + } + }, + "isDir": { + "type": "boolean" + }, + "name": { + "type": "string" + }, + "path": { + "type": "string" + } + } + }, + "files.ListFilesResponse": { "type": "object", "properties": { - "chaincodes": { + "files": { "type": "array", "items": { - "$ref": "#/definitions/chainlaunchdeploy.ChaincodeResponse" - } + "type": "string" + }, + "example": [ + "[\"main.go\"", + "\"README.md\"]" + ] } } }, - "chainlaunchdeploy.PeerStatus": { + "files.ReadFileResponse": { "type": "object", "properties": { - "definition_id": { - "type": "integer" - }, - "id": { - "type": "integer" - }, - "last_updated": { - "description": "ISO8601", - "type": "string" - }, - "peer_id": { - "type": "integer" - }, - "status": { - "type": "string" + "content": { + "type": "string", + "example": "file contents" } } }, - "chainlaunchdeploy.UpdateChaincodeDefinitionRequest": { + "files.WriteFileRequest": { "type": "object", "properties": { - "chaincode_address": { - "description": "Chaincode address", - "type": "string" - }, - "docker_image": { - "description": "Docker image\nrequired: true", - "type": "string" - }, - "endorsement_policy": { - "description": "Endorsement policy", - "type": "string" + "content": { + "type": "string", + "example": "new file contents" }, - "sequence": { - "description": "Sequence\nrequired: true", - "type": "integer" + "path": { + "type": "string", + "example": "main.go" }, - "version": { - "description": "Version\nrequired: true", - "type": "string" + "project": { + "type": "string", + "example": "myproject" } } }, - "common.QueryResult": { + "files.WriteFileResponse": { "type": "object", "properties": { - "data": { - "type": "object", - "properties": { - "result": { - "type": "array", - "items": { - "type": "object", - "properties": { - "metric": { - "type": "object", - "additionalProperties": { - "type": "string" - } - }, - "value": { - "description": "For instant queries", - "type": "array", - "items": {} - }, - "values": { - "description": "For range queries (matrix)", - "type": "array", - "items": { - "type": "array", - "items": {} - } - } - } - } - }, - "resultType": { - "type": "string" - } - } - }, "status": { - "type": "string" + "type": "string", + "example": "written" } } }, @@ -10854,6 +12620,200 @@ } } }, + "projects.CommitDetailAPI": { + "type": "object", + "properties": { + "added": { + "type": "array", + "items": { + "type": "string" + } + }, + "author": { + "type": "string" + }, + "hash": { + "type": "string" + }, + "message": { + "type": "string" + }, + "modified": { + "type": "array", + "items": { + "type": "string" + } + }, + "parent": { + "type": "string" + }, + "removed": { + "type": "array", + "items": { + "type": "string" + } + }, + "timestamp": { + "type": "string" + } + } + }, + "projects.CommitWithFileChangesAPI": { + "type": "object", + "properties": { + "added": { + "type": "array", + "items": { + "type": "string" + } + }, + "author": { + "type": "string" + }, + "hash": { + "type": "string" + }, + "message": { + "type": "string" + }, + "modified": { + "type": "array", + "items": { + "type": "string" + } + }, + "parent": { + "type": "string" + }, + "removed": { + "type": "array", + "items": { + "type": "string" + } + }, + "timestamp": { + "type": "string" + } + 
} + }, + "projects.CommitsListResponse": { + "type": "object", + "properties": { + "commits": { + "type": "array", + "items": { + "$ref": "#/definitions/projects.CommitWithFileChangesAPI" + } + } + } + }, + "projects.CreateProjectRequest": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "boilerplate": { + "type": "string", + "example": "go-basic" + }, + "description": { + "type": "string", + "example": "A sample project" + }, + "name": { + "type": "string", + "example": "myproject" + }, + "networkId": { + "type": "integer", + "example": 1 + } + } + }, + "projects.CreateProjectResponse": { + "type": "object", + "properties": { + "boilerplate": { + "type": "string", + "example": "go-basic" + }, + "containerPort": { + "type": "integer" + }, + "description": { + "type": "string", + "example": "A sample project" + }, + "id": { + "type": "integer", + "example": 1 + }, + "name": { + "type": "string", + "example": "myproject" + }, + "networkId": { + "type": "integer" + }, + "slug": { + "type": "string", + "example": "myproject-abc12" + } + } + }, + "projects.ListProjectsResponse": { + "type": "object", + "properties": { + "projects": { + "type": "array", + "items": { + "$ref": "#/definitions/projects.Project" + } + } + } + }, + "projects.Project": { + "type": "object", + "properties": { + "boilerplate": { + "type": "string", + "example": "go-basic" + }, + "containerPort": { + "type": "integer" + }, + "description": { + "type": "string", + "example": "A sample project" + }, + "id": { + "type": "integer", + "example": 1 + }, + "lastStartedAt": { + "type": "string" + }, + "lastStoppedAt": { + "type": "string" + }, + "name": { + "type": "string", + "example": "myproject" + }, + "networkId": { + "type": "integer" + }, + "slug": { + "type": "string", + "example": "myproject-abc12" + }, + "status": { + "type": "string", + "example": "running" + } + } + }, "registry.PluginMetadata": { "type": "object", "properties": { @@ -11332,6 +13292,18 @@ } } }, + "sql.NullString": { + "type": "object", + "properties": { + "string": { + "type": "string" + }, + "valid": { + "description": "Valid is true if String is not NULL", + "type": "boolean" + } + } + }, "time.Duration": { "type": "integer", "enum": [ @@ -12027,85 +13999,5 @@ "KeyUsageDecipherOnly" ] } - }, - "securityDefinitions": { - "BasicAuth": { - "type": "basic" - }, - "CookieAuth": { - "type": "apiKey", - "name": "session_id", - "in": "cookie" - } - }, - "tags": [ - { - "description": "Audit management operations", - "name": "Audit" - }, - { - "description": "User authentication and authorization operations", - "name": "Authentication" - }, - { - "description": "Backup schedule configuration and management", - "name": "Backup Schedules" - }, - { - "description": "Backup target location configuration and management", - "name": "Backup Targets" - }, - { - "description": "Backup management operations", - "name": "Backups" - }, - { - "description": "Hyperledger Besu network management operations", - "name": "Besu Networks" - }, - { - "description": "Hyperledger Fabric network management operations", - "name": "Fabric Networks" - }, - { - "description": "Cryptographic key management operations", - "name": "Keys" - }, - { - "description": "Metrics management operations", - "name": "Metrics" - }, - { - "description": "Network node management operations", - "name": "Nodes" - }, - { - "description": "System notification configuration and management", - "name": "Notifications" - }, - { - "description": "Organization management operations", - 
"name": "Organizations" - }, - { - "description": "Plugin management operations", - "name": "Plugins" - }, - { - "description": "Key provider management operations", - "name": "Providers" - }, - { - "description": "Settings management operations", - "name": "Settings" - }, - { - "description": "Smart contract management operations", - "name": "SmartContracts" - }, - { - "description": "User account management operations", - "name": "Users" - } - ] + } } \ No newline at end of file diff --git a/docs/swagger.yaml b/docs/swagger.yaml index 7e1861f..3c7423d 100644 --- a/docs/swagger.yaml +++ b/docs/swagger.yaml @@ -1,5 +1,75 @@ -basePath: /api/v1 definitions: + ai.Boilerplate: + properties: + description: + type: string + name: + type: string + path: + type: string + platform: + type: string + type: object + ai.ConversationDetail: + properties: + id: + type: integer + messages: + items: + $ref: '#/definitions/ai.Message' + type: array + projectId: + type: integer + startedAt: + type: string + type: object + ai.ConversationResponse: + properties: + id: + type: integer + projectId: + type: integer + startedAt: + type: string + type: object + ai.GenerateRequest: + properties: + projectId: + type: integer + prompt: + type: string + type: object + ai.GenerateResponse: + properties: + code: + type: string + type: object + ai.Message: + properties: + content: + type: string + conversationId: + type: integer + createdAt: + type: string + id: + type: integer + sender: + type: string + toolCalls: + items: + $ref: '#/definitions/db.ToolCall' + type: array + type: object + ai.Model: + properties: + description: + type: string + maxTokens: + type: integer + name: + type: string + type: object audit.Event: properties: affectedResource: @@ -438,13 +508,6 @@ definitions: $ref: '#/definitions/chainlaunchdeploy.ChaincodeResponse' type: object chainlaunchdeploy.DeployChaincodeByDefinitionRequest: - properties: - container_port: - description: Container port to use (optional, default 7052) - type: string - host_port: - description: Host port to use (optional) - type: string type: object chainlaunchdeploy.DeploymentResult: properties: @@ -765,6 +828,121 @@ definitions: status: type: string type: object + db.ToolCall: + properties: + arguments: + type: string + createdAt: + type: string + error: + $ref: '#/definitions/sql.NullString' + id: + type: integer + messageId: + type: integer + result: + $ref: '#/definitions/sql.NullString' + toolName: + type: string + type: object + dirs.CreateDirRequest: + properties: + dir: + example: newdir + type: string + project: + example: myproject + type: string + type: object + dirs.CreateDirResponse: + properties: + status: + example: created + type: string + type: object + dirs.DeleteDirResponse: + properties: + status: + example: deleted + type: string + type: object + dirs.ListEntriesResponse: + description: Unified response for listing files and directories in a directory + properties: + directories: + example: + - '["src"' + - '"docs"]' + items: + type: string + type: array + files: + example: + - '["main.go"' + - '"README.md"]' + items: + type: string + type: array + skipped: + example: + - '["node_modules"]' + items: + type: string + type: array + type: object + files.DeleteFileResponse: + properties: + status: + example: deleted + type: string + type: object + files.DirectoryTreeNode: + properties: + children: + items: + $ref: '#/definitions/files.DirectoryTreeNode' + type: array + isDir: + type: boolean + name: + type: string + path: + type: string + type: object + 
files.ListFilesResponse: + properties: + files: + example: + - '["main.go"' + - '"README.md"]' + items: + type: string + type: array + type: object + files.ReadFileResponse: + properties: + content: + example: file contents + type: string + type: object + files.WriteFileRequest: + properties: + content: + example: new file contents + type: string + path: + example: main.go + type: string + project: + example: myproject + type: string + type: object + files.WriteFileResponse: + properties: + status: + example: written + type: string + type: object github_com_chainlaunch_chainlaunch_pkg_metrics_common.Status: properties: deployment_mode: @@ -2459,6 +2637,138 @@ definitions: type: string type: array type: object + projects.CommitDetailAPI: + properties: + added: + items: + type: string + type: array + author: + type: string + hash: + type: string + message: + type: string + modified: + items: + type: string + type: array + parent: + type: string + removed: + items: + type: string + type: array + timestamp: + type: string + type: object + projects.CommitWithFileChangesAPI: + properties: + added: + items: + type: string + type: array + author: + type: string + hash: + type: string + message: + type: string + modified: + items: + type: string + type: array + parent: + type: string + removed: + items: + type: string + type: array + timestamp: + type: string + type: object + projects.CommitsListResponse: + properties: + commits: + items: + $ref: '#/definitions/projects.CommitWithFileChangesAPI' + type: array + type: object + projects.CreateProjectRequest: + properties: + boilerplate: + example: go-basic + type: string + description: + example: A sample project + type: string + name: + example: myproject + type: string + networkId: + example: 1 + type: integer + required: + - name + type: object + projects.CreateProjectResponse: + properties: + boilerplate: + example: go-basic + type: string + containerPort: + type: integer + description: + example: A sample project + type: string + id: + example: 1 + type: integer + name: + example: myproject + type: string + networkId: + type: integer + slug: + example: myproject-abc12 + type: string + type: object + projects.ListProjectsResponse: + properties: + projects: + items: + $ref: '#/definitions/projects.Project' + type: array + type: object + projects.Project: + properties: + boilerplate: + example: go-basic + type: string + containerPort: + type: integer + description: + example: A sample project + type: string + id: + example: 1 + type: integer + lastStartedAt: + type: string + lastStoppedAt: + type: string + name: + example: myproject + type: string + networkId: + type: integer + slug: + example: myproject-abc12 + type: string + status: + example: running + type: string + type: object registry.PluginMetadata: properties: author: @@ -2775,6 +3085,14 @@ definitions: peerTemplateCMD: type: string type: object + sql.NullString: + properties: + string: + type: string + valid: + description: Valid is true if String is not NULL + type: boolean + type: object time.Duration: enum: - -9223372036854775808 @@ -3280,21 +3598,1019 @@ definitions: - KeyUsageCRLSign - KeyUsageEncipherOnly - KeyUsageDecipherOnly -host: localhost:8100 info: - contact: - email: support@chainlaunch.dev - name: API Support - url: http://chainlaunch.dev/support - description: ChainLaunch API provides services for managing blockchain networks - and cryptographic keys - license: - name: Apache 2.0 - url: http://www.apache.org/licenses/LICENSE-2.0.html - termsOfService: 
http://swagger.io/terms/ - title: ChainLaunch API - version: "1.0" + contact: {} paths: + /api/v1/ai/{projectId}/conversations: + get: + description: Returns a list of all chat conversations associated with a specific + project + parameters: + - description: Project ID + in: path + name: projectId + required: true + type: integer + produces: + - application/json + responses: + "200": + description: OK + schema: + items: + $ref: '#/definitions/ai.ConversationResponse' + type: array + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Get all conversations for a project + tags: + - ai + /api/v1/ai/{projectId}/conversations/{conversationId}: + get: + description: Get all messages in a conversation + parameters: + - description: Project ID + in: path + name: projectId + required: true + type: integer + - description: Conversation ID + in: path + name: conversationId + required: true + type: integer + produces: + - application/json + responses: + "200": + description: OK + schema: + items: + $ref: '#/definitions/ai.Message' + type: array + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Get conversation messages + tags: + - ai + /api/v1/ai/{projectId}/conversations/{conversationId}/export: + get: + description: Get detailed information about a conversation including all messages + and metadata + parameters: + - description: Project ID + in: path + name: projectId + required: true + type: integer + - description: Conversation ID + in: path + name: conversationId + required: true + type: integer + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/ai.ConversationDetail' + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Get conversation detail + tags: + - ai + /api/v1/ai/boilerplates: + get: + description: Returns a list of available boilerplates filtered by network platform + parameters: + - description: Network ID to filter boilerplates by platform + in: query + name: network_id + required: true + type: integer + produces: + - application/json + responses: + "200": + description: OK + schema: + items: + $ref: '#/definitions/ai.Boilerplate' + type: array + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Get available boilerplates + tags: + - ai + /api/v1/ai/generate: + post: + consumes: + - application/json + description: Generates code based on the provided prompt and project context + parameters: + - description: Generation request + in: body + name: request + required: true + schema: + $ref: '#/definitions/ai.GenerateRequest' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/ai.GenerateResponse' + "400": + 
description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Generate code + tags: + - ai + /api/v1/ai/models: + get: + description: Returns a list of available AI models for code generation + produces: + - application/json + responses: + "200": + description: OK + schema: + items: + $ref: '#/definitions/ai.Model' + type: array + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Get available AI models + tags: + - ai + /api/v1/projects: + get: + description: Get a list of all projects + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/projects.ListProjectsResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: List all projects + tags: + - projects + post: + consumes: + - application/json + description: Create a new project, scaffold its directory, and store it in the + DB + parameters: + - description: Project info + in: body + name: request + required: true + schema: + $ref: '#/definitions/projects.CreateProjectRequest' + produces: + - application/json + responses: + "201": + description: Created + schema: + $ref: '#/definitions/projects.CreateProjectResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "401": + description: Unauthorized + schema: + $ref: '#/definitions/response.ErrorResponse' + "403": + description: Forbidden + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "409": + description: Conflict + schema: + $ref: '#/definitions/response.ErrorResponse' + "422": + description: Unprocessable Entity + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Create a project + tags: + - projects + /api/v1/projects/{id}: + get: + description: Get details of a project by its ID + parameters: + - description: Project ID + in: path + name: id + required: true + type: integer + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/projects.Project' + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Get a project by ID + tags: + - projects + /api/v1/projects/{id}/commits: + get: + description: Get a paginated list of commits for a project, including added/removed/modified + files + parameters: + - description: Project ID + in: path + name: id + required: true + type: integer + - description: Page number (default 1) + in: query + name: page + type: integer + - description: Page size (default 20) + in: query + name: pageSize + type: integer + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/projects.CommitsListResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: 
'#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: List project commits with file changes + tags: + - projects + /api/v1/projects/{id}/commits/{commitHash}: + get: + description: Get details for a single commit, including file changes + parameters: + - description: Project ID + in: path + name: id + required: true + type: integer + - description: Commit hash + in: path + name: commitHash + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/projects.CommitDetailAPI' + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Get commit details + tags: + - projects + /api/v1/projects/{id}/diff: + get: + description: Get the diff of a file between two commits + parameters: + - description: Project ID + in: path + name: id + required: true + type: integer + - description: File path (relative to project root) + in: query + name: file + required: true + type: string + - description: From commit hash + in: query + name: from + required: true + type: string + - description: To commit hash + in: query + name: to + required: true + type: string + produces: + - text/plain + responses: + "200": + description: Diff + schema: + type: string + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Get file diff between two commits + tags: + - projects + /api/v1/projects/{id}/file_at_commit: + get: + description: Get the contents of a file at a specific commit hash + parameters: + - description: Project ID + in: path + name: id + required: true + type: integer + - description: File path (relative to project root) + in: query + name: file + required: true + type: string + - description: Commit hash + in: query + name: commit + required: true + type: string + produces: + - text/plain + responses: + "200": + description: File contents + schema: + type: string + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Get file contents at a specific commit + tags: + - projects + /api/v1/projects/{id}/logs: + get: + description: Stream or return the logs for the project's running container + parameters: + - description: Project ID + in: path + name: id + required: true + type: integer + produces: + - text/plain + responses: + "200": + description: Logs + schema: + type: string + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Get logs for a project server + tags: + - projects + /api/v1/projects/{id}/logs/stream: + get: + description: Stream logs for the project's running 
container using SSE + parameters: + - description: Project ID + in: path + name: id + required: true + type: integer + produces: + - text/event-stream + responses: + "200": + description: SSE stream of logs + schema: + type: string + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Stream real-time logs for a project server + tags: + - projects + /api/v1/projects/{id}/start: + post: + description: Start the server process for a given project using its boilerplate + parameters: + - description: Project ID + in: path + name: id + required: true + type: integer + produces: + - application/json + responses: + "200": + description: OK + schema: + additionalProperties: + type: string + type: object + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Start the server for a project + tags: + - projects + /api/v1/projects/{id}/stop: + post: + description: Stop the server process for a given project + parameters: + - description: Project ID + in: path + name: id + required: true + type: integer + produces: + - application/json + responses: + "200": + description: OK + schema: + additionalProperties: + type: string + type: object + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Stop the server for a project + tags: + - projects + /api/v1/projects/{projectId}/dirs/create: + post: + consumes: + - application/json + description: Create a new directory in a project + parameters: + - description: Project ID + in: path + name: projectId + required: true + type: integer + - description: Directory create info + in: body + name: request + required: true + schema: + $ref: '#/definitions/dirs.CreateDirRequest' + produces: + - application/json + responses: + "201": + description: Created + schema: + $ref: '#/definitions/dirs.CreateDirResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "401": + description: Unauthorized + schema: + $ref: '#/definitions/response.ErrorResponse' + "403": + description: Forbidden + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "409": + description: Conflict + schema: + $ref: '#/definitions/response.ErrorResponse' + "422": + description: Unprocessable Entity + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Create a directory + tags: + - directories + /api/v1/projects/{projectId}/dirs/delete: + delete: + consumes: + - application/json + description: Delete a directory in a project + parameters: + - description: Project ID + in: path + name: projectId + required: true + type: integer + - description: Project name + in: query + name: project + required: true + type: string + - 
description: Directory to delete, relative to project root + in: query + name: dir + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/dirs.DeleteDirResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "401": + description: Unauthorized + schema: + $ref: '#/definitions/response.ErrorResponse' + "403": + description: Forbidden + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "409": + description: Conflict + schema: + $ref: '#/definitions/response.ErrorResponse' + "422": + description: Unprocessable Entity + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Delete a directory + tags: + - directories + /api/v1/projects/{projectId}/dirs/list: + get: + description: List files and directories in a given project and directory. Large + directories (e.g., node_modules) are summarized/skipped. + parameters: + - description: Project ID + in: path + name: projectId + required: true + type: integer + - description: Directory to list, relative to project root + in: query + name: dir + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/dirs.ListEntriesResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "401": + description: Unauthorized + schema: + $ref: '#/definitions/response.ErrorResponse' + "403": + description: Forbidden + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "409": + description: Conflict + schema: + $ref: '#/definitions/response.ErrorResponse' + "422": + description: Unprocessable Entity + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: List files and directories + tags: + - directories + /api/v1/projects/{projectId}/files/delete: + delete: + consumes: + - application/json + description: Delete a file in a project + parameters: + - description: Project ID + in: path + name: projectId + required: true + type: integer + - description: File path relative to project root + in: query + name: path + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/files.DeleteFileResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "401": + description: Unauthorized + schema: + $ref: '#/definitions/response.ErrorResponse' + "403": + description: Forbidden + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "409": + description: Conflict + schema: + $ref: '#/definitions/response.ErrorResponse' + "422": + description: Unprocessable Entity + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Delete a file + tags: + - files + /api/v1/projects/{projectId}/files/entries: + get: + description: List the full directory tree for a project, excluding large/ignored + folders (e.g., 
node_modules, .git) + parameters: + - description: Project ID + in: path + name: projectId + required: true + type: integer + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/files.DirectoryTreeNode' + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "401": + description: Unauthorized + schema: + $ref: '#/definitions/response.ErrorResponse' + "403": + description: Forbidden + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "409": + description: Conflict + schema: + $ref: '#/definitions/response.ErrorResponse' + "422": + description: Unprocessable Entity + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: List full project directory tree + tags: + - files + /api/v1/projects/{projectId}/files/list: + get: + consumes: + - application/json + description: List files in a given project and directory + parameters: + - description: Project ID + in: path + name: projectId + required: true + type: integer + - description: Directory to list, relative to project root + in: query + name: dir + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/files.ListFilesResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "401": + description: Unauthorized + schema: + $ref: '#/definitions/response.ErrorResponse' + "403": + description: Forbidden + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "409": + description: Conflict + schema: + $ref: '#/definitions/response.ErrorResponse' + "422": + description: Unprocessable Entity + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: List files + tags: + - files + /api/v1/projects/{projectId}/files/read: + get: + consumes: + - application/json + description: Get the contents of a file in a project + parameters: + - description: Project ID + in: path + name: projectId + required: true + type: integer + - description: File path relative to project root + in: query + name: path + required: true + type: string + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/files.ReadFileResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "401": + description: Unauthorized + schema: + $ref: '#/definitions/response.ErrorResponse' + "403": + description: Forbidden + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "409": + description: Conflict + schema: + $ref: '#/definitions/response.ErrorResponse' + "422": + description: Unprocessable Entity + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Read file contents + tags: + - files + /api/v1/projects/{projectId}/files/write: + post: + consumes: + - application/json + description: Write or modify the contents of a file in a project + parameters: + - description: Project ID + in: 
path + name: projectId + required: true + type: integer + - description: File write info + in: body + name: request + required: true + schema: + $ref: '#/definitions/files.WriteFileRequest' + produces: + - application/json + responses: + "201": + description: Created + schema: + $ref: '#/definitions/files.WriteFileResponse' + "400": + description: Bad Request + schema: + $ref: '#/definitions/response.ErrorResponse' + "401": + description: Unauthorized + schema: + $ref: '#/definitions/response.ErrorResponse' + "403": + description: Forbidden + schema: + $ref: '#/definitions/response.ErrorResponse' + "404": + description: Not Found + schema: + $ref: '#/definitions/response.ErrorResponse' + "409": + description: Conflict + schema: + $ref: '#/definitions/response.ErrorResponse' + "422": + description: Unprocessable Entity + schema: + $ref: '#/definitions/response.ErrorResponse' + "500": + description: Internal Server Error + schema: + $ref: '#/definitions/response.ErrorResponse' + summary: Write file contents + tags: + - files /audit/logs: get: consumes: @@ -8172,49 +9488,4 @@ paths: summary: Update user role tags: - Users -schemes: -- http -- https -securityDefinitions: - BasicAuth: - type: basic - CookieAuth: - in: cookie - name: session_id - type: apiKey swagger: "2.0" -tags: -- description: Audit management operations - name: Audit -- description: User authentication and authorization operations - name: Authentication -- description: Backup schedule configuration and management - name: Backup Schedules -- description: Backup target location configuration and management - name: Backup Targets -- description: Backup management operations - name: Backups -- description: Hyperledger Besu network management operations - name: Besu Networks -- description: Hyperledger Fabric network management operations - name: Fabric Networks -- description: Cryptographic key management operations - name: Keys -- description: Metrics management operations - name: Metrics -- description: Network node management operations - name: Nodes -- description: System notification configuration and management - name: Notifications -- description: Organization management operations - name: Organizations -- description: Plugin management operations - name: Plugins -- description: Key provider management operations - name: Providers -- description: Settings management operations - name: Settings -- description: Smart contract management operations - name: SmartContracts -- description: User account management operations - name: Users diff --git a/go.mod b/go.mod index f03b97e..878c536 100644 --- a/go.mod +++ b/go.mod @@ -52,6 +52,7 @@ require ( github.com/ProtonMail/go-crypto v1.1.6 // indirect github.com/StackExchange/wmi v1.2.1 // indirect github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect + github.com/anthropics/anthropic-sdk-go v1.4.0 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/aws/aws-sdk-go-v2 v1.36.3 // indirect github.com/aws/aws-sdk-go-v2/config v1.29.14 // indirect @@ -127,6 +128,8 @@ require ( github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect github.com/google/gnostic-models v0.6.9 // indirect github.com/google/go-cmp v0.7.0 // indirect + github.com/google/go-github/v45 v45.2.0 // indirect + github.com/google/go-querystring v1.1.0 // indirect github.com/google/gofuzz v1.2.0 // indirect github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect github.com/gorilla/mux v1.8.1 // indirect @@ -194,6 +197,7 @@ 
require ( github.com/multiformats/go-varint v0.0.7 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f // indirect + github.com/openai/openai-go v1.5.0 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/pelletier/go-toml v1.9.5 // indirect @@ -218,6 +222,10 @@ require ( github.com/spf13/cast v1.7.1 // indirect github.com/supranational/blst v0.3.14 // indirect github.com/theupdateframework/notary v0.7.0 // indirect + github.com/tidwall/gjson v1.14.4 // indirect + github.com/tidwall/match v1.1.1 // indirect + github.com/tidwall/pretty v1.2.1 // indirect + github.com/tidwall/sjson v1.2.5 // indirect github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 // indirect github.com/tklauser/go-sysconf v0.3.12 // indirect github.com/tklauser/numcpus v0.6.1 // indirect @@ -298,6 +306,7 @@ require ( github.com/ipfs/go-ipfs-api v0.7.0 github.com/josharian/intern v1.0.0 // indirect github.com/mailru/easyjson v0.9.0 // indirect + github.com/sashabaranov/go-openai v1.40.1 github.com/sirupsen/logrus v1.9.3 github.com/spf13/pflag v1.0.6 // indirect github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe // indirect diff --git a/go.sum b/go.sum index d3a2c58..3a8951f 100644 --- a/go.sum +++ b/go.sum @@ -50,6 +50,8 @@ github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092 h1:aM1 github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092/go.mod h1:rYqSE9HbjzpHTI74vwPvae4ZVYZd1lue2ta6xHPdblA= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= +github.com/anthropics/anthropic-sdk-go v1.4.0 h1:fU1jKxYbQdQDiEXCxeW5XZRIOwKevn/PMg8Ay1nnUx0= +github.com/anthropics/anthropic-sdk-go v1.4.0/go.mod h1:AapDW22irxK2PSumZiQXYUFvsdQgkwIWlpESweWZI/c= github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= @@ -352,9 +354,14 @@ github.com/google/gnostic-models v0.6.9 h1:MU/8wDLif2qCXZmzncUQ/BOfxWfthHi63Kqpo github.com/google/gnostic-models v0.6.9/go.mod h1:CiWsm0s6BSQd1hRn8/QmxqB6BesYcbSZxsz9b0KuDBw= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/go-github/v45 v45.2.0 h1:5oRLszbrkvxDDqBCNj2hjDZMKmvexaZ1xw/FCD+K3FI= +github.com/google/go-github/v45 v45.2.0/go.mod h1:FObaZJEDSTa/WGCzZ2Z3eoCDXWJKMenWWTrd8jrta28= +github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= +github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= 
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= @@ -601,6 +608,8 @@ github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7J github.com/onsi/gomega v1.9.0/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA= github.com/onsi/gomega v1.35.1 h1:Cwbd75ZBPxFSuZ6T+rN/WCb/gOc6YgFBXLlZLhC7Ds4= github.com/onsi/gomega v1.35.1/go.mod h1:PvZbdDc8J6XJEpDK4HCuRBm8a6Fzp9/DmhC9C7yFlog= +github.com/openai/openai-go v1.5.0 h1:EcSBUYTiA4xbsO0VTX3i2WCPwKLMniwlVpiW/dCoXrc= +github.com/openai/openai-go v1.5.0/go.mod h1:g461MYGXEXBVdV5SaR/5tNzNbSfwTBBefwc+LlDCK0Y= github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= @@ -674,6 +683,8 @@ github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sashabaranov/go-openai v1.40.1 h1:bJ08Iwct5mHBVkuvG6FEcb9MDTfsXdTYPGjYLRdeTEU= +github.com/sashabaranov/go-openai v1.40.1/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg= github.com/secure-systems-lab/go-securesystemslib v0.9.0 h1:rf1HIbL64nUpEIZnjLZ3mcNEL9NBPB0iuVjyxvq3LZc= github.com/secure-systems-lab/go-securesystemslib v0.9.0/go.mod h1:DVHKMcZ+V4/woA/peqr+L0joiRXbPpQ042GgJckkFgw= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= @@ -737,6 +748,16 @@ github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70 github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc= github.com/theupdateframework/notary v0.7.0 h1:QyagRZ7wlSpjT5N2qQAh/pN+DVqgekv4DzbAiAiEL3c= github.com/theupdateframework/notary v0.7.0/go.mod h1:c9DRxcmhHmVLDay4/2fUYdISnHqbFDGRSlXPO0AhYWw= +github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/gjson v1.14.4 h1:uo0p8EbA09J7RQaflQ1aBRffTR7xedD2bcIVSYxLnkM= +github.com/tidwall/gjson v1.14.4/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= +github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375 h1:QB54BJwA6x8QU9nHY3xJSZR2kX9bgpZekRKGkLTmEXA= github.com/tilt-dev/fsnotify v1.4.8-0.20220602155310-fff9c274a375/go.mod h1:xRroudyp5iVtxKqZCrA6n2TLFRBf8bmnjr1UD4x+z7g= github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= diff --git a/pkg/db/dev-queries.sql b/pkg/db/dev-queries.sql 
new file mode 100644 index 0000000..522e22c --- /dev/null +++ b/pkg/db/dev-queries.sql @@ -0,0 +1,53 @@ +-- name: ListProjects :many +SELECT * FROM projects ORDER BY created_at DESC; + +-- name: CreateProject :one +INSERT INTO projects (name, description, boilerplate, slug, network_id) VALUES (?, ?, ?, ?, ?) RETURNING *; + +-- name: DeleteProject :exec +DELETE FROM projects WHERE id = ?; + +-- name: GetProject :one +SELECT * FROM projects WHERE id = ?; + +-- name: GetProjectBySlug :one +SELECT * FROM projects WHERE slug = ?; + +-- name: CreateConversation :one +INSERT INTO conversations (project_id) VALUES (?) RETURNING *; + +-- name: GetDefaultConversationForProject :one +SELECT * FROM conversations WHERE project_id = ? ORDER BY started_at ASC LIMIT 1; + +-- name: InsertMessage :one +INSERT INTO messages (conversation_id, parent_id, sender, content) VALUES (?, ?, ?, ?) RETURNING *; + +-- name: ListMessagesForConversation :many +SELECT * FROM messages WHERE conversation_id = ? ORDER BY created_at ASC; + +-- name: ListConversationsForProject :many +SELECT * FROM conversations WHERE project_id = ? ORDER BY started_at ASC; + +-- name: InsertToolCall :one +INSERT INTO tool_calls (message_id, tool_name, arguments, result, error) +VALUES (?, ?, ?, ?, ?) RETURNING *; + +-- name: ListToolCallsForMessage :many +SELECT * FROM tool_calls WHERE message_id = ? ORDER BY created_at ASC; + +-- name: ListToolCallsForConversation :many +SELECT tc.* FROM tool_calls tc +JOIN messages m ON tc.message_id = m.id +WHERE m.conversation_id = ? +ORDER BY tc.created_at ASC; + +-- name: UpdateProjectContainerInfo :exec +UPDATE projects +SET + container_id = ?, + container_name = ?, + status = ?, + last_started_at = ?, + last_stopped_at = ?, + container_port = ? +WHERE id = ?; diff --git a/pkg/db/dev-queries.sql.go b/pkg/db/dev-queries.sql.go new file mode 100644 index 0000000..64e2f6d --- /dev/null +++ b/pkg/db/dev-queries.sql.go @@ -0,0 +1,409 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.28.0 +// source: dev-queries.sql + +package db + +import ( + "context" + "database/sql" +) + +const CreateConversation = `-- name: CreateConversation :one +INSERT INTO conversations (project_id) VALUES (?) RETURNING id, project_id, started_at +` + +func (q *Queries) CreateConversation(ctx context.Context, projectID int64) (*Conversation, error) { + row := q.db.QueryRowContext(ctx, CreateConversation, projectID) + var i Conversation + err := row.Scan(&i.ID, &i.ProjectID, &i.StartedAt) + return &i, err +} + +const CreateProject = `-- name: CreateProject :one +INSERT INTO projects (name, description, boilerplate, slug, network_id) VALUES (?, ?, ?, ?, ?) 
RETURNING id, name, description, boilerplate, created_at, updated_at, slug, container_id, container_name, status, last_started_at, last_stopped_at, container_port, network_id +` + +type CreateProjectParams struct { + Name string `json:"name"` + Description sql.NullString `json:"description"` + Boilerplate sql.NullString `json:"boilerplate"` + Slug string `json:"slug"` + NetworkID sql.NullInt64 `json:"networkId"` +} + +func (q *Queries) CreateProject(ctx context.Context, arg *CreateProjectParams) (*Project, error) { + row := q.db.QueryRowContext(ctx, CreateProject, + arg.Name, + arg.Description, + arg.Boilerplate, + arg.Slug, + arg.NetworkID, + ) + var i Project + err := row.Scan( + &i.ID, + &i.Name, + &i.Description, + &i.Boilerplate, + &i.CreatedAt, + &i.UpdatedAt, + &i.Slug, + &i.ContainerID, + &i.ContainerName, + &i.Status, + &i.LastStartedAt, + &i.LastStoppedAt, + &i.ContainerPort, + &i.NetworkID, + ) + return &i, err +} + +const DeleteProject = `-- name: DeleteProject :exec +DELETE FROM projects WHERE id = ? +` + +func (q *Queries) DeleteProject(ctx context.Context, id int64) error { + _, err := q.db.ExecContext(ctx, DeleteProject, id) + return err +} + +const GetDefaultConversationForProject = `-- name: GetDefaultConversationForProject :one +SELECT id, project_id, started_at FROM conversations WHERE project_id = ? ORDER BY started_at ASC LIMIT 1 +` + +func (q *Queries) GetDefaultConversationForProject(ctx context.Context, projectID int64) (*Conversation, error) { + row := q.db.QueryRowContext(ctx, GetDefaultConversationForProject, projectID) + var i Conversation + err := row.Scan(&i.ID, &i.ProjectID, &i.StartedAt) + return &i, err +} + +const GetProject = `-- name: GetProject :one +SELECT id, name, description, boilerplate, created_at, updated_at, slug, container_id, container_name, status, last_started_at, last_stopped_at, container_port, network_id FROM projects WHERE id = ? +` + +func (q *Queries) GetProject(ctx context.Context, id int64) (*Project, error) { + row := q.db.QueryRowContext(ctx, GetProject, id) + var i Project + err := row.Scan( + &i.ID, + &i.Name, + &i.Description, + &i.Boilerplate, + &i.CreatedAt, + &i.UpdatedAt, + &i.Slug, + &i.ContainerID, + &i.ContainerName, + &i.Status, + &i.LastStartedAt, + &i.LastStoppedAt, + &i.ContainerPort, + &i.NetworkID, + ) + return &i, err +} + +const GetProjectBySlug = `-- name: GetProjectBySlug :one +SELECT id, name, description, boilerplate, created_at, updated_at, slug, container_id, container_name, status, last_started_at, last_stopped_at, container_port, network_id FROM projects WHERE slug = ? +` + +func (q *Queries) GetProjectBySlug(ctx context.Context, slug string) (*Project, error) { + row := q.db.QueryRowContext(ctx, GetProjectBySlug, slug) + var i Project + err := row.Scan( + &i.ID, + &i.Name, + &i.Description, + &i.Boilerplate, + &i.CreatedAt, + &i.UpdatedAt, + &i.Slug, + &i.ContainerID, + &i.ContainerName, + &i.Status, + &i.LastStartedAt, + &i.LastStoppedAt, + &i.ContainerPort, + &i.NetworkID, + ) + return &i, err +} + +const InsertMessage = `-- name: InsertMessage :one +INSERT INTO messages (conversation_id, parent_id, sender, content) VALUES (?, ?, ?, ?) 
RETURNING id, conversation_id, parent_id, sender, content, created_at +` + +type InsertMessageParams struct { + ConversationID int64 `json:"conversationId"` + ParentID sql.NullInt64 `json:"parentId"` + Sender string `json:"sender"` + Content string `json:"content"` +} + +func (q *Queries) InsertMessage(ctx context.Context, arg *InsertMessageParams) (*Message, error) { + row := q.db.QueryRowContext(ctx, InsertMessage, + arg.ConversationID, + arg.ParentID, + arg.Sender, + arg.Content, + ) + var i Message + err := row.Scan( + &i.ID, + &i.ConversationID, + &i.ParentID, + &i.Sender, + &i.Content, + &i.CreatedAt, + ) + return &i, err +} + +const InsertToolCall = `-- name: InsertToolCall :one +INSERT INTO tool_calls (message_id, tool_name, arguments, result, error) +VALUES (?, ?, ?, ?, ?) RETURNING id, message_id, tool_name, arguments, result, error, created_at +` + +type InsertToolCallParams struct { + MessageID int64 `json:"messageId"` + ToolName string `json:"toolName"` + Arguments string `json:"arguments"` + Result sql.NullString `json:"result"` + Error sql.NullString `json:"error"` +} + +func (q *Queries) InsertToolCall(ctx context.Context, arg *InsertToolCallParams) (*ToolCall, error) { + row := q.db.QueryRowContext(ctx, InsertToolCall, + arg.MessageID, + arg.ToolName, + arg.Arguments, + arg.Result, + arg.Error, + ) + var i ToolCall + err := row.Scan( + &i.ID, + &i.MessageID, + &i.ToolName, + &i.Arguments, + &i.Result, + &i.Error, + &i.CreatedAt, + ) + return &i, err +} + +const ListConversationsForProject = `-- name: ListConversationsForProject :many +SELECT id, project_id, started_at FROM conversations WHERE project_id = ? ORDER BY started_at ASC +` + +func (q *Queries) ListConversationsForProject(ctx context.Context, projectID int64) ([]*Conversation, error) { + rows, err := q.db.QueryContext(ctx, ListConversationsForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []*Conversation{} + for rows.Next() { + var i Conversation + if err := rows.Scan(&i.ID, &i.ProjectID, &i.StartedAt); err != nil { + return nil, err + } + items = append(items, &i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const ListMessagesForConversation = `-- name: ListMessagesForConversation :many +SELECT id, conversation_id, parent_id, sender, content, created_at FROM messages WHERE conversation_id = ? 
ORDER BY created_at ASC +` + +func (q *Queries) ListMessagesForConversation(ctx context.Context, conversationID int64) ([]*Message, error) { + rows, err := q.db.QueryContext(ctx, ListMessagesForConversation, conversationID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []*Message{} + for rows.Next() { + var i Message + if err := rows.Scan( + &i.ID, + &i.ConversationID, + &i.ParentID, + &i.Sender, + &i.Content, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, &i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const ListProjects = `-- name: ListProjects :many +SELECT id, name, description, boilerplate, created_at, updated_at, slug, container_id, container_name, status, last_started_at, last_stopped_at, container_port, network_id FROM projects ORDER BY created_at DESC +` + +func (q *Queries) ListProjects(ctx context.Context) ([]*Project, error) { + rows, err := q.db.QueryContext(ctx, ListProjects) + if err != nil { + return nil, err + } + defer rows.Close() + items := []*Project{} + for rows.Next() { + var i Project + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Description, + &i.Boilerplate, + &i.CreatedAt, + &i.UpdatedAt, + &i.Slug, + &i.ContainerID, + &i.ContainerName, + &i.Status, + &i.LastStartedAt, + &i.LastStoppedAt, + &i.ContainerPort, + &i.NetworkID, + ); err != nil { + return nil, err + } + items = append(items, &i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const ListToolCallsForConversation = `-- name: ListToolCallsForConversation :many +SELECT tc.id, tc.message_id, tc.tool_name, tc.arguments, tc.result, tc.error, tc.created_at FROM tool_calls tc +JOIN messages m ON tc.message_id = m.id +WHERE m.conversation_id = ? +ORDER BY tc.created_at ASC +` + +func (q *Queries) ListToolCallsForConversation(ctx context.Context, conversationID int64) ([]*ToolCall, error) { + rows, err := q.db.QueryContext(ctx, ListToolCallsForConversation, conversationID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []*ToolCall{} + for rows.Next() { + var i ToolCall + if err := rows.Scan( + &i.ID, + &i.MessageID, + &i.ToolName, + &i.Arguments, + &i.Result, + &i.Error, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, &i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const ListToolCallsForMessage = `-- name: ListToolCallsForMessage :many +SELECT id, message_id, tool_name, arguments, result, error, created_at FROM tool_calls WHERE message_id = ? 
ORDER BY created_at ASC +` + +func (q *Queries) ListToolCallsForMessage(ctx context.Context, messageID int64) ([]*ToolCall, error) { + rows, err := q.db.QueryContext(ctx, ListToolCallsForMessage, messageID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []*ToolCall{} + for rows.Next() { + var i ToolCall + if err := rows.Scan( + &i.ID, + &i.MessageID, + &i.ToolName, + &i.Arguments, + &i.Result, + &i.Error, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, &i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const UpdateProjectContainerInfo = `-- name: UpdateProjectContainerInfo :exec +UPDATE projects +SET + container_id = ?, + container_name = ?, + status = ?, + last_started_at = ?, + last_stopped_at = ?, + container_port = ? +WHERE id = ? +` + +type UpdateProjectContainerInfoParams struct { + ContainerID sql.NullString `json:"containerId"` + ContainerName sql.NullString `json:"containerName"` + Status sql.NullString `json:"status"` + LastStartedAt sql.NullTime `json:"lastStartedAt"` + LastStoppedAt sql.NullTime `json:"lastStoppedAt"` + ContainerPort sql.NullInt64 `json:"containerPort"` + ID int64 `json:"id"` +} + +func (q *Queries) UpdateProjectContainerInfo(ctx context.Context, arg *UpdateProjectContainerInfoParams) error { + _, err := q.db.ExecContext(ctx, UpdateProjectContainerInfo, + arg.ContainerID, + arg.ContainerName, + arg.Status, + arg.LastStartedAt, + arg.LastStoppedAt, + arg.ContainerPort, + arg.ID, + ) + return err +} diff --git a/pkg/db/migrations.go b/pkg/db/migrations.go new file mode 100644 index 0000000..eca4b97 --- /dev/null +++ b/pkg/db/migrations.go @@ -0,0 +1,41 @@ +package db + +import ( + "database/sql" + "embed" + "fmt" + + "github.com/golang-migrate/migrate/v4" + "github.com/golang-migrate/migrate/v4/database/sqlite3" + _ "github.com/golang-migrate/migrate/v4/source/file" + "github.com/golang-migrate/migrate/v4/source/iofs" +) + +//go:embed migrations/*.sql +var MigrationsFS embed.FS + +func RunMigrations(database *sql.DB) error { + driver, err := sqlite3.WithInstance(database, &sqlite3.Config{}) + if err != nil { + return fmt.Errorf("could not create sqlite driver: %v", err) + } + + // Use embedded migrations instead of file system + d, err := iofs.New(MigrationsFS, "migrations") + if err != nil { + return fmt.Errorf("could not create iofs driver: %v", err) + } + + m, err := migrate.NewWithInstance( + "iofs", d, + "sqlite3", driver, + ) + if err != nil { + return fmt.Errorf("could not create migrate instance: %v", err) + } + if err := m.Up(); err != nil && err != migrate.ErrNoChange { + return fmt.Errorf("could not run migrations: %v", err) + } + + return nil +} diff --git a/pkg/db/migrations/00012_dev_tables.down.sql b/pkg/db/migrations/00012_dev_tables.down.sql new file mode 100644 index 0000000..ab972a2 --- /dev/null +++ b/pkg/db/migrations/00012_dev_tables.down.sql @@ -0,0 +1,4 @@ +DROP TABLE IF EXISTS tool_calls; +DROP TABLE IF EXISTS messages; +DROP TABLE IF EXISTS conversations; +DROP TABLE IF EXISTS projects; diff --git a/pkg/db/migrations/00012_dev_tables.up.sql b/pkg/db/migrations/00012_dev_tables.up.sql new file mode 100644 index 0000000..1a34744 --- /dev/null +++ b/pkg/db/migrations/00012_dev_tables.up.sql @@ -0,0 +1,51 @@ +-- Squashed migrations combining all schema changes + +-- Create projects table +CREATE TABLE projects ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL UNIQUE, + 
description TEXT, + boilerplate TEXT, -- Boilerplate template used for scaffolding + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + slug TEXT NOT NULL UNIQUE, + container_id TEXT, + container_name TEXT, + status TEXT, + last_started_at DATETIME, + last_stopped_at DATETIME, + container_port INTEGER, + network_id INTEGER REFERENCES networks(id) +); + +-- Create conversations table +CREATE TABLE IF NOT EXISTS conversations ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + project_id INTEGER NOT NULL, + started_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (project_id) REFERENCES projects(id) +); + +-- Create messages table +CREATE TABLE IF NOT EXISTS messages ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + conversation_id INTEGER NOT NULL, + parent_id INTEGER, + sender TEXT NOT NULL, -- 'user' or 'assistant' + content TEXT NOT NULL, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (conversation_id) REFERENCES conversations(id), + FOREIGN KEY (parent_id) REFERENCES messages(id) +); + +-- Create tool calls table +CREATE TABLE IF NOT EXISTS tool_calls ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + message_id INTEGER NOT NULL, + tool_name TEXT NOT NULL, + arguments TEXT NOT NULL, -- JSON string + result TEXT, -- JSON string + error TEXT, -- nullable error string + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (message_id) REFERENCES messages(id) +); \ No newline at end of file diff --git a/pkg/db/models.go b/pkg/db/models.go index aa33743..269e9ee 100644 --- a/pkg/db/models.go +++ b/pkg/db/models.go @@ -73,6 +73,12 @@ type BlockchainPlatform struct { Name string `json:"name"` } +type Conversation struct { + ID int64 `json:"id"` + ProjectID int64 `json:"projectId"` + StartedAt time.Time `json:"startedAt"` +} + type FabricChaincode struct { ID int64 `json:"id"` Name string `json:"name"` @@ -176,6 +182,15 @@ type KeyProviderType struct { Name string `json:"name"` } +type Message struct { + ID int64 `json:"id"` + ConversationID int64 `json:"conversationId"` + ParentID sql.NullInt64 `json:"parentId"` + Sender string `json:"sender"` + Content string `json:"content"` + CreatedAt time.Time `json:"createdAt"` +} + type Network struct { ID int64 `json:"id"` Name string `json:"name"` @@ -288,6 +303,23 @@ type Plugin struct { DeploymentStatus sql.NullString `json:"deploymentStatus"` } +type Project struct { + ID int64 `json:"id"` + Name string `json:"name"` + Description sql.NullString `json:"description"` + Boilerplate sql.NullString `json:"boilerplate"` + CreatedAt time.Time `json:"createdAt"` + UpdatedAt time.Time `json:"updatedAt"` + Slug string `json:"slug"` + ContainerID sql.NullString `json:"containerId"` + ContainerName sql.NullString `json:"containerName"` + Status sql.NullString `json:"status"` + LastStartedAt sql.NullTime `json:"lastStartedAt"` + LastStoppedAt sql.NullTime `json:"lastStoppedAt"` + ContainerPort sql.NullInt64 `json:"containerPort"` + NetworkID sql.NullInt64 `json:"networkId"` +} + type PrometheusConfig struct { ID int64 `json:"id"` PrometheusPort int64 `json:"prometheusPort"` @@ -325,6 +357,16 @@ type Setting struct { UpdatedAt sql.NullTime `json:"updatedAt"` } +type ToolCall struct { + ID int64 `json:"id"` + MessageID int64 `json:"messageId"` + ToolName string `json:"toolName"` + Arguments string `json:"arguments"` + Result sql.NullString `json:"result"` + Error sql.NullString `json:"error"` + CreatedAt time.Time `json:"createdAt"` +} + type User struct { ID int64 
`json:"id"` Username string `json:"username"` diff --git a/pkg/db/querier.go b/pkg/db/querier.go index 626d7b4..b7acfcc 100644 --- a/pkg/db/querier.go +++ b/pkg/db/querier.go @@ -27,6 +27,7 @@ type Querier interface { CreateBackupTarget(ctx context.Context, arg *CreateBackupTargetParams) (*BackupTarget, error) CreateChaincode(ctx context.Context, arg *CreateChaincodeParams) (*FabricChaincode, error) CreateChaincodeDefinition(ctx context.Context, arg *CreateChaincodeDefinitionParams) (*FabricChaincodeDefinition, error) + CreateConversation(ctx context.Context, projectID int64) (*Conversation, error) CreateFabricOrganization(ctx context.Context, arg *CreateFabricOrganizationParams) (*FabricOrganization, error) CreateKey(ctx context.Context, arg *CreateKeyParams) (*Key, error) CreateKeyProvider(ctx context.Context, arg *CreateKeyProviderParams) (*KeyProvider, error) @@ -38,6 +39,7 @@ type Querier interface { CreateNodeEvent(ctx context.Context, arg *CreateNodeEventParams) (*NodeEvent, error) CreateNotificationProvider(ctx context.Context, arg *CreateNotificationProviderParams) (*NotificationProvider, error) CreatePlugin(ctx context.Context, arg *CreatePluginParams) (*Plugin, error) + CreateProject(ctx context.Context, arg *CreateProjectParams) (*Project, error) CreateSession(ctx context.Context, arg *CreateSessionParams) (*Session, error) CreateSetting(ctx context.Context, config string) (*Setting, error) CreateUser(ctx context.Context, arg *CreateUserParams) (*User, error) @@ -48,6 +50,7 @@ type Querier interface { DeleteBackupsByTarget(ctx context.Context, targetID int64) error DeleteChaincode(ctx context.Context, id int64) error DeleteChaincodeDefinition(ctx context.Context, id int64) error + DeleteChaincodesByNetwork(ctx context.Context, networkID int64) error DeleteExpiredSessions(ctx context.Context) error DeleteFabricOrganization(ctx context.Context, id int64) error DeleteKey(ctx context.Context, id int64) error @@ -58,6 +61,7 @@ type Querier interface { DeleteNotificationProvider(ctx context.Context, id int64) error DeleteOldBackups(ctx context.Context, arg *DeleteOldBackupsParams) error DeletePlugin(ctx context.Context, name string) error + DeleteProject(ctx context.Context, id int64) error DeleteRevokedCertificate(ctx context.Context, arg *DeleteRevokedCertificateParams) error DeleteSession(ctx context.Context, token string) error DeleteSetting(ctx context.Context, id int64) error @@ -76,6 +80,8 @@ type Querier interface { GetBackupsByStatus(ctx context.Context, status string) ([]*Backup, error) GetChaincode(ctx context.Context, id int64) (*GetChaincodeRow, error) GetChaincodeDefinition(ctx context.Context, id int64) (*FabricChaincodeDefinition, error) + GetConversation(ctx context.Context, id int64) (*Conversation, error) + GetDefaultConversationForProject(ctx context.Context, projectID int64) (*Conversation, error) GetDefaultNotificationProvider(ctx context.Context, type_ string) (*NotificationProvider, error) GetDefaultNotificationProviderForType(ctx context.Context, notificationType interface{}) (*NotificationProvider, error) GetDeploymentMetadata(ctx context.Context, name string) (interface{}, error) @@ -111,6 +117,8 @@ type Querier interface { GetOrganizationCRLInfo(ctx context.Context, id int64) (*GetOrganizationCRLInfoRow, error) GetPeerPorts(ctx context.Context) ([]*GetPeerPortsRow, error) GetPlugin(ctx context.Context, name string) (*Plugin, error) + GetProject(ctx context.Context, id int64) (*Project, error) + GetProjectBySlug(ctx context.Context, slug string) (*Project, 
error) GetPrometheusConfig(ctx context.Context) (*PrometheusConfig, error) GetProvidersByNotificationType(ctx context.Context, arg *GetProvidersByNotificationTypeParams) ([]*NotificationProvider, error) GetRecentCompletedBackups(ctx context.Context) ([]*Backup, error) @@ -123,6 +131,8 @@ type Querier interface { GetSetting(ctx context.Context, id int64) (*Setting, error) GetUser(ctx context.Context, id int64) (*User, error) GetUserByUsername(ctx context.Context, username string) (*User, error) + InsertMessage(ctx context.Context, arg *InsertMessageParams) (*Message, error) + InsertToolCall(ctx context.Context, arg *InsertToolCallParams) (*ToolCall, error) ListAuditLogs(ctx context.Context, arg *ListAuditLogsParams) ([]*AuditLog, error) ListBackupSchedules(ctx context.Context) ([]*BackupSchedule, error) ListBackupTargets(ctx context.Context) ([]*BackupTarget, error) @@ -132,11 +142,13 @@ type Querier interface { ListChaincodeDefinitionEvents(ctx context.Context, definitionID int64) ([]*FabricChaincodeDefinitionEvent, error) ListChaincodeDefinitions(ctx context.Context, chaincodeID int64) ([]*FabricChaincodeDefinition, error) ListChaincodes(ctx context.Context) ([]*FabricChaincode, error) + ListConversationsForProject(ctx context.Context, projectID int64) ([]*Conversation, error) ListFabricChaincodes(ctx context.Context) ([]*FabricChaincode, error) ListFabricOrganizations(ctx context.Context) ([]*FabricOrganization, error) ListFabricOrganizationsWithKeys(ctx context.Context, arg *ListFabricOrganizationsWithKeysParams) ([]*ListFabricOrganizationsWithKeysRow, error) ListKeyProviders(ctx context.Context) ([]*KeyProvider, error) ListKeys(ctx context.Context, arg *ListKeysParams) ([]*ListKeysRow, error) + ListMessagesForConversation(ctx context.Context, conversationID int64) ([]*Message, error) ListNetworkNodesByNetwork(ctx context.Context, networkID int64) ([]*NetworkNode, error) ListNetworkNodesByNode(ctx context.Context, nodeID int64) ([]*NetworkNode, error) ListNetworks(ctx context.Context) ([]*Network, error) @@ -149,7 +161,10 @@ type Querier interface { ListNotificationProviders(ctx context.Context) ([]*NotificationProvider, error) ListPeerStatuses(ctx context.Context, definitionID int64) ([]*FabricChaincodeDefinitionPeerStatus, error) ListPlugins(ctx context.Context) ([]*Plugin, error) + ListProjects(ctx context.Context) ([]*Project, error) ListSettings(ctx context.Context) ([]*Setting, error) + ListToolCallsForConversation(ctx context.Context, conversationID int64) ([]*ToolCall, error) + ListToolCallsForMessage(ctx context.Context, messageID int64) ([]*ToolCall, error) ListUsers(ctx context.Context) ([]*User, error) MarkBackupNotified(ctx context.Context, id int64) error ResetPrometheusConfig(ctx context.Context) (*PrometheusConfig, error) @@ -185,6 +200,7 @@ type Querier interface { UpdateNotificationProvider(ctx context.Context, arg *UpdateNotificationProviderParams) (*NotificationProvider, error) UpdateOrganizationCRL(ctx context.Context, arg *UpdateOrganizationCRLParams) error UpdatePlugin(ctx context.Context, arg *UpdatePluginParams) (*Plugin, error) + UpdateProjectContainerInfo(ctx context.Context, arg *UpdateProjectContainerInfoParams) error UpdatePrometheusConfig(ctx context.Context, arg *UpdatePrometheusConfigParams) (*PrometheusConfig, error) UpdateProviderTestResults(ctx context.Context, arg *UpdateProviderTestResultsParams) (*NotificationProvider, error) UpdateSetting(ctx context.Context, arg *UpdateSettingParams) (*Setting, error) diff --git a/pkg/db/queries.sql 
b/pkg/db/queries.sql index 51768a9..39a9ef8 100644 --- a/pkg/db/queries.sql +++ b/pkg/db/queries.sql @@ -437,6 +437,9 @@ SELECT COUNT(*) FROM networks; DELETE FROM networks WHERE id = ?; +-- name: DeleteChaincodesByNetwork :exec +DELETE FROM fabric_chaincodes WHERE network_id = ?; + -- name: UpdateNodeDeploymentConfig :one UPDATE nodes SET deployment_config = ?, @@ -1171,3 +1174,6 @@ INSERT INTO fabric_chaincode_definition_events (definition_id, event_type, event -- name: ListChaincodeDefinitionEvents :many SELECT id, definition_id, event_type, event_data, created_at FROM fabric_chaincode_definition_events WHERE definition_id = ? ORDER BY created_at ASC; + +-- name: GetConversation :one +SELECT id, project_id, started_at FROM conversations WHERE id = ? LIMIT 1; diff --git a/pkg/db/queries.sql.go b/pkg/db/queries.sql.go index fa73c7a..85125cc 100644 --- a/pkg/db/queries.sql.go +++ b/pkg/db/queries.sql.go @@ -1244,6 +1244,15 @@ func (q *Queries) DeleteChaincodeDefinition(ctx context.Context, id int64) error return err } +const DeleteChaincodesByNetwork = `-- name: DeleteChaincodesByNetwork :exec +DELETE FROM fabric_chaincodes WHERE network_id = ? +` + +func (q *Queries) DeleteChaincodesByNetwork(ctx context.Context, networkID int64) error { + _, err := q.db.ExecContext(ctx, DeleteChaincodesByNetwork, networkID) + return err +} + const DeleteExpiredSessions = `-- name: DeleteExpiredSessions :exec DELETE FROM sessions WHERE expires_at < CURRENT_TIMESTAMP ` @@ -1873,6 +1882,17 @@ func (q *Queries) GetChaincodeDefinition(ctx context.Context, id int64) (*Fabric return &i, err } +const GetConversation = `-- name: GetConversation :one +SELECT id, project_id, started_at FROM conversations WHERE id = ? LIMIT 1 +` + +func (q *Queries) GetConversation(ctx context.Context, id int64) (*Conversation, error) { + row := q.db.QueryRowContext(ctx, GetConversation, id) + var i Conversation + err := row.Scan(&i.ID, &i.ProjectID, &i.StartedAt) + return &i, err +} + const GetDefaultNotificationProvider = `-- name: GetDefaultNotificationProvider :one SELECT id, name, type, config, is_default, is_enabled, created_at, updated_at, notify_node_downtime, notify_backup_success, notify_backup_failure, notify_s3_connection_issue, last_test_at, last_test_status, last_test_message FROM notification_providers WHERE is_default = 1 AND type = ? 
diff --git a/pkg/networks/service/service.go b/pkg/networks/service/service.go index 89502e2..18cb3f2 100644 --- a/pkg/networks/service/service.go +++ b/pkg/networks/service/service.go @@ -242,6 +242,11 @@ func (s *NetworkService) DeleteNetwork(ctx context.Context, networkID int64) err return fmt.Errorf("failed to delete network record: %w", err) } + // Delete chaincodes associated with the network + if err := s.db.DeleteChaincodesByNetwork(ctx, networkID); err != nil { + return fmt.Errorf("failed to delete chaincodes: %w", err) + } + return nil } diff --git a/pkg/plugin/plugin.go b/pkg/plugin/plugin.go index 683e3a7..2e607a3 100644 --- a/pkg/plugin/plugin.go +++ b/pkg/plugin/plugin.go @@ -170,7 +170,7 @@ func (pm *PluginManager) processXSourceParameters(ctx context.Context, plugin *p } // Get the processed value for templates - processedValue, err := xsourceValue.GetValue(ctx) + processedValue, err := xsourceValue.GetValue(ctx, spec) if err != nil { return nil, nil, fmt.Errorf("failed to get x-source value for %s: %w", key, err) } diff --git a/pkg/plugin/xsource/fabric_key.go b/pkg/plugin/xsource/fabric_key.go index cd6bfb8..a0e4b9d 100644 --- a/pkg/plugin/xsource/fabric_key.go +++ b/pkg/plugin/xsource/fabric_key.go @@ -9,6 +9,7 @@ import ( "github.com/chainlaunch/chainlaunch/pkg/db" key "github.com/chainlaunch/chainlaunch/pkg/keymanagement/service" nodeservice "github.com/chainlaunch/chainlaunch/pkg/nodes/service" + ptypes "github.com/chainlaunch/chainlaunch/pkg/plugin/types" ) type FabricKeyValue struct { @@ -92,7 +93,7 @@ type FabricKeyDetails struct { KeyPath string // Path inside the container } -func (v *FabricKeyValue) GetValue(ctx context.Context) (interface{}, error) { +func (v *FabricKeyValue) GetValue(ctx context.Context, spec ptypes.ParameterSpec) (interface{}, error) { // Get key details from key management service key, err := v.keyManagement.GetKey(ctx, int(v.KeyID)) if err != nil { diff --git a/pkg/plugin/xsource/fabric_peer.go b/pkg/plugin/xsource/fabric_peer.go index 19acb77..788639a 100644 --- a/pkg/plugin/xsource/fabric_peer.go +++ b/pkg/plugin/xsource/fabric_peer.go @@ -10,6 +10,7 @@ import ( "github.com/chainlaunch/chainlaunch/pkg/db" nodeservice "github.com/chainlaunch/chainlaunch/pkg/nodes/service" "github.com/chainlaunch/chainlaunch/pkg/nodes/types" + ptypes "github.com/chainlaunch/chainlaunch/pkg/plugin/types" ) // FabricPeerValue represents a fabric-peer x-source value @@ -123,7 +124,7 @@ func (v *FabricPeerValue) Validate(ctx context.Context) error { return nil } -func (v *FabricPeerValue) GetValue(ctx context.Context) (interface{}, error) { +func (v *FabricPeerValue) GetValue(ctx context.Context, spec ptypes.ParameterSpec) (interface{}, error) { var details []*FabricPeerDetails for _, peerIDStr := range v.PeerIDs { @@ -155,11 +156,13 @@ func (v *FabricPeerValue) GetValue(ctx context.Context) (interface{}, error) { }) } - // If there's only one peer, return it directly + // Return array if spec type is array, otherwise return single peer if available + if spec.Type == "array" { + return details, nil + } if len(details) == 1 { return details[0], nil } - return details, nil } diff --git a/pkg/plugin/xsource/file.go b/pkg/plugin/xsource/file.go index d48cedd..c96bb3b 100644 --- a/pkg/plugin/xsource/file.go +++ b/pkg/plugin/xsource/file.go @@ -5,6 +5,8 @@ import ( "fmt" "os" "path/filepath" + + ptypes "github.com/chainlaunch/chainlaunch/pkg/plugin/types" ) // FileValue represents a file x-source value @@ -28,7 +30,7 @@ type FileTemplateValue struct { } // GetValue 
returns the path inside the container where the file will be mounted -func (f *FileValue) GetValue(ctx context.Context) (interface{}, error) { +func (f *FileValue) GetValue(ctx context.Context, spec ptypes.ParameterSpec) (interface{}, error) { if err := f.Validate(ctx); err != nil { return nil, err } @@ -77,6 +79,11 @@ func (f *FileValue) GetVolumeMounts(ctx context.Context) ([]VolumeMount, error) }, nil } +// NewFileHandler creates a new FileHandler +func NewFileHandler() *FileHandler { + return &FileHandler{} +} + // FileHandler implements XSourceHandler for file type type FileHandler struct{} diff --git a/pkg/plugin/xsource/registry.go b/pkg/plugin/xsource/registry.go index f9654e6..9c9c4e3 100644 --- a/pkg/plugin/xsource/registry.go +++ b/pkg/plugin/xsource/registry.go @@ -7,6 +7,7 @@ import ( "github.com/chainlaunch/chainlaunch/pkg/db" key "github.com/chainlaunch/chainlaunch/pkg/keymanagement/service" nodeservice "github.com/chainlaunch/chainlaunch/pkg/nodes/service" + ptypes "github.com/chainlaunch/chainlaunch/pkg/plugin/types" ) // Registry manages x-source handlers @@ -23,7 +24,7 @@ func NewRegistry(queries *db.Queries, nodeService *nodeservice.NodeService, keyM // Register default handlers r.Register(NewFabricKeyHandler(queries, nodeService, keyManagement)) r.Register(NewFabricPeerHandler(queries, nodeService)) - r.Register(&FileHandler{}) + r.Register(NewFileHandler()) return r } @@ -58,7 +59,7 @@ func (r *Registry) ValidateAndProcess(ctx context.Context, xSourceType XSourceTy return nil, err } - return xSourceValue.GetValue(ctx) + return xSourceValue.GetValue(ctx, ptypes.ParameterSpec{}) } // ListOptions returns the valid options for the specified x-source type diff --git a/pkg/plugin/xsource/types.go b/pkg/plugin/xsource/types.go index 431c91d..32d63c3 100644 --- a/pkg/plugin/xsource/types.go +++ b/pkg/plugin/xsource/types.go @@ -3,6 +3,8 @@ package xsource import ( "context" "fmt" + + "github.com/chainlaunch/chainlaunch/pkg/plugin/types" ) // XSourceType represents the type of x-source @@ -30,7 +32,7 @@ type XSourceValue interface { // Validate checks if the value is valid for this x-source type Validate(ctx context.Context) error // GetValue returns the processed value that can be used in templates - GetValue(ctx context.Context) (interface{}, error) + GetValue(ctx context.Context, spec types.ParameterSpec) (interface{}, error) // GetValidationValue returns the value used for validation GetValidationValue() string // GetVolumeMounts returns the volume mounts needed for this x-source diff --git a/pkg/scai/ai/ai.go b/pkg/scai/ai/ai.go new file mode 100644 index 0000000..8090721 --- /dev/null +++ b/pkg/scai/ai/ai.go @@ -0,0 +1,733 @@ +package ai + +import ( + "context" + "encoding/json" + "fmt" + "io" + "os" + "path/filepath" + "strings" + "time" + + "github.com/chainlaunch/chainlaunch/pkg/db" + "github.com/chainlaunch/chainlaunch/pkg/logger" + "github.com/chainlaunch/chainlaunch/pkg/scai/sessionchanges" + "github.com/sashabaranov/go-openai" +) + +// AIClient defines the interface for AI model clients +type AIClient interface { + // CreateChatCompletion creates a chat completion with the given request + CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error) + // CreateChatCompletionStream creates a streaming chat completion + CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error) +} + +// ChatCompletionRequest represents a request to create a chat completion +type 
ChatCompletionRequest struct { + Model string + Messages []ChatCompletionMessage + Tools []Tool + Stream bool +} + +// ChatCompletionResponse represents a response from a chat completion +type ChatCompletionResponse struct { + Choices []ChatCompletionChoice +} + +// ChatCompletionChoice represents a single choice in a chat completion response +type ChatCompletionChoice struct { + Message ChatCompletionMessage + FinishReason string +} + +// ChatCompletionMessage represents a message in a chat completion +type ChatCompletionMessage struct { + Role string + Content string + ToolCalls []ToolCall + ToolCallID string +} + +// Tool represents a tool that can be used by the AI model +type Tool struct { + Type string + Function *FunctionDefinition +} + +// FunctionDefinition defines a function that can be called by the AI model +type FunctionDefinition struct { + Name string + Description string + Parameters map[string]interface{} +} + +// ToolCall represents a call to a tool by the AI model +type ToolCall struct { + ID string + Type string + Function FunctionCall +} + +// FunctionCall represents a function call within a tool call +type FunctionCall struct { + Name string + Arguments string +} + +// ChatCompletionStream represents a streaming chat completion +type ChatCompletionStream interface { + Recv() (ChatCompletionStreamResponse, error) + Close() +} + +// ChatCompletionStreamResponse represents a response from a streaming chat completion +type ChatCompletionStreamResponse struct { + Choices []ChatCompletionStreamChoice +} + +// ChatCompletionStreamChoice represents a choice in a streaming chat completion response +type ChatCompletionStreamChoice struct { + Delta ChatCompletionStreamDelta +} + +// ChatCompletionStreamDelta represents a delta in a streaming chat completion response +type ChatCompletionStreamDelta struct { + Content string + ToolCalls []ToolCall + Role string + FinishReason string +} + +// Helper functions to convert between our types and OpenAI's types +func convertMessages(messages []ChatCompletionMessage) []openai.ChatCompletionMessage { + result := make([]openai.ChatCompletionMessage, len(messages)) + for i, m := range messages { + result[i] = openai.ChatCompletionMessage{ + Role: m.Role, + Content: m.Content, + ToolCalls: convertToolCallsToOpenAI(m.ToolCalls), + ToolCallID: m.ToolCallID, + } + } + return result +} + +func convertTools(tools []Tool) []openai.Tool { + result := make([]openai.Tool, len(tools)) + for i, t := range tools { + result[i] = openai.Tool{ + Type: openai.ToolType(t.Type), + Function: &openai.FunctionDefinition{ + Name: t.Function.Name, + Description: t.Function.Description, + Parameters: t.Function.Parameters, + }, + } + } + return result +} + +func convertToolCallsToOpenAI(toolCalls []ToolCall) []openai.ToolCall { + result := make([]openai.ToolCall, len(toolCalls)) + for i, tc := range toolCalls { + result[i] = openai.ToolCall{ + ID: tc.ID, + Type: openai.ToolType(tc.Type), + Function: openai.FunctionCall{ + Name: tc.Function.Name, + Arguments: tc.Function.Arguments, + }, + } + } + return result +} + +func convertToolCallsFromOpenAI(toolCalls []openai.ToolCall) []ToolCall { + result := make([]ToolCall, len(toolCalls)) + for i, tc := range toolCalls { + result[i] = ToolCall{ + ID: tc.ID, + Type: string(tc.Type), + Function: FunctionCall{ + Name: tc.Function.Name, + Arguments: tc.Function.Arguments, + }, + } + } + return result +} + +func convertChoices(choices []openai.ChatCompletionChoice) []ChatCompletionChoice { + result := 
make([]ChatCompletionChoice, len(choices)) + for i, c := range choices { + result[i] = ChatCompletionChoice{ + Message: ChatCompletionMessage{ + Role: c.Message.Role, + Content: c.Message.Content, + ToolCalls: convertToolCallsFromOpenAI(c.Message.ToolCalls), + ToolCallID: c.Message.ToolCallID, + }, + FinishReason: string(c.FinishReason), + } + } + return result +} + +func convertStreamChoices(choices []openai.ChatCompletionStreamChoice) []ChatCompletionStreamChoice { + result := make([]ChatCompletionStreamChoice, len(choices)) + for i, c := range choices { + result[i] = ChatCompletionStreamChoice{ + Delta: ChatCompletionStreamDelta{ + Content: c.Delta.Content, + ToolCalls: convertToolCallsFromOpenAI(c.Delta.ToolCalls), + Role: c.Delta.Role, + FinishReason: "", // OpenAI's stream delta doesn't have FinishReason + }, + } + } + return result +} + +// ToolSchema defines a tool with its JSON schema and handler. +type ToolSchema struct { + Name string + Description string + Parameters map[string]interface{} // JSON schema + Handler func(projectRoot string, args map[string]interface{}) (interface{}, error) +} + +// GetDefaultToolSchemas returns all registered tools with their schemas and handlers, scoped to a project root. +func GetDefaultToolSchemas(projectRoot string) []ToolSchema { + return []ToolSchema{ + { + Name: "read_file", + Description: "Read the contents of a file.", + Parameters: map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "path": map[string]interface{}{"type": "string", "description": "Path to the file (relative to project root)"}, + }, + "required": []string{"path"}, + }, + Handler: func(funcName string, args map[string]interface{}) (interface{}, error) { + path, _ := args["path"].(string) + absPath := filepath.Join(projectRoot, path) + data, err := os.ReadFile(absPath) + if err != nil { + return nil, err + } + return map[string]interface{}{"content": string(data)}, nil + }, + }, + { + Name: "write_file", + Description: "Write content to a file.", + Parameters: map[string]interface{}{ + "type": "object", + "properties": map[string]interface{}{ + "path": map[string]interface{}{"type": "string", "description": "Path to the file (relative to project root)"}, + "content": map[string]interface{}{"type": "string", "description": "Content to write"}, + }, + "required": []string{"path", "content"}, + }, + Handler: func(funcName string, args map[string]interface{}) (interface{}, error) { + path, _ := args["path"].(string) + content, _ := args["content"].(string) + absPath := filepath.Join(projectRoot, path) + if err := os.WriteFile(absPath, []byte(content), 0644); err != nil { + return nil, err + } + sessionchanges.RegisterChange(absPath) + return map[string]interface{}{"result": "file written successfully"}, nil + }, + }, + } +} + +// getToolSchemas returns all registered tools with their schemas and handlers. +func getToolSchemas(projectRoot string) []ToolSchema { + return GetDefaultToolSchemas(projectRoot) +} + +// OpenAIChatService implements ChatServiceInterface using OpenAI's API and function-calling tools. 
+type OpenAIChatService struct { + Client AIClient + Logger *logger.Logger + ChatService *ChatService + Queries *db.Queries + ProjectsDir string + Model string +} + +func NewOpenAIChatService(apiKey string, logger *logger.Logger, chatService *ChatService, queries *db.Queries, projectsDir string) *OpenAIChatService { + return &OpenAIChatService{ + Client: NewOpenAIAdapter(apiKey), + Logger: logger, + ChatService: chatService, + Queries: queries, + ProjectsDir: projectsDir, + Model: "gpt-4o", + } +} + +func NewOpenAIChatServiceWithClient(client AIClient, logger *logger.Logger, chatService *ChatService, queries *db.Queries, projectsDir string, model string) *OpenAIChatService { + return &OpenAIChatService{ + Client: client, + Logger: logger, + ChatService: chatService, + Queries: queries, + ProjectsDir: projectsDir, + Model: model, + } +} + +// getProjectStructurePrompt generates a system prompt with the project structure and file contents. +func getProjectStructurePrompt(projectRoot string) string { + ignored := map[string]bool{ + "node_modules": true, + ".git": true, + ".DS_Store": true, + } + var sb strings.Builder + sb.WriteString(` +You are an expert AI coding agent. +All projects use Bun (TypeScript) as the runtime and build system. +Here is the current project structure and contents. + +Be proactive, read and write files as needed, your goal is to progress in the project and write the code to achieve the goal. Including fixing issues. +`) + filepath.Walk(projectRoot, func(path string, info os.FileInfo, err error) error { + if err != nil { + return nil + } + rel, _ := filepath.Rel(projectRoot, path) + parts := strings.Split(rel, string(os.PathSeparator)) + for _, part := range parts { + if ignored[part] { + if info.IsDir() { + return filepath.SkipDir + } + return nil + } + } + if info.IsDir() { + return nil + } + // Only include files < 32KB + if info.Size() < 32*1024 { + data, err := os.ReadFile(path) + if err == nil { + sb.WriteString("\n---\nFile: " + rel + "\n" + string(data) + "\n---\n") + } + } else { + sb.WriteString("\n---\nFile: " + rel + " (too large to display)\n---\n") + } + return nil + }) + return sb.String() +} + +const maxAgentSteps = 10 + +// handleToolCall executes a tool call and returns the result as a string. +func (s *OpenAIChatService) handleToolCall(toolCall openai.ToolCall, projectRoot string) string { + toolSchemas := getToolSchemas(projectRoot) + var tool ToolSchema + ok := false + for _, t := range toolSchemas { + if t.Name == toolCall.Function.Name { + tool = t + ok = true + break + } + } + if !ok { + return `{"error": "Unknown tool function: ` + toolCall.Function.Name + `"}` + } + var args map[string]interface{} + if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil { + return `{"error": "Failed to parse arguments: ` + err.Error() + `"}` + } + result, err := tool.Handler(projectRoot, args) + if err != nil { + return `{"error": "Tool error: ` + err.Error() + `"}` + } + resultJson, _ := json.Marshal(result) + return string(resultJson) +} + +// StreamChat uses a multi-step tool execution loop with OpenAI function-calling. 
+func (s *OpenAIChatService) StreamChat(ctx context.Context, project *db.Project, conversationID int64, messages []Message, observer AgentStepObserver, maxSteps int) error { + var chatMsgs []ChatCompletionMessage + projectID := project.ID + projectSlug := project.Slug + projectRoot := filepath.Join(s.ProjectsDir, projectSlug) + systemPrompt := getProjectStructurePrompt(projectRoot) + s.Logger.Debugf("[StreamChat] projectID: %s", projectID) + s.Logger.Debugf("[StreamChat] projectRoot: %s", projectRoot) + s.Logger.Debugf("[StreamChat] systemPrompt: %s", systemPrompt) + chatMsgs = append(chatMsgs, ChatCompletionMessage{ + Role: "system", + Content: systemPrompt, + }) + var lastParentMsgID *int64 + for i, m := range messages { + role := "user" + if m.Sender == "assistant" { + role = "assistant" + } + msg := ChatCompletionMessage{ + Role: role, + Content: m.Content, + } + chatMsgs = append(chatMsgs, msg) + s.Logger.Debugf("[StreamChat] input message: %d, %v", i, msg) + } + + toolSchemas := getToolSchemas(projectRoot) + toolSchemasMap := make(map[string]ToolSchema) + for _, tool := range toolSchemas { + toolSchemasMap[tool.Name] = tool + } + tools := []Tool{} + for _, tool := range toolSchemas { + tools = append(tools, Tool{ + Type: "function", + Function: &FunctionDefinition{ + Name: tool.Name, + Description: tool.Description, + Parameters: tool.Parameters, + }, + }) + } + + if maxSteps <= 0 { + maxSteps = maxAgentSteps + } + + for step := 0; step < maxSteps; step++ { + s.Logger.Debugf("[StreamChat] Agent step: %d", step) + msg, err := StreamAgentStep( + ctx, + s.Client, + chatMsgs, + s.Model, + tools, + toolSchemasMap, + observer, + ) + if err != nil { + s.Logger.Debugf("[StreamChat] Error in StreamAgentStep: %v", err) + return err + } + + s.Logger.Debugf("[StreamChat] Agent step: %d, assistant message: %s", step, msg.Content) + if len(msg.ToolCalls) > 0 { + s.Logger.Debugf("[StreamChat] Tool calls in step: %d, %v", step, msg.ToolCalls) + } + + chatMsgs = append(chatMsgs, msg) + + // If no tool calls, we're done + if len(msg.ToolCalls) == 0 { + s.Logger.Debugf("[StreamChat] No tool calls in step: %d - finishing", step) + return nil + } + + // Process all tool calls in this step + for _, toolCall := range msg.ToolCalls { + s.Logger.Debugf("[StreamChat] Handling tool call: %s, args: %s", toolCall.Function.Name, toolCall.Function.Arguments) + resultObj, _ := s.executeAndSerializeToolCall(toolCall, projectRoot) + resultStr := resultObj.resultStr + errStr := resultObj.errStr + argsStr := resultObj.argsStr + s.Logger.Debugf("[StreamChat] Tool result for: %s, %v", toolCall.Function.Name, resultStr) + + // Add tool result message to DB and get its ID, set parentID to lastParentMsgID + toolMsg, err := s.ChatService.AddMessage(ctx, conversationID, lastParentMsgID, "tool", resultStr) + if err != nil { + s.Logger.Debugf("[StreamChat] Failed to persist tool message: %v", err) + continue + } + // Persist tool call + _, err = s.ChatService.AddToolCall(ctx, toolMsg.ID, toolCall.Function.Name, argsStr, resultStr, errStr) + if err != nil { + s.Logger.Debugf("[StreamChat] Failed to persist tool call: %v", err) + } + // Add tool result message to chatMsgs for next step + chatMsgs = append(chatMsgs, ChatCompletionMessage{ + Role: "tool", + Content: resultStr, + ToolCallID: toolCall.ID, + }) + } + } + + // If we reach max steps, notify observer and make one final call and stream the response + if observer != nil { + observer.OnMaxStepsReached() + } + s.Logger.Debugf("[StreamChat] Reached maxSteps, making final 
call") + msg, err := StreamAgentStep( + ctx, + s.Client, + chatMsgs, + s.Model, + tools, + toolSchemasMap, + observer, + ) + if err != nil { + s.Logger.Debugf("[StreamChat] Error in final StreamAgentStep: %v", err) + return err + } + chatMsgs = append(chatMsgs, msg) + s.Logger.Debugf("[StreamChat] Final assistant message: %s", msg.Content) + if len(msg.ToolCalls) > 0 { + s.Logger.Debugf("[StreamChat] Final tool calls: %v", msg.ToolCalls) + } + + return nil +} + +// Helper to execute a tool call and serialize args/result/error +func (s *OpenAIChatService) executeAndSerializeToolCall(toolCall ToolCall, projectRoot string) (struct { + resultStr, argsStr string + errStr *string +}, error) { + var args map[string]interface{} + if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil { + errMsg := err.Error() + return struct { + resultStr, argsStr string + errStr *string + }{"", toolCall.Function.Arguments, &errMsg}, err + } + result, err := getToolSchemas(projectRoot)[0].Handler(projectRoot, args) // Find the correct handler + var resultStr string + if result != nil { + b, _ := json.Marshal(result) + resultStr = string(b) + } + var errStr *string + if err != nil { + errMsg := err.Error() + errStr = &errMsg + } + argsStr, _ := json.Marshal(args) + return struct { + resultStr, argsStr string + errStr *string + }{resultStr, string(argsStr), errStr}, nil +} + +// AgentStepObserver defines hooks for observing agent step events. +type AgentStepObserver interface { + OnLLMContent(content string) + OnToolCallStart(toolCallID, name string) + OnToolCallUpdate(toolCallID, name, arguments string) + OnToolCallExecute(toolCallID, name string, args map[string]interface{}) + OnToolCallResult(toolCallID, name string, result interface{}, err error) + OnMaxStepsReached() +} + +// StreamAgentStep streams the assistant's response for a single agent step, executes tool calls if present, and streams tool execution progress. 
+func StreamAgentStep( + ctx context.Context, + client AIClient, + messages []ChatCompletionMessage, + model string, + tools []Tool, + toolSchemas map[string]ToolSchema, + observer AgentStepObserver, +) (ChatCompletionMessage, error) { + var contentBuilder strings.Builder + toolCallsMap := map[string]*ToolCall{} + var lastToolCallID string + + stream, err := client.CreateChatCompletionStream(ctx, ChatCompletionRequest{ + Model: model, + Messages: messages, + Tools: tools, + Stream: true, + }) + if err != nil { + return ChatCompletionMessage{}, err + } + defer stream.Close() + + for { + response, err := stream.Recv() + if err != nil { + if err == io.EOF { + break + } + return ChatCompletionMessage{}, err + } + for _, choice := range response.Choices { + if choice.Delta.Content != "" { + contentBuilder.WriteString(choice.Delta.Content) + if observer != nil { + observer.OnLLMContent(choice.Delta.Content) + } + } + + for _, tc := range choice.Delta.ToolCalls { + if tc.ID != "" { + lastToolCallID = tc.ID + if _, ok := toolCallsMap[tc.ID]; !ok { + toolCallsMap[tc.ID] = &ToolCall{ + ID: tc.ID, + Type: tc.Type, + Function: tc.Function, + } + if observer != nil { + observer.OnToolCallStart(tc.ID, tc.Function.Name) + } + } + } + if lastToolCallID != "" { + toolCall := toolCallsMap[lastToolCallID] + updated := false + if tc.Function.Name != "" && toolCall.Function.Name != tc.Function.Name { + toolCall.Function.Name = tc.Function.Name + updated = true + } + if tc.Function.Arguments != "" { + toolCall.Function.Arguments += tc.Function.Arguments + updated = true + } + if observer != nil && updated { + observer.OnToolCallUpdate(lastToolCallID, toolCall.Function.Name, toolCall.Function.Arguments) + } + } + } + + if choice.Delta.FinishReason == "tool_calls" { + lastToolCallID = "" + break + } + } + } + + var toolCalls []ToolCall + for _, tc := range toolCallsMap { + toolCalls = append(toolCalls, *tc) + } + assistantMsg := ChatCompletionMessage{ + Role: "assistant", + Content: contentBuilder.String(), + ToolCalls: toolCalls, + } + + for _, toolCall := range toolCalls { + toolSchema, ok := toolSchemas[toolCall.Function.Name] + if !ok { + if observer != nil { + observer.OnToolCallResult(toolCall.ID, toolCall.Function.Name, nil, + fmt.Errorf("Unknown tool function: %s", toolCall.Function.Name)) + } + continue + } + var args map[string]interface{} + err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args) + if err != nil { + if observer != nil { + observer.OnToolCallResult(toolCall.ID, toolCall.Function.Name, nil, err) + } + continue + } + if observer != nil { + observer.OnToolCallExecute(toolCall.ID, toolCall.Function.Name, args) + } + result, err := toolSchema.Handler(toolCall.Function.Name, args) + if observer != nil { + observer.OnToolCallResult(toolCall.ID, toolCall.Function.Name, result, err) + } + if err != nil { + continue + } + } + + return assistantMsg, nil +} + +// streamingObserver wraps an AgentStepObserver and captures assistant tokens +// for persistence after streaming. +type streamingObserver struct { + AgentStepObserver + onAssistantToken func(token string) +} + +func (o *streamingObserver) OnLLMContent(content string) { + if o.AgentStepObserver != nil { + o.AgentStepObserver.OnLLMContent(content) + } + if o.onAssistantToken != nil { + o.onAssistantToken(content) + } +} + +// ChatWithPersistence handles chat with DB persistence for a project. 
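+// It ensures the project's default conversation exists, persists the incoming user message,
+// replays the full history through StreamChat, and finally stores the streamed assistant
+// reply. The observer is wrapped so assistant tokens are captured for persistence while
+// still being forwarded to the caller.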
+func (s *OpenAIChatService) ChatWithPersistence( + ctx context.Context, + projectID int64, + userMessage string, + observer AgentStepObserver, + maxSteps int, +) error { + project, err := s.Queries.GetProject(ctx, projectID) + if err != nil { + return err + } + if s.ChatService == nil { + return fmt.Errorf("ChatService is not configured") + } + // 1. Ensure conversation exists + conv, err := s.ChatService.EnsureConversationForProject(ctx, projectID) + if err != nil { + return err + } + + // 2. Add the new user message to the DB + _, err = s.ChatService.AddMessage(ctx, conv.ID, nil, "user", userMessage) + if err != nil { + return err + } + + // 3. Fetch all messages again (now includes the new user message) + dbMessages, err := s.ChatService.GetMessages(ctx, conv.ID) + if err != nil { + return err + } + var messages []Message + for _, m := range dbMessages { + messages = append(messages, Message{ + ID: m.ID, + ConversationID: m.ConversationID, + Sender: m.Sender, + Content: m.Content, + CreatedAt: m.CreatedAt.Format(time.RFC3339), + }) + } + + // 4. Call the streaming chat logic (this will stream and also generate the assistant reply) + var assistantReply strings.Builder + streamObserver := &streamingObserver{ + AgentStepObserver: observer, + onAssistantToken: func(token string) { + assistantReply.WriteString(token) + }, + } + err = s.StreamChat(ctx, project, conv.ID, messages, streamObserver, maxSteps) + if err != nil { + return err + } + + // 5. Store the assistant's reply in the DB + _, err = s.ChatService.AddMessage(ctx, conv.ID, nil, "assistant", assistantReply.String()) + return err +} diff --git a/pkg/scai/ai/chat_service.go b/pkg/scai/ai/chat_service.go new file mode 100644 index 0000000..cfbf60f --- /dev/null +++ b/pkg/scai/ai/chat_service.go @@ -0,0 +1,167 @@ +package ai + +import ( + "context" + "database/sql" + "time" + + "github.com/chainlaunch/chainlaunch/pkg/db" +) + +type ChatService struct { + Queries *db.Queries +} + +type Conversation struct { + ID int64 + ProjectID int64 + StartedAt time.Time +} + +func NewChatService(queries *db.Queries) *ChatService { + return &ChatService{Queries: queries} +} + +// EnsureConversationForProject returns the default conversation for a project, creating it if needed. +func (s *ChatService) EnsureConversationForProject(ctx context.Context, projectID int64) (Conversation, error) { + conv, err := s.Queries.GetDefaultConversationForProject(ctx, projectID) + if err == sql.ErrNoRows { + // Create new conversation + row, err := s.Queries.CreateConversation(ctx, projectID) + if err != nil { + return Conversation{}, err + } + return Conversation{ + ID: row.ID, + ProjectID: row.ProjectID, + StartedAt: row.StartedAt, + }, nil + } else if err != nil { + return Conversation{}, err + } + return Conversation{ + ID: conv.ID, + ProjectID: conv.ProjectID, + StartedAt: conv.StartedAt, + }, nil +} + +// AddMessage stores a message in the conversation. Accepts optional parentID. +func (s *ChatService) AddMessage(ctx context.Context, conversationID int64, parentID *int64, sender, content string) (*db.Message, error) { + var parentNull sql.NullInt64 + if parentID != nil { + parentNull = sql.NullInt64{Int64: *parentID, Valid: true} + } + row, err := s.Queries.InsertMessage(ctx, &db.InsertMessageParams{ + ConversationID: conversationID, + ParentID: parentNull, + Sender: sender, + Content: content, + }) + if err != nil { + return nil, err + } + return row, nil +} + +// GetMessages returns all messages for a conversation. 
+func (s *ChatService) GetMessages(ctx context.Context, conversationID int64) ([]*db.Message, error) { + return s.Queries.ListMessagesForConversation(ctx, conversationID) +} + +// AddToolCall stores a tool call for a message. +func (s *ChatService) AddToolCall(ctx context.Context, messageID int64, toolName, arguments, result string, errStr *string) (*db.ToolCall, error) { + var resultNull sql.NullString + if result != "" { + resultNull = sql.NullString{String: result, Valid: true} + } + var errorNull sql.NullString + if errStr != nil { + errorNull = sql.NullString{String: *errStr, Valid: true} + } + return s.Queries.InsertToolCall(ctx, &db.InsertToolCallParams{ + MessageID: messageID, + ToolName: toolName, + Arguments: arguments, + Result: resultNull, + Error: errorNull, + }) +} + +// GetConversationMessages returns all messages for a conversation with their tool calls. +func (s *ChatService) GetConversationMessages(ctx context.Context, projectID, conversationID int64) ([]Message, error) { + // Get all messages for the conversation + messages, err := s.Queries.ListMessagesForConversation(ctx, conversationID) + if err != nil { + return nil, err + } + + // Get tool calls for all messages + toolCallsByMsg := make(map[int64][]*db.ToolCall) + for _, msg := range messages { + toolCalls, _ := s.Queries.ListToolCallsForMessage(ctx, msg.ID) + toolCallsByMsg[msg.ID] = toolCalls + } + + // Convert messages to response format + var result []Message + for _, msg := range messages { + result = append(result, Message{ + ID: msg.ID, + ConversationID: msg.ConversationID, + Sender: msg.Sender, + Content: msg.Content, + CreatedAt: msg.CreatedAt.Format(time.RFC3339), + ToolCalls: toolCallsByMsg[msg.ID], + }) + } + + return result, nil +} + +// GetConversationDetail returns detailed information about a conversation. +func (s *ChatService) GetConversationDetail(ctx context.Context, projectID, conversationID int64) (*ConversationDetail, error) { + // Get conversation info + conv, err := s.Queries.GetDefaultConversationForProject(ctx, projectID) + if err != nil { + return nil, err + } + + // Get all messages with their tool calls + messages, err := s.GetConversationMessages(ctx, projectID, conversationID) + if err != nil { + return nil, err + } + + return &ConversationDetail{ + ID: conv.ID, + ProjectID: conv.ProjectID, + StartedAt: conv.StartedAt.Format(time.RFC3339), + Messages: messages, + }, nil +} + +// GenerateCode generates code using the AI service. +func (s *ChatService) GenerateCode(ctx context.Context, prompt string, project *db.Project) (string, error) { + // This is a placeholder implementation. In a real implementation, this would use the AI service + // to generate code based on the prompt and project context. 
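+	// A real implementation would likely build a prompt from the project context and call an
+	// AIClient such as the OpenAI adapter, e.g. (illustrative sketch only, not wired up in this diff):
+	//
+	//	resp, err := client.CreateChatCompletion(ctx, ChatCompletionRequest{
+	//		Model:    "gpt-4o",
+	//		Messages: []ChatCompletionMessage{{Role: "user", Content: prompt}},
+	//	})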
+ return "// Generated code placeholder", nil +} + +// Message represents a chat message with its tool calls +type Message struct { + ID int64 `json:"id"` + ConversationID int64 `json:"conversationId"` + Sender string `json:"sender"` + Content string `json:"content"` + CreatedAt string `json:"createdAt"` + ToolCalls []*db.ToolCall `json:"toolCalls,omitempty"` +} + +// ConversationDetail represents detailed information about a conversation +type ConversationDetail struct { + ID int64 `json:"id"` + ProjectID int64 `json:"projectId"` + StartedAt string `json:"startedAt"` + Messages []Message `json:"messages"` +} diff --git a/pkg/scai/ai/claude.go b/pkg/scai/ai/claude.go new file mode 100644 index 0000000..8cbbea6 --- /dev/null +++ b/pkg/scai/ai/claude.go @@ -0,0 +1,157 @@ +package ai + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/anthropics/anthropic-sdk-go" + "github.com/anthropics/anthropic-sdk-go/packages/ssestream" + "github.com/chainlaunch/chainlaunch/pkg/db" + "github.com/chainlaunch/chainlaunch/pkg/logger" +) + +// ClaudeAdapter adapts Anthropic's Claude API to our AIClient interface +type ClaudeAdapter struct { + client anthropic.Client +} + +// NewClaudeAdapter creates a new Claude adapter +func NewClaudeAdapter(apiKey string) *ClaudeAdapter { + return &ClaudeAdapter{ + client: anthropic.NewClient(), + } +} + +// CreateChatCompletion implements AIClient interface for Claude +func (a *ClaudeAdapter) CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error) { + // Convert our messages to Claude's format + claudeMessages := make([]anthropic.MessageParam, len(req.Messages)) + for i, m := range req.Messages { + claudeMessages[i] = anthropic.NewUserMessage(anthropic.NewTextBlock(m.Content)) + } + + // Convert our tools to Claude's format + claudeTools := make([]anthropic.ToolUnionParam, len(req.Tools)) + for i, t := range req.Tools { + claudeTools[i] = anthropic.ToolUnionParamOfTool(anthropic.ToolInputSchemaParam{ + Type: "object", + Properties: t.Function.Parameters, + }, t.Function.Name) + } + + // Call Claude + resp, err := a.client.Messages.New(ctx, anthropic.MessageNewParams{ + Model: anthropic.Model(req.Model), + Messages: claudeMessages, + Tools: claudeTools, + }) + if err != nil { + return ChatCompletionResponse{}, err + } + + // Convert Claude's response to our format + var toolCalls []ToolCall + for _, block := range resp.Content { + if block.Type == "tool_use" { + toolUse := block.AsToolUse() + args, _ := json.Marshal(toolUse.Input) + toolCalls = append(toolCalls, ToolCall{ + ID: toolUse.ID, + Type: "function", + Function: FunctionCall{ + Name: toolUse.Name, + Arguments: string(args), + }, + }) + } + } + + return ChatCompletionResponse{ + Choices: []ChatCompletionChoice{ + { + Message: ChatCompletionMessage{ + Role: "assistant", + Content: resp.Content[0].AsText().Text, + ToolCalls: toolCalls, + }, + FinishReason: string(resp.StopReason), + }, + }, + }, nil +} + +// CreateChatCompletionStream implements AIClient interface for Claude +func (a *ClaudeAdapter) CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error) { + // Convert our messages to Claude's format + claudeMessages := make([]anthropic.MessageParam, len(req.Messages)) + for i, m := range req.Messages { + claudeMessages[i] = anthropic.NewUserMessage(anthropic.NewTextBlock(m.Content)) + } + + // Convert our tools to Claude's format + claudeTools := make([]anthropic.ToolUnionParam, len(req.Tools)) + for i, t := 
range req.Tools { + claudeTools[i] = anthropic.ToolUnionParamOfTool(anthropic.ToolInputSchemaParam{ + Type: "object", + Properties: t.Function.Parameters, + }, t.Function.Name) + } + + // Call Claude + stream := a.client.Messages.NewStreaming(ctx, anthropic.MessageNewParams{ + Model: anthropic.Model(req.Model), + Messages: claudeMessages, + Tools: claudeTools, + }) + + // Return our adapter for the stream + return &ClaudeStreamAdapter{stream: stream}, nil +} + +// ClaudeStreamAdapter adapts Claude's stream to our ChatCompletionStream interface +type ClaudeStreamAdapter struct { + stream *ssestream.Stream[anthropic.MessageStreamEventUnion] +} + +func (a *ClaudeStreamAdapter) Recv() (ChatCompletionStreamResponse, error) { + if !a.stream.Next() { + if err := a.stream.Err(); err != nil { + return ChatCompletionStreamResponse{}, err + } + return ChatCompletionStreamResponse{}, nil + } + + event := a.stream.Current() + switch event.Type { + case "message_delta": + delta := event.AsMessageDelta() + return ChatCompletionStreamResponse{ + Choices: []ChatCompletionStreamChoice{ + { + Delta: ChatCompletionStreamDelta{ + Role: "assistant", + FinishReason: string(delta.Delta.StopReason), + }, + }, + }, + }, nil + default: + return ChatCompletionStreamResponse{}, fmt.Errorf("unexpected event type: %s", event.Type) + } +} + +func (a *ClaudeStreamAdapter) Close() { + a.stream.Close() +} + +// NewClaudeChatService creates a new chat service using Claude +func NewClaudeChatService(apiKey string, logger *logger.Logger, chatService *ChatService, queries *db.Queries, projectsDir string) *OpenAIChatService { + return &OpenAIChatService{ + Client: NewClaudeAdapter(apiKey), + Logger: logger, + ChatService: chatService, + Queries: queries, + ProjectsDir: projectsDir, + } +} diff --git a/pkg/scai/ai/handlers.go b/pkg/scai/ai/handlers.go new file mode 100644 index 0000000..0ceea92 --- /dev/null +++ b/pkg/scai/ai/handlers.go @@ -0,0 +1,326 @@ +package ai + +import ( + "encoding/json" + "net/http" + "strconv" + "time" + + "github.com/chainlaunch/chainlaunch/pkg/errors" + "github.com/chainlaunch/chainlaunch/pkg/http/response" + "github.com/chainlaunch/chainlaunch/pkg/scai/boilerplates" + "github.com/chainlaunch/chainlaunch/pkg/scai/projects" + "github.com/go-chi/chi/v5" +) + +// Model represents an AI model +type Model struct { + Name string `json:"name"` + Description string `json:"description"` + MaxTokens int `json:"maxTokens"` +} + +// Template represents a project template +type Template struct { + Name string `json:"name"` + Description string `json:"description"` +} + +// GenerateRequest represents a code generation request +type GenerateRequest struct { + ProjectID int64 `json:"projectId"` + Prompt string `json:"prompt"` +} + +// GenerateResponse represents a code generation response +type GenerateResponse struct { + Code string `json:"code"` +} + +// NewAIHandler creates a new instance of AIHandler with the required dependencies +func NewAIHandler(openAIService *OpenAIChatService, chatService *ChatService, projectsService *projects.ProjectsService, boilerplateService *boilerplates.BoilerplateService) *AIHandler { + return &AIHandler{ + OpenAIChatService: openAIService, + ChatService: chatService, + Projects: projectsService, + Boilerplates: boilerplateService, + } +} + +// AIHandler now has a ChatService field for dependency injection. 
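+// Typical request flow against the routes registered below (illustrative; paths assume the
+// /api/v1 prefix used in the swagger annotations):
+//
+//	GET  /api/v1/ai/models
+//	GET  /api/v1/ai/boilerplates?network_id=1
+//	POST /api/v1/ai/generate  {"projectId": 1, "prompt": "..."}
+//	GET  /api/v1/ai/{projectId}/conversations/{conversationId}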
+type AIHandler struct { + OpenAIChatService *OpenAIChatService + ChatService *ChatService + Projects *projects.ProjectsService + Boilerplates *boilerplates.BoilerplateService +} + +// RegisterRoutes registers all AI-related routes +func (h *AIHandler) RegisterRoutes(r chi.Router) { + r.Route("/ai", func(r chi.Router) { + r.Get("/boilerplates", response.Middleware(h.GetBoilerplates)) + r.Get("/models", response.Middleware(h.GetModels)) + r.Post("/generate", response.Middleware(h.Generate)) + r.Get("/{projectId}/conversations", response.Middleware(h.GetConversations)) + r.Get("/{projectId}/conversations/{conversationId}", response.Middleware(h.GetConversationMessages)) + r.Get("/{projectId}/conversations/{conversationId}/export", response.Middleware(h.GetConversationDetail)) + }) +} + +// GetBoilerplates godoc +// @Summary Get available boilerplates +// @Description Returns a list of available boilerplates filtered by network platform +// @Tags ai +// @Produce json +// @Param network_id query int true "Network ID to filter boilerplates by platform" +// @Success 200 {array} Boilerplate +// @Failure 400 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/ai/boilerplates [get] +func (h *AIHandler) GetBoilerplates(w http.ResponseWriter, r *http.Request) error { + networkIDStr := r.URL.Query().Get("network_id") + if networkIDStr == "" { + return errors.NewValidationError("network_id is required", nil) + } + + networkID, err := strconv.ParseInt(networkIDStr, 10, 64) + if err != nil { + return errors.NewValidationError("invalid network_id", map[string]interface{}{ + "error": err.Error(), + }) + } + + // Get boilerplates for the network + boilerplates, err := h.Boilerplates.GetBoilerplatesByNetworkID(r.Context(), networkID) + if err != nil { + if err.Error() == "sql: no rows in result set" { + return errors.NewNotFoundError("network not found", nil) + } + return errors.NewInternalError("failed to get boilerplates", err, nil) + } + + return response.WriteJSON(w, http.StatusOK, boilerplates) +} + +// GetModels godoc +// @Summary Get available AI models +// @Description Returns a list of available AI models for code generation +// @Tags ai +// @Produce json +// @Success 200 {array} Model +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/ai/models [get] +func (h *AIHandler) GetModels(w http.ResponseWriter, r *http.Request) error { + models := []Model{ + { + Name: "GPT-4", + Description: "Most capable model, best for complex tasks", + MaxTokens: 8192, + }, + { + Name: "GPT-3.5", + Description: "Fast and efficient model for simpler tasks", + MaxTokens: 4096, + }, + } + return response.WriteJSON(w, http.StatusOK, models) +} + +// Generate godoc +// @Summary Generate code +// @Description Generates code based on the provided prompt and project context +// @Tags ai +// @Accept json +// @Produce json +// @Param request body GenerateRequest true "Generation request" +// @Success 200 {object} GenerateResponse +// @Failure 400 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/ai/generate [post] +func (h *AIHandler) Generate(w http.ResponseWriter, r *http.Request) error { + var req GenerateRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + return errors.NewValidationError("invalid request body", map[string]interface{}{ + "error": err.Error(), + }) + } + + // Get project 
directly from the database + project, err := h.Projects.Queries.GetProject(r.Context(), req.ProjectID) + if err != nil { + if err.Error() == "sql: no rows in result set" { + return errors.NewNotFoundError("project not found", nil) + } + return errors.NewInternalError("failed to get project", err, nil) + } + + code, err := h.ChatService.GenerateCode(r.Context(), req.Prompt, project) + if err != nil { + return errors.NewInternalError("failed to generate code", err, nil) + } + + return response.WriteJSON(w, http.StatusOK, GenerateResponse{ + Code: code, + }) +} + +// GetConversations godoc +// @Summary Get all conversations for a project +// @Description Returns a list of all chat conversations associated with a specific project +// @Tags ai +// @Produce json +// @Param projectId path int true "Project ID" +// @Success 200 {array} ConversationResponse +// @Failure 400 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/ai/{projectId}/conversations [get] +func (h *AIHandler) GetConversations(w http.ResponseWriter, r *http.Request) error { + projectID, err := strconv.ParseInt(chi.URLParam(r, "projectId"), 10, 64) + if err != nil { + return errors.NewValidationError("invalid project ID", map[string]interface{}{ + "error": err.Error(), + }) + } + + convs, err := h.ChatService.Queries.ListConversationsForProject(r.Context(), projectID) + if err != nil { + return errors.NewInternalError("failed to get conversations", err, nil) + } + + var resp []ConversationResponse + for _, c := range convs { + resp = append(resp, ConversationResponse{ + ID: c.ID, + ProjectID: c.ProjectID, + StartedAt: c.StartedAt.Format(time.RFC3339), + }) + } + + return response.WriteJSON(w, http.StatusOK, resp) +} + +// GetConversationMessages godoc +// @Summary Get conversation messages +// @Description Get all messages in a conversation +// @Tags ai +// @Produce json +// @Param projectId path int true "Project ID" +// @Param conversationId path int true "Conversation ID" +// @Success 200 {array} Message +// @Failure 400 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/ai/{projectId}/conversations/{conversationId} [get] +func (h *AIHandler) GetConversationMessages(w http.ResponseWriter, r *http.Request) error { + projectID, err := strconv.ParseInt(chi.URLParam(r, "projectId"), 10, 64) + if err != nil { + return errors.NewValidationError("invalid project ID", map[string]interface{}{ + "error": err.Error(), + }) + } + + conversationID, err := strconv.ParseInt(chi.URLParam(r, "conversationId"), 10, 64) + if err != nil { + return errors.NewValidationError("invalid conversation ID", map[string]interface{}{ + "error": err.Error(), + }) + } + + messages, err := h.ChatService.GetConversationMessages(r.Context(), projectID, conversationID) + if err != nil { + if err.Error() == "sql: no rows in result set" { + return errors.NewNotFoundError("conversation not found", nil) + } + return errors.NewInternalError("failed to get conversation messages", err, nil) + } + + return response.WriteJSON(w, http.StatusOK, messages) +} + +// GetConversationDetail godoc +// @Summary Get conversation detail +// @Description Get detailed information about a conversation including all messages and metadata +// @Tags ai +// @Produce json +// @Param projectId path int true "Project ID" +// @Param conversationId path int true "Conversation ID" +// @Success 200 {object} ConversationDetail +// @Failure 400 {object} 
response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/ai/{projectId}/conversations/{conversationId}/export [get] +func (h *AIHandler) GetConversationDetail(w http.ResponseWriter, r *http.Request) error { + projectID, err := strconv.ParseInt(chi.URLParam(r, "projectId"), 10, 64) + if err != nil { + return errors.NewValidationError("invalid project ID", map[string]interface{}{ + "error": err.Error(), + }) + } + + conversationID, err := strconv.ParseInt(chi.URLParam(r, "conversationId"), 10, 64) + if err != nil { + return errors.NewValidationError("invalid conversation ID", map[string]interface{}{ + "error": err.Error(), + }) + } + + detail, err := h.ChatService.GetConversationDetail(r.Context(), projectID, conversationID) + if err != nil { + if err.Error() == "sql: no rows in result set" { + return errors.NewNotFoundError("conversation not found", nil) + } + return errors.NewInternalError("failed to get conversation detail", err, nil) + } + + return response.WriteJSON(w, http.StatusOK, detail) +} + +// ConversationResponse represents a conversation for API responses +// swagger:model +type ConversationResponse struct { + ID int64 `json:"id"` + ProjectID int64 `json:"projectId"` + StartedAt string `json:"startedAt"` +} + +// MessageResponse represents a message for API responses +// swagger:model +type MessageResponse struct { + ID int64 `json:"id"` + ConversationID int64 `json:"conversationId"` + Sender string `json:"sender"` + Content string `json:"content"` + CreatedAt string `json:"createdAt"` +} + +// MessageDetailResponse represents a message with tool calls +// swagger:model +type MessageDetailResponse struct { + ID int64 `json:"id"` + ConversationID int64 `json:"conversationId"` + Sender string `json:"sender"` + Content string `json:"content"` + CreatedAt string `json:"createdAt"` + ToolCalls []ToolCallResponse `json:"toolCalls"` +} + +// ToolCallResponse represents a tool call for API responses +// swagger:model +type ToolCallResponse struct { + ID int64 `json:"id"` + MessageID int64 `json:"messageId"` + ToolName string `json:"toolName"` + Arguments string `json:"arguments"` + Result string `json:"result"` + Error string `json:"error"` + CreatedAt string `json:"createdAt"` +} + +// ParentMessageDetailResponse represents a parent message with its children (tool calls, etc.) +// swagger:model +type ParentMessageDetailResponse struct { + Message MessageDetailResponse `json:"message"` + Children []MessageDetailResponse `json:"children"` +} diff --git a/pkg/scai/ai/openai.go b/pkg/scai/ai/openai.go new file mode 100644 index 0000000..9430eb0 --- /dev/null +++ b/pkg/scai/ai/openai.go @@ -0,0 +1,96 @@ +package ai + +import ( + "context" + + "github.com/openai/openai-go" + "github.com/openai/openai-go/option" + "github.com/openai/openai-go/packages/ssestream" +) + +type OpenAIAdapter struct { + client openai.Client +} + +func NewOpenAIAdapter(apiKey string) *OpenAIAdapter { + return &OpenAIAdapter{ + client: openai.NewClient(option.WithAPIKey(apiKey)), + } +} + +func (a *OpenAIAdapter) CreateChatCompletion(ctx context.Context, req ChatCompletionRequest) (ChatCompletionResponse, error) { + messages := make([]openai.ChatCompletionMessageParamUnion, len(req.Messages)) + for i, m := range req.Messages { + messages[i] = openai.UserMessage(m.Content) + } + + oaiReq := openai.ChatCompletionNewParams{ + Model: req.Model, + Messages: messages, + // Add more fields as needed (Tools, etc.) 
+ } + + resp, err := a.client.Chat.Completions.New(ctx, oaiReq) + if err != nil { + return ChatCompletionResponse{}, err + } + + choices := make([]ChatCompletionChoice, len(resp.Choices)) + for i, c := range resp.Choices { + choices[i] = ChatCompletionChoice{ + Message: ChatCompletionMessage{ + Role: string(c.Message.Role), + Content: c.Message.Content, + }, + FinishReason: string(c.FinishReason), + } + } + + return ChatCompletionResponse{ + Choices: choices, + }, nil +} + +func (a *OpenAIAdapter) CreateChatCompletionStream(ctx context.Context, req ChatCompletionRequest) (ChatCompletionStream, error) { + messages := make([]openai.ChatCompletionMessageParamUnion, len(req.Messages)) + for i, m := range req.Messages { + messages[i] = openai.UserMessage(m.Content) + } + oaiReq := openai.ChatCompletionNewParams{ + Model: req.Model, + Messages: messages, + // Add more fields as needed (Tools, etc.) + } + stream := a.client.Chat.Completions.NewStreaming(ctx, oaiReq) + return &OpenAIStreamAdapter{stream: stream}, nil +} + +type OpenAIStreamAdapter struct { + stream *ssestream.Stream[openai.ChatCompletionChunk] +} + +func (a *OpenAIStreamAdapter) Recv() (ChatCompletionStreamResponse, error) { + if !a.stream.Next() { + if err := a.stream.Err(); err != nil { + return ChatCompletionStreamResponse{}, err + } + return ChatCompletionStreamResponse{}, nil // End of stream + } + chunk := a.stream.Current() + choices := make([]ChatCompletionStreamChoice, len(chunk.Choices)) + for i, c := range chunk.Choices { + choices[i] = ChatCompletionStreamChoice{ + Delta: ChatCompletionStreamDelta{ + Role: string(c.Delta.Role), + Content: c.Delta.Content, + }, + } + } + return ChatCompletionStreamResponse{ + Choices: choices, + }, nil +} + +func (a *OpenAIStreamAdapter) Close() { + a.stream.Close() +} diff --git a/pkg/scai/ai/service.go b/pkg/scai/ai/service.go new file mode 100644 index 0000000..18705fd --- /dev/null +++ b/pkg/scai/ai/service.go @@ -0,0 +1,97 @@ +package ai + +import ( + "context" + "database/sql" + "encoding/json" + "errors" + "fmt" + "os" + "path/filepath" + + "github.com/chainlaunch/chainlaunch/pkg/db" + "go.uber.org/zap" +) + +// Boilerplate represents a project boilerplate template +type Boilerplate struct { + Name string `json:"name"` + Description string `json:"description"` + Platform string `json:"platform"` + Path string `json:"path"` +} + +// BoilerplateService handles boilerplate-related operations +type BoilerplateService struct { + Queries *db.Queries + BoilerplatesDir string +} + +// NewBoilerplateService creates a new BoilerplateService instance +func NewBoilerplateService(queries *db.Queries, boilerplatesDir string) *BoilerplateService { + return &BoilerplateService{ + Queries: queries, + BoilerplatesDir: boilerplatesDir, + } +} + +// GetBoilerplates returns a list of available boilerplates filtered by network platform +func (s *BoilerplateService) GetBoilerplates(ctx context.Context, networkID int64) ([]Boilerplate, error) { + // Get network platform from network ID + network, err := s.Queries.GetNetwork(ctx, networkID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, errors.New("network not found") + } + return nil, fmt.Errorf("failed to get network: %w", err) + } + + // List boilerplate directories + entries, err := os.ReadDir(s.BoilerplatesDir) + if err != nil { + return nil, fmt.Errorf("failed to read boilerplates directory: %w", err) + } + + var boilerplates []Boilerplate + for _, entry := range entries { + if !entry.IsDir() { + continue + } + + // Read 
boilerplate metadata + metadataPath := filepath.Join(s.BoilerplatesDir, entry.Name(), "metadata.json") + metadata, err := os.ReadFile(metadataPath) + if err != nil { + zap.L().Warn("failed to read boilerplate metadata", + zap.String("boilerplate", entry.Name()), + zap.Error(err)) + continue + } + + var meta struct { + Name string `json:"name"` + Description string `json:"description"` + Platform string `json:"platform"` + } + if err := json.Unmarshal(metadata, &meta); err != nil { + zap.L().Warn("failed to parse boilerplate metadata", + zap.String("boilerplate", entry.Name()), + zap.Error(err)) + continue + } + + // Filter by platform + if meta.Platform != network.Platform { + continue + } + + boilerplates = append(boilerplates, Boilerplate{ + Name: meta.Name, + Description: meta.Description, + Platform: meta.Platform, + Path: entry.Name(), + }) + } + + return boilerplates, nil +} diff --git a/pkg/scai/boilerplates/boilerplates.go b/pkg/scai/boilerplates/boilerplates.go new file mode 100644 index 0000000..b0f66d5 --- /dev/null +++ b/pkg/scai/boilerplates/boilerplates.go @@ -0,0 +1,313 @@ +package boilerplates + +import ( + "archive/tar" + "compress/gzip" + "context" + "embed" + "encoding/json" + "fmt" + "io" + "net/http" + "os" + "path/filepath" + "strings" + "time" + + "github.com/chainlaunch/chainlaunch/pkg/db" + "github.com/google/go-github/v45/github" + "gopkg.in/yaml.v3" +) + +// BoilerplateConfig represents a boilerplate configuration +type BoilerplateConfig struct { + ID string `yaml:"-" json:"id"` // ID is the key in the configs map + Name string `yaml:"name" json:"name"` + Description string `yaml:"description" json:"description"` + Platform string `yaml:"platform" json:"platform"` + Command string `yaml:"command" json:"command"` + Args []string `yaml:"args" json:"args"` + Image string `yaml:"image" json:"image"` + RepoOwner string `yaml:"repoOwner" json:"repoOwner"` + RepoName string `yaml:"repoName" json:"repoName"` + RepoPath string `yaml:"repoPath,omitempty" json:"repoPath,omitempty"` +} + +// BoilerplatesConfig represents the top-level configuration structure +type BoilerplatesConfig struct { + Boilerplates map[string]BoilerplateConfig `yaml:"boilerplates"` +} + +// BoilerplateService manages boilerplate templates and their configurations +type BoilerplateService struct { + Queries *db.Queries + configs map[string]BoilerplateConfig + client *github.Client + owner string + repo string + path string + lastFetch time.Time +} + +// NewBoilerplateService creates a new BoilerplateService instance +func NewBoilerplateService(queries *db.Queries) (*BoilerplateService, error) { + service := &BoilerplateService{ + Queries: queries, + configs: make(map[string]BoilerplateConfig), + client: github.NewClient(nil), + } + + // Load configurations from the default location + if err := service.loadConfigs(); err != nil { + return nil, fmt.Errorf("failed to load boilerplate configs: %w", err) + } + + return service, nil +} + +// loadConfigs loads boilerplate configurations from the default location +func (s *BoilerplateService) loadConfigs() error { + // Load from the embedded YAML file + configPath := "configs/boilerplates.yaml" + data, err := embedFS.ReadFile(configPath) + if err != nil { + return fmt.Errorf("failed to read boilerplate configs: %w", err) + } + + var config BoilerplatesConfig + if err := yaml.Unmarshal(data, &config); err != nil { + return fmt.Errorf("failed to parse boilerplate configs: %w", err) + } + + // Set the ID field for each config + for id, boilerplateConfig := range 
config.Boilerplates { + boilerplateConfig.ID = id + config.Boilerplates[id] = boilerplateConfig + } + + s.configs = config.Boilerplates + return nil +} + +// downloadContents recursively downloads files and directories from GitHub +func (s *BoilerplateService) downloadContents(url string, targetDir string) error { + // Make the request to GitHub API + resp, err := http.Get(url) + if err != nil { + return fmt.Errorf("failed to fetch repository contents: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("failed to fetch repository contents: %s", resp.Status) + } + + // Read the response body + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to read response body: %w", err) + } + + // Parse the response as JSON + var contents []struct { + Name string `json:"name"` + Path string `json:"path"` + Type string `json:"type"` + DownloadURL string `json:"download_url"` + } + if err := json.Unmarshal(body, &contents); err != nil { + return fmt.Errorf("failed to parse response: %w", err) + } + + // Process each item + for _, item := range contents { + targetPath := filepath.Join(targetDir, item.Name) + + if item.Type == "dir" { + // Create directory and recursively download its contents + if err := os.MkdirAll(targetPath, 0755); err != nil { + return fmt.Errorf("failed to create directory %s: %w", item.Name, err) + } + if err := s.downloadContents(item.DownloadURL, targetPath); err != nil { + return fmt.Errorf("failed to download directory %s: %w", item.Name, err) + } + } else if item.Type == "file" { + // Download the file + resp, err := http.Get(item.DownloadURL) + if err != nil { + return fmt.Errorf("failed to download file %s: %w", item.Name, err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("failed to download file %s: %s", item.Name, resp.Status) + } + + // Create the target directory if it doesn't exist + if err := os.MkdirAll(filepath.Dir(targetPath), 0755); err != nil { + return fmt.Errorf("failed to create directory for %s: %w", item.Name, err) + } + + // Create the target file + file, err := os.Create(targetPath) + if err != nil { + return fmt.Errorf("failed to create file %s: %w", item.Name, err) + } + defer file.Close() + + // Copy the file contents + if _, err := io.Copy(file, resp.Body); err != nil { + return fmt.Errorf("failed to write file %s: %w", item.Name, err) + } + } + } + + return nil +} + +// DownloadBoilerplate downloads a boilerplate from GitHub +func (s *BoilerplateService) DownloadBoilerplate(ctx context.Context, name, targetDir string) error { + config, err := s.GetBoilerplateConfig(name) + if err != nil { + return err + } + + // Create the target directory if it doesn't exist + if err := os.MkdirAll(targetDir, 0755); err != nil { + return fmt.Errorf("failed to create target directory: %w", err) + } + + // Construct the GitHub archive URL + url := fmt.Sprintf("https://github.com/%s/%s/archive/refs/heads/main.tar.gz", config.RepoOwner, config.RepoName) + + // Download the tarball + resp, err := http.Get(url) + if err != nil { + return fmt.Errorf("failed to download repository: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("failed to download repository: %s", resp.Status) + } + + // Create a gzip reader + gzr, err := gzip.NewReader(resp.Body) + if err != nil { + return fmt.Errorf("failed to create gzip reader: %w", err) + } + defer gzr.Close() + + // Create a tar reader + tr := 
tar.NewReader(gzr) + + // Extract the tarball + for { + header, err := tr.Next() + if err == io.EOF { + break + } + if err != nil { + return fmt.Errorf("failed to read tar header: %w", err) + } + + // Skip the root directory + if header.Name == fmt.Sprintf("%s-main/", config.RepoName) { + continue + } + + // If RepoPath is specified, only extract files from that path + if config.RepoPath != "" { + expectedPrefix := fmt.Sprintf("%s-main/%s/", config.RepoName, config.RepoPath) + if !strings.HasPrefix(header.Name, expectedPrefix) { + continue + } + } + + // Remove the root directory prefix + targetPath := strings.TrimPrefix(header.Name, fmt.Sprintf("%s-main/", config.RepoName)) + targetPath = filepath.Join(targetDir, targetPath) + + switch header.Typeflag { + case tar.TypeDir: + // Create directory + if err := os.MkdirAll(targetPath, 0755); err != nil { + return fmt.Errorf("failed to create directory %s: %w", targetPath, err) + } + case tar.TypeReg: + // Create parent directories + if err := os.MkdirAll(filepath.Dir(targetPath), 0755); err != nil { + return fmt.Errorf("failed to create parent directory for %s: %w", targetPath, err) + } + + // Create the file + file, err := os.OpenFile(targetPath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, os.FileMode(header.Mode)) + if err != nil { + return fmt.Errorf("failed to create file %s: %w", targetPath, err) + } + + // Copy the file contents + if _, err := io.Copy(file, tr); err != nil { + file.Close() + return fmt.Errorf("failed to write file %s: %w", targetPath, err) + } + file.Close() + } + } + + return nil +} + +// RefreshConfigs reloads the configurations from GitHub if they're older than the specified duration +func (s *BoilerplateService) RefreshConfigs(maxAge time.Duration) error { + if time.Since(s.lastFetch) > maxAge { + return s.loadConfigs() + } + return nil +} + +// GetBoilerplateConfig returns the configuration for a specific boilerplate +func (s *BoilerplateService) GetBoilerplateConfig(name string) (BoilerplateConfig, error) { + config, ok := s.configs[name] + if !ok { + return BoilerplateConfig{}, fmt.Errorf("boilerplate not found: %s", name) + } + return config, nil +} + +// GetBoilerplatesByPlatform returns all boilerplates for a specific platform +func (s *BoilerplateService) GetBoilerplatesByPlatform(platform string) []BoilerplateConfig { + var result []BoilerplateConfig + for id, config := range s.configs { + if config.Platform == platform { + config.ID = id + result = append(result, config) + } + } + return result +} + +// GetBoilerplates returns all available boilerplates +func (s *BoilerplateService) GetBoilerplates() []BoilerplateConfig { + var result []BoilerplateConfig + for id, config := range s.configs { + config.ID = id + result = append(result, config) + } + return result +} + +// GetBoilerplatesByNetworkID returns all boilerplates for a specific network +func (s *BoilerplateService) GetBoilerplatesByNetworkID(ctx context.Context, networkID int64) ([]BoilerplateConfig, error) { + // Get network platform from database + network, err := s.Queries.GetNetwork(ctx, networkID) + if err != nil { + return nil, fmt.Errorf("failed to get network: %w", err) + } + + // Get boilerplates for the network's platform + return s.GetBoilerplatesByPlatform(network.Platform), nil +} + +//go:embed configs/boilerplates.yaml +var embedFS embed.FS diff --git a/pkg/scai/boilerplates/configs/boilerplates.yaml b/pkg/scai/boilerplates/configs/boilerplates.yaml new file mode 100644 index 0000000..e40b841 --- /dev/null +++ 
b/pkg/scai/boilerplates/configs/boilerplates.yaml @@ -0,0 +1,10 @@ +boilerplates: + chaincode-fabric-ts: + name: Chaincode Fabric TypeScript + description: A TypeScript-based Hyperledger Fabric chaincode project + platform: fabric + command: npm + args: ['run', 'start:dev'] + image: docker.io/kfsoftware/chainlaunch-fabric-ts:0.0.1 + repoOwner: chainlaunch + repoName: chaincode-fabric-ts-tmpl diff --git a/pkg/scai/dirs/dirs.go b/pkg/scai/dirs/dirs.go new file mode 100644 index 0000000..4592711 --- /dev/null +++ b/pkg/scai/dirs/dirs.go @@ -0,0 +1,106 @@ +package dirs + +import ( + "errors" + "os" + "path/filepath" +) + +type DirsService struct { + Root string + // Add DB or project service reference here if needed for project validation +} + +func NewDirsService(root string) *DirsService { + return &DirsService{Root: root} +} + +// Placeholder for project validation +func (s *DirsService) validateProject(project string) error { + if project == "" { + return errors.New("project is required") + } + // TODO: Implement real project existence check + return nil +} + +func (s *DirsService) ListDirs(project, dir string) ([]string, error) { + if err := s.validateProject(project); err != nil { + return nil, err + } + if dir == "" { + dir = "." + } + // Scope to project root + base := filepath.Join(project, dir) + entries, err := os.ReadDir(base) + if err != nil { + return nil, err + } + var dirs []string + for _, entry := range entries { + if entry.IsDir() { + dirs = append(dirs, entry.Name()) + } + } + return dirs, nil +} + +func (s *DirsService) CreateDir(project, dir string) error { + if err := s.validateProject(project); err != nil { + return err + } + if dir == "" { + return errors.New("dir is required") + } + base := filepath.Join(project, dir) + return os.MkdirAll(base, 0755) +} + +func (s *DirsService) DeleteDir(project, dir string) error { + if err := s.validateProject(project); err != nil { + return err + } + if dir == "" { + return errors.New("dir is required") + } + base := filepath.Join(project, dir) + return os.RemoveAll(base) +} + +// ListEntries returns files, directories, and skipped directories in a given directory +func (s *DirsService) ListEntries(project, dir string) (files, directories, skipped []string, err error) { + if err := s.validateProject(project); err != nil { + return nil, nil, nil, err + } + if dir == "" { + dir = "." 
+ } + base := filepath.Join(project, dir) + entries, err := os.ReadDir(base) + if err != nil { + return nil, nil, nil, err + } + var filesOut, dirsOut, skippedOut []string + const maxEntries = 1000 + skipList := map[string]struct{}{"node_modules": {}} + for _, entry := range entries { + if entry.IsDir() { + name := entry.Name() + if _, skip := skipList[name]; skip { + skippedOut = append(skippedOut, name) + continue + } + dirPath := filepath.Join(base, name) + dirEntries, err := os.ReadDir(dirPath) + if err == nil && len(dirEntries) > maxEntries { + skippedOut = append(skippedOut, name) + continue + } + dirsOut = append(dirsOut, name) + } else { + filesOut = append(filesOut, entry.Name()) + } + } + return filesOut, dirsOut, skippedOut, nil +} diff --git a/pkg/scai/dirs/handlers.go b/pkg/scai/dirs/handlers.go new file mode 100644 index 0000000..f1964d7 --- /dev/null +++ b/pkg/scai/dirs/handlers.go @@ -0,0 +1,209 @@ +package dirs + +import ( + "encoding/json" + "fmt" + "net/http" + "path/filepath" + "strconv" + + "github.com/chainlaunch/chainlaunch/pkg/errors" + "github.com/chainlaunch/chainlaunch/pkg/http/response" + "github.com/chainlaunch/chainlaunch/pkg/scai/projects" + "github.com/go-chi/chi/v5" + "go.uber.org/zap" +) +// NewDirsHandler creates a new instance of DirsHandler +func NewDirsHandler(service *DirsService, projectsService *projects.ProjectsService) *DirsHandler { + return &DirsHandler{ + Service: service, + ProjectsService: projectsService, + } +} + +type DirsHandler struct { + Service *DirsService + ProjectsService *projects.ProjectsService +} + +type CreateDirRequest struct { + Project string `json:"project" example:"myproject" description:"Project name"` + Dir string `json:"dir" example:"newdir" description:"Directory to create, relative to project root"` +} + +type CreateDirResponse struct { + Status string `json:"status" example:"created" description:"Status message"` +} + +type DeleteDirRequest struct { + Project string `json:"project" example:"myproject" description:"Project name"` + Dir string `json:"dir" example:"olddir" description:"Directory to delete, relative to project root"` +} + +type DeleteDirResponse struct { + Status string `json:"status" example:"deleted" description:"Status message"` +} + +// ListEntriesResponse is a unified response for listing both files and directories +// @Description Unified response for listing files and directories in a directory +// @Success 200 {object} ListEntriesResponse +// @Failure 400 {object} errors.ErrorResponse +// @Failure 401 {object} errors.ErrorResponse +// @Failure 403 {object} errors.ErrorResponse +// @Failure 404 {object} errors.ErrorResponse +// @Failure 409 {object} errors.ErrorResponse +// @Failure 422 {object} errors.ErrorResponse +// @Failure 500 {object} errors.ErrorResponse +// @Router /api/v1/entries/list [get] +type ListEntriesResponse struct { + Files []string `json:"files" example:"[\"main.go\",\"README.md\"]" description:"List of file names"` + Directories []string `json:"directories" example:"[\"src\",\"docs\"]" description:"List of directory names"` + Skipped []string `json:"skipped,omitempty" example:"[\"node_modules\"]" description:"Directories skipped due to size or policy"` +} + +// RegisterRoutes registers directory endpoints to the router, now project-scoped +func (h *DirsHandler) RegisterRoutes(r chi.Router) { + r.Route("/projects/{projectId}/dirs", func(r chi.Router) { + r.Post("/create", response.Middleware(h.CreateDir)) + r.Delete("/delete", response.Middleware(h.DeleteDir)) + r.Get("/list", 
response.Middleware(h.ListEntries)) + }) +} + +func (h *DirsHandler) getProjectRoot(r *http.Request) (string, error) { + projectIdStr := chi.URLParam(r, "projectId") + if projectIdStr == "" { + return "", fmt.Errorf("projectId is required") + } + projectId, err := strconv.ParseInt(projectIdStr, 10, 64) + if err != nil { + return "", fmt.Errorf("invalid projectId") + } + proj, err := h.ProjectsService.GetProject(r.Context(), projectId) + if err != nil { + return "", fmt.Errorf("project not found: %w", err) + } + return filepath.Join(h.ProjectsService.ProjectsDir, proj.Slug), nil +} + +// CreateDir godoc +// @Summary Create a directory +// @Description Create a new directory in a project +// @Tags directories +// @Accept json +// @Produce json +// @Param projectId path int true "Project ID" +// @Param request body CreateDirRequest true "Directory create info" +// @Success 201 {object} CreateDirResponse +// @Failure 400 {object} response.ErrorResponse +// @Failure 401 {object} response.ErrorResponse +// @Failure 403 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 409 {object} response.ErrorResponse +// @Failure 422 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{projectId}/dirs/create [post] +func (h *DirsHandler) CreateDir(w http.ResponseWriter, r *http.Request) error { + var req CreateDirRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + return errors.NewValidationError("invalid request body", map[string]interface{}{ + "error": err.Error(), + }) + } + if req.Dir == "" { + return errors.NewValidationError("dir is required", nil) + } + projectRoot, err := h.getProjectRoot(r) + if err != nil { + return errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + // Example: forbidden directory name + if req.Dir == "forbidden" { + return errors.NewAuthorizationError("directory name is forbidden", nil) + } + // Example: conflict (directory already exists) + if req.Dir == "conflict" { + return errors.NewConflictError("directory already exists", nil) + } + if err := h.Service.CreateDir(projectRoot, req.Dir); err != nil { + return errors.NewInternalError("failed to create directory", err, nil) + } + + zap.L().Info("created dir", zap.String("projectRoot", projectRoot), zap.String("dir", req.Dir)) + return response.WriteJSON(w, http.StatusCreated, CreateDirResponse{Status: "created"}) +} + +// DeleteDir godoc +// @Summary Delete a directory +// @Description Delete a directory in a project +// @Tags directories +// @Accept json +// @Produce json +// @Param projectId path int true "Project ID" +// @Param project query string true "Project name" +// @Param dir query string true "Directory to delete, relative to project root" +// @Success 200 {object} DeleteDirResponse +// @Failure 400 {object} response.ErrorResponse +// @Failure 401 {object} response.ErrorResponse +// @Failure 403 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 409 {object} response.ErrorResponse +// @Failure 422 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{projectId}/dirs/delete [delete] +func (h *DirsHandler) DeleteDir(w http.ResponseWriter, r *http.Request) error { + dir := r.URL.Query().Get("dir") + if dir == "" { + return errors.NewValidationError("dir is required", nil) + } + projectRoot, err := h.getProjectRoot(r) + if err != nil { + return 
errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + if err := h.Service.DeleteDir(projectRoot, dir); err != nil { + return errors.NewInternalError("failed to delete directory", err, nil) + } + + zap.L().Info("deleted dir", zap.String("projectRoot", projectRoot), zap.String("dir", dir)) + return response.WriteJSON(w, http.StatusOK, DeleteDirResponse{Status: "deleted"}) +} + +// ListEntries godoc +// @Summary List files and directories +// @Description List files and directories in a given project and directory. Large directories (e.g., node_modules) are summarized/skipped. +// @Tags directories +// @Produce json +// @Param projectId path int true "Project ID" +// @Param dir query string false "Directory to list, relative to project root" +// @Success 200 {object} ListEntriesResponse +// @Failure 400 {object} response.ErrorResponse +// @Failure 401 {object} response.ErrorResponse +// @Failure 403 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 409 {object} response.ErrorResponse +// @Failure 422 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{projectId}/dirs/list [get] +func (h *DirsHandler) ListEntries(w http.ResponseWriter, r *http.Request) error { + dir := r.URL.Query().Get("dir") + if dir == "" { + dir = "." + } + projectRoot, err := h.getProjectRoot(r) + if err != nil { + return errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + files, dirs, skipped, err := h.Service.ListEntries(projectRoot, dir) + if err != nil { + return errors.NewInternalError("failed to list entries", err, nil) + } + + zap.L().Info("listed entries", zap.String("projectRoot", projectRoot), zap.String("dir", dir), zap.Int("files", len(files)), zap.Int("dirs", len(dirs)), zap.Int("skipped", len(skipped))) + return response.WriteJSON(w, http.StatusOK, ListEntriesResponse{Files: files, Directories: dirs, Skipped: skipped}) +} diff --git a/pkg/scai/files/files.go b/pkg/scai/files/files.go new file mode 100644 index 0000000..a7a901d --- /dev/null +++ b/pkg/scai/files/files.go @@ -0,0 +1,115 @@ +package files + +import ( + "errors" + "io/ioutil" + "os" + "path/filepath" +) + +type FilesService struct { +} + +func NewFilesService() *FilesService { + return &FilesService{} +} + +// Placeholder for project validation +func (s *FilesService) validateProject(project string) error { + if project == "" { + return errors.New("project is required") + } + // TODO: Implement real project existence check + return nil +} + +func (s *FilesService) ListFiles(project, dir string) ([]string, error) { + if err := s.validateProject(project); err != nil { + return nil, err + } + if dir == "" { + dir = "." 
+ } + base := filepath.Join(project, dir) + entries, err := ioutil.ReadDir(base) + if err != nil { + return nil, err + } + var files []string + for _, entry := range entries { + if !entry.IsDir() { + files = append(files, entry.Name()) + } + } + return files, nil +} + +func (s *FilesService) ReadFile(project, path string) ([]byte, error) { + if err := s.validateProject(project); err != nil { + return nil, err + } + if path == "" { + return nil, errors.New("path is required") + } + base := filepath.Join(project, path) + return ioutil.ReadFile(base) +} + +func (s *FilesService) WriteFile(project, path string, data []byte) error { + if err := s.validateProject(project); err != nil { + return err + } + if path == "" { + return errors.New("path is required") + } + base := filepath.Join(project, path) + return ioutil.WriteFile(base, data, 0644) +} + +func (s *FilesService) DeleteFile(project, path string) error { + if err := s.validateProject(project); err != nil { + return err + } + if path == "" { + return errors.New("path is required") + } + base := filepath.Join(project, path) + return os.Remove(base) +} + +// ListEntries returns files, directories, and skipped directories in a given directory +func (s *FilesService) ListEntries(project, dir string) (files, directories, skipped []string, err error) { + if err := s.validateProject(project); err != nil { + return nil, nil, nil, err + } + if dir == "" { + dir = "." + } + base := filepath.Join(project, dir) + entries, err := ioutil.ReadDir(base) + if err != nil { + return nil, nil, nil, err + } + var filesOut, dirsOut, skippedOut []string + const maxEntries = 1000 + skipList := map[string]struct{}{"node_modules": {}} + for _, entry := range entries { + if entry.IsDir() { + name := entry.Name() + if _, skip := skipList[name]; skip { + skippedOut = append(skippedOut, name) + continue + } + dirPath := filepath.Join(base, name) + dirEntries, err := ioutil.ReadDir(dirPath) + if err == nil && len(dirEntries) > maxEntries { + skippedOut = append(skippedOut, name) + continue + } + dirsOut = append(dirsOut, name) + } else { + filesOut = append(filesOut, entry.Name()) + } + } + return filesOut, dirsOut, skippedOut, nil +} diff --git a/pkg/scai/files/handlers.go b/pkg/scai/files/handlers.go new file mode 100644 index 0000000..5a587f5 --- /dev/null +++ b/pkg/scai/files/handlers.go @@ -0,0 +1,350 @@ +package files + +import ( + "encoding/json" + "fmt" + "net/http" + "os" + "path/filepath" + "strconv" + + "github.com/chainlaunch/chainlaunch/pkg/errors" + "github.com/chainlaunch/chainlaunch/pkg/http/response" + "github.com/chainlaunch/chainlaunch/pkg/scai/projects" + "github.com/go-chi/chi/v5" + "go.uber.org/zap" +) +// NewFilesHandler creates a new instance of FilesHandler +func NewFilesHandler(service *FilesService, projectsService *projects.ProjectsService) *FilesHandler { + return &FilesHandler{ + Service: service, + ProjectsService: projectsService, + } +} + +type FilesHandler struct { + Service *FilesService + ProjectsService *projects.ProjectsService +} + +type ListFilesResponse struct { + Files []string `json:"files" example:"[\"main.go\",\"README.md\"]" description:"List of file names"` +} + +type ReadFileResponse struct { + Content string `json:"content" example:"file contents" description:"File contents as string"` +} + +type WriteFileRequest struct { + Project string `json:"project" example:"myproject" description:"Project name"` + Path string `json:"path" example:"main.go" description:"File path relative to project root"` + Content string 
`json:"content" example:"new file contents" description:"New file contents as string"` +} + +type WriteFileResponse struct { + Status string `json:"status" example:"written" description:"Status message"` +} + +type DeleteFileRequest struct { + Project string `json:"project" example:"myproject" description:"Project name"` + Path string `json:"path" example:"main.go" description:"File path relative to project root"` +} + +type DeleteFileResponse struct { + Status string `json:"status" example:"deleted" description:"Status message"` +} + +// ListEntriesResponse is a unified response for listing both files and directories +// @Description Unified response for listing files and directories in a directory +// @Success 200 {object} ListEntriesResponse +// @Failure 400 {object} errors.ErrorResponse +// @Failure 401 {object} errors.ErrorResponse +// @Failure 403 {object} errors.ErrorResponse +// @Failure 404 {object} errors.ErrorResponse +// @Failure 409 {object} errors.ErrorResponse +// @Failure 422 {object} errors.ErrorResponse +// @Failure 500 {object} errors.ErrorResponse +// @Router /api/v1/entries/list [get] +type ListEntriesResponse struct { + Files []string `json:"files" example:"[\"main.go\",\"README.md\"]" description:"List of file names"` + Directories []string `json:"directories" example:"[\"src\",\"docs\"]" description:"List of directory names"` + Skipped []string `json:"skipped,omitempty" example:"[\"node_modules\"]" description:"Directories skipped due to size or policy"` +} + +// DirectoryTreeNode represents a node in the directory tree +// swagger:model +type DirectoryTreeNode struct { + Name string `json:"name"` + Path string `json:"path"` + IsDir bool `json:"isDir"` + Children []*DirectoryTreeNode `json:"children,omitempty"` +} + +// RegisterRoutes registers file endpoints to the router, now project-scoped +func (h *FilesHandler) RegisterRoutes(r chi.Router) { + r.Route("/projects/{projectId}/files", func(r chi.Router) { + r.Get("/read", response.Middleware(h.ReadFile)) + r.Post("/write", response.Middleware(h.WriteFile)) + r.Delete("/delete", response.Middleware(h.DeleteFile)) + r.Get("/list", response.Middleware(h.ListFiles)) + r.Get("/entries", response.Middleware(h.ListEntries)) + }) +} + +func (h *FilesHandler) getProjectRoot(r *http.Request) (string, error) { + projectIdStr := chi.URLParam(r, "projectId") + if projectIdStr == "" { + return "", fmt.Errorf("projectId is required") + } + projectId, err := strconv.ParseInt(projectIdStr, 10, 64) + if err != nil { + return "", fmt.Errorf("invalid projectId") + } + proj, err := h.ProjectsService.GetProject(r.Context(), projectId) + if err != nil { + return "", fmt.Errorf("project not found: %w", err) + } + return filepath.Join(h.ProjectsService.ProjectsDir, proj.Slug), nil +} + +// ListFiles godoc +// @Summary List files +// @Description List files in a given project and directory +// @Tags files +// @Accept json +// @Produce json +// @Param projectId path int true "Project ID" +// @Param dir query string false "Directory to list, relative to project root" +// @Success 200 {object} ListFilesResponse +// @Failure 400 {object} response.ErrorResponse +// @Failure 401 {object} response.ErrorResponse +// @Failure 403 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 409 {object} response.ErrorResponse +// @Failure 422 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{projectId}/files/list [get] +func (h *FilesHandler) ListFiles(w 
http.ResponseWriter, r *http.Request) error { + projectRoot, err := h.getProjectRoot(r) + if err != nil { + return errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + dir := r.URL.Query().Get("dir") + if dir == "" { + dir = "." + } + files, err := h.Service.ListFiles(projectRoot, dir) + if err != nil { + return errors.NewInternalError("failed to list files", err, nil) + } + + zap.L().Info("listed files", zap.String("projectRoot", projectRoot), zap.String("dir", dir), zap.Int("count", len(files))) + return response.WriteJSON(w, http.StatusOK, ListFilesResponse{Files: files}) +} + +// ReadFile godoc +// @Summary Read file contents +// @Description Get the contents of a file in a project +// @Tags files +// @Accept json +// @Produce json +// @Param projectId path int true "Project ID" +// @Param path query string true "File path relative to project root" +// @Success 200 {object} ReadFileResponse +// @Failure 400 {object} response.ErrorResponse +// @Failure 401 {object} response.ErrorResponse +// @Failure 403 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 409 {object} response.ErrorResponse +// @Failure 422 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{projectId}/files/read [get] +func (h *FilesHandler) ReadFile(w http.ResponseWriter, r *http.Request) error { + projectRoot, err := h.getProjectRoot(r) + if err != nil { + return errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + path := r.URL.Query().Get("path") + if path == "" { + return errors.NewValidationError("path is required", nil) + } + // Example: forbidden file + if path == "forbidden.txt" { + return errors.NewAuthorizationError("access to this file is forbidden", nil) + } + // Example: not found + if path == "notfound.txt" { + return errors.NewNotFoundError("file not found", nil) + } + content, err := h.Service.ReadFile(projectRoot, path) + if err != nil { + return errors.NewInternalError("failed to read file", err, nil) + } + + zap.L().Info("read file", zap.String("projectRoot", projectRoot), zap.String("path", path), zap.Int("size", len(content))) + return response.WriteJSON(w, http.StatusOK, ReadFileResponse{Content: string(content)}) +} + +// WriteFile godoc +// @Summary Write file contents +// @Description Write or modify the contents of a file in a project +// @Tags files +// @Accept json +// @Produce json +// @Param projectId path int true "Project ID" +// @Param request body WriteFileRequest true "File write info" +// @Success 201 {object} WriteFileResponse +// @Failure 400 {object} response.ErrorResponse +// @Failure 401 {object} response.ErrorResponse +// @Failure 403 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 409 {object} response.ErrorResponse +// @Failure 422 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{projectId}/files/write [post] +func (h *FilesHandler) WriteFile(w http.ResponseWriter, r *http.Request) error { + projectRoot, err := h.getProjectRoot(r) + if err != nil { + return errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + var req WriteFileRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + return errors.NewValidationError("invalid request body", map[string]interface{}{ + "error": err.Error(), + }) + } + 
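// NOTE (descriptive comment): the project root is resolved from the {projectId} URL parameter via getProjectRoot above; the Project field carried in the request body is not consulted here. +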
if req.Path == "" { + return errors.NewValidationError("path is required", nil) + } + // Example: forbidden file + if req.Path == "forbidden.txt" { + return errors.NewAuthorizationError("writing to this file is forbidden", nil) + } + // Example: conflict (file already exists) + if req.Path == "conflict.txt" { + return errors.NewConflictError("file already exists", nil) + } + if err := h.Service.WriteFile(projectRoot, req.Path, []byte(req.Content)); err != nil { + return errors.NewInternalError("failed to write file", err, nil) + } + + zap.L().Info("wrote file", zap.String("projectRoot", projectRoot), zap.String("path", req.Path), zap.Int("size", len(req.Content))) + return response.WriteJSON(w, http.StatusCreated, WriteFileResponse{Status: "written"}) +} + +// DeleteFile godoc +// @Summary Delete a file +// @Description Delete a file in a project +// @Tags files +// @Accept json +// @Produce json +// @Param projectId path int true "Project ID" +// @Param path query string true "File path relative to project root" +// @Success 200 {object} DeleteFileResponse +// @Failure 400 {object} response.ErrorResponse +// @Failure 401 {object} response.ErrorResponse +// @Failure 403 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 409 {object} response.ErrorResponse +// @Failure 422 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{projectId}/files/delete [delete] +func (h *FilesHandler) DeleteFile(w http.ResponseWriter, r *http.Request) error { + projectRoot, err := h.getProjectRoot(r) + if err != nil { + return errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + path := r.URL.Query().Get("path") + if path == "" { + return errors.NewValidationError("path is required", nil) + } + // Example: forbidden file + if path == "forbidden.txt" { + return errors.NewAuthorizationError("deleting this file is forbidden", nil) + } + // Example: not found + if path == "notfound.txt" { + return errors.NewNotFoundError("file not found", nil) + } + if err := h.Service.DeleteFile(projectRoot, path); err != nil { + return errors.NewInternalError("failed to delete file", err, nil) + } + + zap.L().Info("deleted file", zap.String("projectRoot", projectRoot), zap.String("path", path)) + return response.WriteJSON(w, http.StatusOK, DeleteFileResponse{Status: "deleted"}) +} + +// ListEntries godoc +// @Summary List full project directory tree +// @Description List the full directory tree for a project, excluding large/ignored folders (e.g., node_modules, .git) +// @Tags files +// @Produce json +// @Param projectId path int true "Project ID" +// @Success 200 {object} DirectoryTreeNode +// @Failure 400 {object} response.ErrorResponse +// @Failure 401 {object} response.ErrorResponse +// @Failure 403 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 409 {object} response.ErrorResponse +// @Failure 422 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{projectId}/files/entries [get] +func (h *FilesHandler) ListEntries(w http.ResponseWriter, r *http.Request) error { + projectRoot, err := h.getProjectRoot(r) + if err != nil { + return errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + tree, err := buildDirectoryTree(projectRoot, projectRoot) + if err != nil { + return errors.NewInternalError("failed to build directory 
tree", err, nil) + } + + return response.WriteJSON(w, http.StatusOK, tree) +} + +// buildDirectoryTree recursively builds the directory tree, excluding ignored folders +func buildDirectoryTree(root, current string) (*DirectoryTreeNode, error) { + ignored := map[string]bool{ + "node_modules": true, + ".git": true, + ".DS_Store": true, + } + info, err := os.Stat(current) + if err != nil { + return nil, err + } + relPath, _ := filepath.Rel(root, current) + node := &DirectoryTreeNode{ + Name: info.Name(), + Path: relPath, + IsDir: info.IsDir(), + } + if !info.IsDir() { + return node, nil + } + entries, err := os.ReadDir(current) + if err != nil { + return nil, err + } + for _, entry := range entries { + if ignored[entry.Name()] { + continue + } + childPath := filepath.Join(current, entry.Name()) + childNode, err := buildDirectoryTree(root, childPath) + if err == nil { + node.Children = append(node.Children, childNode) + } + } + return node, nil +} diff --git a/pkg/scai/projectrunner/boilerplate.go b/pkg/scai/projectrunner/boilerplate.go new file mode 100644 index 0000000..9d9b473 --- /dev/null +++ b/pkg/scai/projectrunner/boilerplate.go @@ -0,0 +1,31 @@ +package projectrunner + +import ( + "fmt" + + "github.com/chainlaunch/chainlaunch/pkg/scai/boilerplates" +) + +// BoilerplateRunnerConfig defines how to run a project for a given boilerplate type. +type BoilerplateRunnerConfig struct { + Command string + Args []string + Image string // Docker image to use for this boilerplate +} + +var boilerplateRunners = map[string]BoilerplateRunnerConfig{ + "chaincode-fabric-ts": { + Args: []string{"npm", "run", "start:dev"}, + Image: "chaincode-ts:1.0", + }, +} + +// GetBoilerplateRunner returns the command, args, and image for a given boilerplate type +func GetBoilerplateRunner(boilerplateService *boilerplates.BoilerplateService, boilerplateType string) (string, []string, string, error) { + config, err := boilerplateService.GetBoilerplateConfig(boilerplateType) + if err != nil { + return "", nil, "", fmt.Errorf("unknown boilerplate type: %s", boilerplateType) + } + + return config.Command, config.Args, config.Image, nil +} diff --git a/pkg/scai/projectrunner/runner.go b/pkg/scai/projectrunner/runner.go new file mode 100644 index 0000000..b0177fd --- /dev/null +++ b/pkg/scai/projectrunner/runner.go @@ -0,0 +1,283 @@ +package projectrunner + +import ( + "context" + "database/sql" + "fmt" + "io" + "strconv" + "time" + + "github.com/chainlaunch/chainlaunch/pkg/db" + "github.com/docker/docker/api/types/container" + "github.com/docker/docker/api/types/image" + "github.com/docker/docker/api/types/mount" + "github.com/docker/docker/client" + "github.com/docker/docker/errdefs" + "github.com/docker/go-connections/nat" +) + +type Runner struct { + docker *client.Client + queries *db.Queries +} + +func NewRunner(queries *db.Queries) *Runner { + cli, err := client.NewClientWithOpts(client.FromEnv, client.WithAPIVersionNegotiation()) + if err != nil { + panic(err) + } + return &Runner{ + docker: cli, + queries: queries, + } +} + +func (r *Runner) Start(ctx context.Context, projectID string, projectDir string, imageName string, port int, env map[string]string, args ...string) (int, error) { + containerName := fmt.Sprintf("project-%s", projectID) + + // Remove any existing container with the same name + dockerContainer, err := r.docker.ContainerInspect(ctx, containerName) + if err == nil { + _ = r.docker.ContainerRemove(ctx, dockerContainer.ID, container.RemoveOptions{Force: true}) + } + + // Check if image exists 
locally + _, err = r.docker.ImageInspect(ctx, imageName) + if errdefs.IsNotFound(err) { + // Pull the image if not found locally + rc, err := r.docker.ImagePull(ctx, imageName, image.PullOptions{}) + if err != nil { + return 0, fmt.Errorf("failed to pull image %s: %w", imageName, err) + } + _, err = io.Copy(io.Discard, rc) + if err != nil { + return 0, fmt.Errorf("failed to pull image %s: %w", imageName, err) + } + } else if err != nil { + return 0, fmt.Errorf("failed to inspect image %s: %w", imageName, err) + } + + // Create container host config with port binding + containerHostConfig := &container.HostConfig{ + PortBindings: nat.PortMap{ + nat.Port("4000/tcp"): []nat.PortBinding{ + { + HostIP: "0.0.0.0", + HostPort: strconv.Itoa(port), + }, + }, + }, + Mounts: []mount.Mount{ + { + Type: mount.TypeBind, + Source: projectDir, + Target: "/app", + }, + }, + } + + // Convert environment map to slice + envSlice := make([]string, 0, len(env)) + for k, v := range env { + envSlice = append(envSlice, fmt.Sprintf("%s=%s", k, v)) + } + + containerConfig := &container.Config{ + Image: imageName, + Cmd: args, + Tty: false, + WorkingDir: "/app", + Env: envSlice, + ExposedPorts: nat.PortSet{ + nat.Port("4000/tcp"): struct{}{}, + }, + } + resp, err := r.docker.ContainerCreate(ctx, containerConfig, containerHostConfig, nil, nil, containerName) + if err != nil { + return 0, err + } + if err := r.docker.ContainerStart(ctx, resp.ID, container.StartOptions{}); err != nil { + return 0, err + } + + // Wait for container to be ready + time.Sleep(2 * time.Second) + + // Update DB with running status + idInt64, _ := parseProjectID(projectID) + now := time.Now() + err = r.queries.UpdateProjectContainerInfo(ctx, &db.UpdateProjectContainerInfoParams{ + ContainerID: sqlNullString(resp.ID), + ContainerName: sqlNullString(containerName), + Status: sqlNullString("running"), + LastStartedAt: sqlNullTime(now), + LastStoppedAt: sqlNullTimeZero(), + ContainerPort: sql.NullInt64{Int64: int64(port), Valid: true}, + ID: idInt64, + }) + if err != nil { + return 0, err + } + return port, nil +} + +func (r *Runner) Stop(projectID string) error { + ctx := context.Background() + idInt64, _ := parseProjectID(projectID) + proj, err := r.queries.GetProject(ctx, idInt64) + if err != nil { + return err + } + if !proj.Status.Valid || proj.Status.String != "running" { + return nil + } + timeout := 5 + if err := r.docker.ContainerStop(ctx, proj.ContainerID.String, container.StopOptions{Timeout: &timeout}); err != nil { + return err + } + now := time.Now() + return r.queries.UpdateProjectContainerInfo(ctx, &db.UpdateProjectContainerInfoParams{ + ContainerID: proj.ContainerID, + ContainerName: proj.ContainerName, + Status: sqlNullString("stopped"), + LastStartedAt: proj.LastStartedAt, + LastStoppedAt: sqlNullTime(now), + ID: idInt64, + }) +} + +func (r *Runner) Restart(projectID, dir, image string, args ...string) error { + r.Stop(projectID) + _, err := r.Start(context.Background(), projectID, dir, image, 4000, nil, args...) 
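+ // Restart always rebinds the default chaincode port 4000 and discards any error returned by Stop; the container is recreated by Start.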
+ return err +} + +func (r *Runner) GetLogs(projectID string) (string, error) { + ctx := context.Background() + idInt64, _ := parseProjectID(projectID) + proj, err := r.queries.GetProject(ctx, idInt64) + if err != nil { + return "", err + } + + // Check if container exists + _, err = r.docker.ContainerInspect(ctx, proj.ContainerID.String) + if err != nil { + return "", fmt.Errorf("container not found for project %s: %w", projectID, err) + } + + reader, err := r.docker.ContainerLogs(ctx, proj.ContainerID.String, container.LogsOptions{ + ShowStdout: true, + ShowStderr: true, + Timestamps: true, + Tail: "1000", + }) + if err != nil { + return "", err + } + defer reader.Close() + + var logs []byte + header := make([]byte, 8) + for { + // Read the 8-byte header + _, err := io.ReadFull(reader, header) + if err != nil { + if err != io.EOF { + return "", fmt.Errorf("failed to read docker log header: %w", err) + } + break + } + // Get the payload length + length := int(uint32(header[4])<<24 | uint32(header[5])<<16 | uint32(header[6])<<8 | uint32(header[7])) + if length == 0 { + continue + } + // Read the payload + payload := make([]byte, length) + _, err = io.ReadFull(reader, payload) + if err != nil { + if err != io.EOF { + return "", fmt.Errorf("failed to read docker log payload: %w", err) + } + break + } + logs = append(logs, payload...) + } + return string(logs), nil +} + +func (r *Runner) StreamLogs(ctx context.Context, projectID string, onLog func([]byte)) error { + idInt64, _ := parseProjectID(projectID) + proj, err := r.queries.GetProject(ctx, idInt64) + if err != nil { + return err + } + + // Check if container exists + _, err = r.docker.ContainerInspect(ctx, proj.ContainerID.String) + if err != nil { + return fmt.Errorf("container not found for project %s: %w", projectID, err) + } + reader, err := r.docker.ContainerLogs(ctx, proj.ContainerID.String, container.LogsOptions{ + ShowStdout: true, + ShowStderr: true, + Timestamps: true, + Tail: "100", + Follow: true, + }) + if err != nil { + return err + } + defer reader.Close() + + header := make([]byte, 8) + for { + // Read the 8-byte header + _, err := io.ReadFull(reader, header) + if err != nil { + if err != io.EOF { + return fmt.Errorf("failed to read docker log header: %w", err) + } + return nil + } + // Get the payload length + length := int(uint32(header[4])<<24 | uint32(header[5])<<16 | uint32(header[6])<<8 | uint32(header[7])) + if length == 0 { + continue + } + // Read the payload + payload := make([]byte, length) + _, err = io.ReadFull(reader, payload) + if err != nil { + if err != io.EOF { + return fmt.Errorf("failed to read docker log payload: %w", err) + } + return nil + } + + select { + case <-ctx.Done(): + return nil + default: + onLog(payload) + } + } +} + +// Helpers +func sqlNullString(s string) sql.NullString { + return sql.NullString{String: s, Valid: s != ""} +} +func sqlNullTime(t time.Time) sql.NullTime { + return sql.NullTime{Time: t, Valid: !t.IsZero()} +} +func sqlNullTimeZero() sql.NullTime { + return sql.NullTime{Valid: false} +} +func parseProjectID(id string) (int64, error) { + var i int64 + _, err := fmt.Sscanf(id, "%d", &i) + return i, err +} diff --git a/pkg/scai/projects/fabric_lifecycle.go b/pkg/scai/projects/fabric_lifecycle.go new file mode 100644 index 0000000..1b4590f --- /dev/null +++ b/pkg/scai/projects/fabric_lifecycle.go @@ -0,0 +1,681 @@ +package projects + +import ( + "archive/tar" + "bytes" + "compress/gzip" + "context" + "database/sql" + "encoding/json" + "fmt" + "strings" + + 
"github.com/chainlaunch/chainlaunch/pkg/db" + fabricService "github.com/chainlaunch/chainlaunch/pkg/fabric/service" + keyMgmtService "github.com/chainlaunch/chainlaunch/pkg/keymanagement/service" + "github.com/chainlaunch/chainlaunch/pkg/networks/service" + "github.com/hyperledger/fabric-admin-sdk/pkg/chaincode" + "github.com/hyperledger/fabric-admin-sdk/pkg/identity" + fabricnetwork "github.com/hyperledger/fabric-admin-sdk/pkg/network" + gwidentity "github.com/hyperledger/fabric-gateway/pkg/identity" + pb "github.com/hyperledger/fabric-protos-go-apiv2/peer" + "go.uber.org/zap" + "google.golang.org/protobuf/proto" +) + +// FabricLifecycle implements PlatformLifecycle for Hyperledger Fabric +type FabricLifecycle struct { + queries *db.Queries + logger *zap.Logger + orgService *fabricService.OrganizationService + keyMgmtService *keyMgmtService.KeyManagementService + networkService *service.NetworkService +} + +// NewFabricLifecycle creates a new FabricLifecycle instance +func NewFabricLifecycle(queries *db.Queries, logger *zap.Logger, orgService *fabricService.OrganizationService, keyMgmtService *keyMgmtService.KeyManagementService, networkService *service.NetworkService) *FabricLifecycle { + return &FabricLifecycle{ + queries: queries, + logger: logger, + orgService: orgService, + keyMgmtService: keyMgmtService, + networkService: networkService, + } +} + +// PreStart is called before starting the project container +func (f *FabricLifecycle) PreStart(ctx context.Context, params PreStartParams) (*PreStartResult, error) { + f.logger.Info("PreStart hook for Fabric project", + zap.Int64("projectID", params.ProjectID), + zap.String("projectName", params.ProjectName), + zap.String("boilerplate", params.Boilerplate), + ) + + // Validate that the project is associated with a Fabric network + if params.Platform != "fabric" { + return nil, fmt.Errorf("project is not associated with a Fabric network") + } + + // Get network details + network, err := f.queries.GetNetwork(ctx, params.NetworkID) + if err != nil { + return nil, fmt.Errorf("failed to get network details: %w", err) + } + + // Get all organizations in the network + orgs, err := f.queries.ListFabricOrganizations(ctx) + if err != nil { + return nil, fmt.Errorf("failed to list organizations: %w", err) + } + + // Get network nodes + nodes, err := f.networkService.GetNetworkNodes(ctx, params.NetworkID) + if err != nil { + return nil, fmt.Errorf("failed to get network nodes: %w", err) + } + + // Create chaincode package + label := params.ProjectName + chaincodeEndpoint := fmt.Sprintf("%s:%d", params.HostIP, params.Port) + + // Create connection.json + connMap := map[string]interface{}{ + "address": chaincodeEndpoint, + "dial_timeout": "10s", + "tls_required": false, + "client_auth_required": false, + } + connJsonBytes, err := json.Marshal(connMap) + if err != nil { + return nil, fmt.Errorf("failed to marshal connection.json: %w", err) + } + + // Create code.tar.gz + codeTarGz, err := f.createCodeTarGz(connJsonBytes) + if err != nil { + return nil, fmt.Errorf("failed to create code.tar.gz: %w", err) + } + + // Create chaincode package + pkg, err := f.createChaincodePackage(label, codeTarGz) + if err != nil { + return nil, fmt.Errorf("failed to create chaincode package: %w", err) + } + + packageID := chaincode.GetPackageID(label, pkg) + + // Install and approve chaincode for each organization + for _, org := range orgs { + // Get admin identity + adminSignKey, err := f.keyMgmtService.GetKey(ctx, int(org.AdminSignKeyID.Int64)) + if err != nil { + 
return nil, fmt.Errorf("failed to get admin sign key: %w", err) + } + + // Get private key + privateKeyPEM, err := f.keyMgmtService.GetDecryptedPrivateKey(int(org.AdminSignKeyID.Int64)) + if err != nil { + return nil, fmt.Errorf("failed to get private key: %w", err) + } + + // Create certificate and private key objects + cert, err := gwidentity.CertificateFromPEM([]byte(*adminSignKey.Certificate)) + if err != nil { + return nil, fmt.Errorf("failed to read certificate: %w", err) + } + + priv, err := gwidentity.PrivateKeyFromPEM([]byte(privateKeyPEM)) + if err != nil { + return nil, fmt.Errorf("failed to read private key: %w", err) + } + + // Get network nodes for this organization + orgNodes, err := f.networkService.GetNetworkNodes(ctx, params.NetworkID) + if err != nil { + return nil, fmt.Errorf("failed to get network nodes: %w", err) + } + + // Check if organization has any peers + hasPeers := false + for _, node := range orgNodes { + if node.Node.NodeType == "FABRIC_PEER" && node.Node.FabricPeer != nil && node.Node.FabricPeer.MSPID == org.MspID { + hasPeers = true + break + } + } + + // Skip installation and approval if organization has no peers + if !hasPeers { + f.logger.Info("Skipping chaincode installation and approval for organization without peers", + zap.String("org", org.MspID), + ) + continue + } + + // Install on each peer + for _, node := range orgNodes { + if node.Node.NodeType == "FABRIC_PEER" { + if node.Node.FabricPeer.MSPID != org.MspID { + continue + } + + // Get peer properties + peerProps := node.Node.FabricPeer + if peerProps == nil { + return nil, fmt.Errorf("peer properties not found for node %s", node.Node.Name) + } + + // Create peer connection + peerNode := fabricnetwork.Node{ + Addr: strings.TrimPrefix(peerProps.ExternalEndpoint, "grpcs://"), + TLSCACertByte: []byte(peerProps.TLSCACert), + } + conn, err := fabricnetwork.DialConnection(peerNode) + if err != nil { + return nil, fmt.Errorf("failed to dial peer: %w", err) + } + defer conn.Close() + + // Create signing identity using peer's MSP ID + signingIdentity, err := identity.NewPrivateKeySigningIdentity(peerProps.MSPID, cert, priv) + if err != nil { + return nil, fmt.Errorf("failed to create signing identity: %w", err) + } + + // Install chaincode + peerClient := chaincode.NewPeer(conn, signingIdentity) + result, err := peerClient.Install(ctx, bytes.NewReader(pkg)) + if err != nil && !strings.Contains(err.Error(), "chaincode already successfully installed") { + return nil, fmt.Errorf("failed to install chaincode: %w", err) + } + + if result != nil { + f.logger.Info("Chaincode installed", + zap.String("packageID", result.PackageId), + zap.String("peer", peerProps.ExternalEndpoint), + zap.String("mspID", peerProps.MSPID), + ) + } else { + f.logger.Info("Chaincode already installed", + zap.String("peer", peerProps.ExternalEndpoint), + zap.String("mspID", peerProps.MSPID), + ) + } + } + } + + // Get a peer for this organization + var peerNode *service.NetworkNode + for _, node := range orgNodes { + if node.Node.NodeType == "FABRIC_PEER" && node.Node.FabricPeer != nil && node.Node.FabricPeer.MSPID == org.MspID { + peerNode = &node + break + } + } + if peerNode == nil { + f.logger.Info("Skipping chaincode approval for organization without peers", + zap.String("org", org.MspID), + ) + continue + } + + // Get peer properties + peerProps := peerNode.Node.FabricPeer + if peerProps == nil { + return nil, fmt.Errorf("peer properties not found for node %s", peerNode.Node.Name) + } + + // Create peer connection + 
peerNodeConn := fabricnetwork.Node{ + Addr: strings.TrimPrefix(peerProps.ExternalEndpoint, "grpcs://"), + TLSCACertByte: []byte(peerProps.TLSCACert), + } + peerConn, err := fabricnetwork.DialConnection(peerNodeConn) + if err != nil { + return nil, fmt.Errorf("failed to dial peer: %w", err) + } + defer peerConn.Close() + + // Create signing identity using peer's MSP ID + signingIdentity, err := identity.NewPrivateKeySigningIdentity(peerProps.MSPID, cert, priv) + if err != nil { + return nil, fmt.Errorf("failed to create signing identity: %w", err) + } + + // Create gateway + gateway := chaincode.NewGateway(peerConn, signingIdentity) + + // Check if chaincode is already committed + committedCC, err := gateway.QueryCommittedWithName( + ctx, + network.Name, + params.ProjectName, + ) + if err != nil { + f.logger.Warn("Error when getting committed chaincodes", zap.Error(err)) + } + + // Create chaincode definition + applicationPolicy, err := chaincode.NewApplicationPolicy("OR('Org1MSP123.member')", "") + if err != nil { + return nil, fmt.Errorf("failed to create application policy: %w", err) + } + + version := "1" + sequence := int64(1) + shouldCommit := committedCC == nil + + if committedCC != nil { + appPolicy := pb.ApplicationPolicy{} + err = proto.Unmarshal(committedCC.GetValidationParameter(), &appPolicy) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal application policy: %w", err) + } + + var signaturePolicyString string + switch policy := appPolicy.Type.(type) { + case *pb.ApplicationPolicy_SignaturePolicy: + signaturePolicyString = policy.SignaturePolicy.String() + default: + return nil, fmt.Errorf("unsupported policy type %T", policy) + } + + newSignaturePolicyString := applicationPolicy.String() + if signaturePolicyString != newSignaturePolicyString { + f.logger.Info("Signature policy changed", + zap.String("old", signaturePolicyString), + zap.String("new", newSignaturePolicyString), + ) + shouldCommit = true + } else { + f.logger.Info("Signature policy not changed", + zap.String("signaturePolicy", signaturePolicyString), + ) + } + + if shouldCommit { + version = committedCC.GetVersion() + sequence = committedCC.GetSequence() + 1 + } else { + version = committedCC.GetVersion() + sequence = committedCC.GetSequence() + } + f.logger.Info("Chaincode already committed", + zap.String("version", version), + zap.Int64("sequence", sequence), + ) + } + + f.logger.Info("Should commit", + zap.Bool("shouldCommit", shouldCommit), + ) + + chaincodeDef := &chaincode.Definition{ + ChannelName: network.Name, + PackageID: packageID, + Name: params.ProjectName, + Version: version, + EndorsementPlugin: "escc", + ValidationPlugin: "vscc", + Sequence: sequence, + ApplicationPolicy: applicationPolicy, + InitRequired: false, + } + + // Approve chaincode + err = gateway.Approve(ctx, chaincodeDef) + if err != nil { + // endorseError, ok := err.(client.EndorseError) + // _ = endorseError + // if ok { + // f.logger.Info("Chaincode already approved", + // zap.String("org", org.MspID), + // ) + // } else { + // return fmt.Errorf("failed to approve chaincode: %w", err) + // } + // if strings.Contains(err.Error(), "redefine uncommitted") { + // f.logger.Info("Chaincode already approved", + // zap.String("org", org.MspID), + // ) + // } else { + // return fmt.Errorf("failed to approve chaincode: %w", err) + // } + } + + f.logger.Info("Chaincode approved", + zap.String("org", peerProps.MSPID), + ) + } + + // Commit chaincode definition + // Find the first organization that has a peer + var 
firstOrgWithPeer *db.FabricOrganization + for _, org := range orgs { + // Check if this org has any peers + hasPeer := false + for _, node := range nodes { + if node.Node.NodeType == "FABRIC_PEER" && node.Node.FabricPeer != nil && node.Node.FabricPeer.MSPID == org.MspID { + hasPeer = true + break + } + } + if hasPeer { + firstOrgWithPeer = org + break + } + } + + if firstOrgWithPeer == nil { + return nil, fmt.Errorf("no organization with peers found") + } + + firstOrgWithKeys, err := f.queries.GetFabricOrganizationWithKeys(ctx, firstOrgWithPeer.ID) + if err != nil { + return nil, fmt.Errorf("failed to get organization details: %w", err) + } + + // Get admin identity for first org + adminSignKey, err := f.keyMgmtService.GetKey(ctx, int(firstOrgWithKeys.AdminSignKeyID.Int64)) + if err != nil { + return nil, fmt.Errorf("failed to get admin sign key: %w", err) + } + + privateKeyPEM, err := f.keyMgmtService.GetDecryptedPrivateKey(int(firstOrgWithKeys.AdminSignKeyID.Int64)) + if err != nil { + return nil, fmt.Errorf("failed to get private key: %w", err) + } + + cert, err := gwidentity.CertificateFromPEM([]byte(*adminSignKey.Certificate)) + if err != nil { + return nil, fmt.Errorf("failed to read certificate: %w", err) + } + + priv, err := gwidentity.PrivateKeyFromPEM([]byte(privateKeyPEM)) + if err != nil { + return nil, fmt.Errorf("failed to read private key: %w", err) + } + + // Get a peer for the first organization with peers + var peerNode *service.NetworkNode + for _, node := range nodes { + if node.Node.NodeType == "FABRIC_PEER" && node.Node.FabricPeer != nil && node.Node.FabricPeer.MSPID == firstOrgWithPeer.MspID { + peerNode = &node + break + } + } + if peerNode == nil { + return nil, fmt.Errorf("no peer found for organization %s", firstOrgWithPeer.MspID) + } + + // Get peer properties + peerProps := peerNode.Node.FabricPeer + if peerProps == nil { + return nil, fmt.Errorf("peer properties not found for node %s", peerNode.Node.Name) + } + + // Create peer connection + peerNodeConn := fabricnetwork.Node{ + Addr: strings.TrimPrefix(peerProps.ExternalEndpoint, "grpcs://"), + TLSCACertByte: []byte(peerProps.TLSCACert), + } + peerConn, err := fabricnetwork.DialConnection(peerNodeConn) + if err != nil { + return nil, fmt.Errorf("failed to dial peer: %w", err) + } + defer peerConn.Close() + + // Create signing identity using peer's MSP ID + signingIdentity, err := identity.NewPrivateKeySigningIdentity(peerProps.MSPID, cert, priv) + if err != nil { + return nil, fmt.Errorf("failed to create signing identity: %w", err) + } + + // Create gateway + gateway := chaincode.NewGateway(peerConn, signingIdentity) + + // Check if chaincode is already committed + committedCC, err := gateway.QueryCommittedWithName( + ctx, + network.Name, + params.ProjectName, + ) + if err != nil { + f.logger.Warn("Error when getting committed chaincodes", zap.Error(err)) + } + + // Create chaincode definition + applicationPolicy, err := chaincode.NewApplicationPolicy("OR('Org1MSP123.member')", "") + if err != nil { + return nil, fmt.Errorf("failed to create application policy: %w", err) + } + + version := "1" + sequence := int64(1) + shouldCommit := committedCC == nil + + if committedCC != nil { + appPolicy := pb.ApplicationPolicy{} + err = proto.Unmarshal(committedCC.GetValidationParameter(), &appPolicy) + if err != nil { + return nil, fmt.Errorf("failed to unmarshal application policy: %w", err) + } + + var signaturePolicyString string + switch policy := appPolicy.Type.(type) { + case *pb.ApplicationPolicy_SignaturePolicy: 
+ signaturePolicyString = policy.SignaturePolicy.String() + default: + return nil, fmt.Errorf("unsupported policy type %T", policy) + } + + newSignaturePolicyString := applicationPolicy.String() + if signaturePolicyString != newSignaturePolicyString { + f.logger.Info("Signature policy changed", + zap.String("old", signaturePolicyString), + zap.String("new", newSignaturePolicyString), + ) + shouldCommit = true + } else { + f.logger.Info("Signature policy not changed", + zap.String("signaturePolicy", signaturePolicyString), + ) + } + + if shouldCommit { + version = committedCC.GetVersion() + sequence = committedCC.GetSequence() + 1 + } else { + version = committedCC.GetVersion() + sequence = committedCC.GetSequence() + } + f.logger.Info("Chaincode already committed", + zap.String("version", version), + zap.Int64("sequence", sequence), + ) + } + + f.logger.Info("Should commit", + zap.Bool("shouldCommit", shouldCommit), + ) + + chaincodeDef := &chaincode.Definition{ + ChannelName: network.Name, + PackageID: packageID, + Name: params.ProjectName, + Version: version, + EndorsementPlugin: "escc", + ValidationPlugin: "vscc", + Sequence: sequence, + ApplicationPolicy: applicationPolicy, + InitRequired: false, + } + + // Commit chaincode + err = gateway.Commit(ctx, chaincodeDef) + if err != nil { + return nil, fmt.Errorf("failed to commit chaincode: %w", err) + } + + f.logger.Info("Chaincode committed successfully", + zap.String("name", params.ProjectName), + zap.String("version", version), + zap.Int64("sequence", sequence), + zap.String("mspID", peerProps.MSPID), + ) + + // Create environment variables + env := map[string]string{ + "CORE_CHAINCODE_ADDRESS": "0.0.0.0:4000", + "CORE_CHAINCODE_ID": packageID, + "CORE_PEER_TLS_ENABLED": "false", + } + + return &PreStartResult{ + Environment: env, + }, nil +} + +// createCodeTarGz creates a code.tar.gz file containing the connection.json +func (f *FabricLifecycle) createCodeTarGz(connJsonBytes []byte) ([]byte, error) { + buf := &bytes.Buffer{} + gw := gzip.NewWriter(buf) + tw := tar.NewWriter(gw) + + // Write connection.json + header := &tar.Header{ + Name: "connection.json", + Size: int64(len(connJsonBytes)), + Mode: 0755, + } + if err := tw.WriteHeader(header); err != nil { + return nil, fmt.Errorf("failed to write tar header: %w", err) + } + if _, err := tw.Write(connJsonBytes); err != nil { + return nil, fmt.Errorf("failed to write connection.json: %w", err) + } + + if err := tw.Close(); err != nil { + return nil, fmt.Errorf("failed to close tar writer: %w", err) + } + if err := gw.Close(); err != nil { + return nil, fmt.Errorf("failed to close gzip writer: %w", err) + } + + return buf.Bytes(), nil +} + +// createChaincodePackage creates a chaincode package containing metadata.json and code.tar.gz +func (f *FabricLifecycle) createChaincodePackage(label string, codeTarGz []byte) ([]byte, error) { + metadataJson := fmt.Sprintf(`{ + "type": "ccaas", + "label": "%s" + }`, label) + + buf := &bytes.Buffer{} + gw := gzip.NewWriter(buf) + tw := tar.NewWriter(gw) + + // Write metadata.json + header := &tar.Header{ + Name: "metadata.json", + Size: int64(len(metadataJson)), + Mode: 0755, + } + if err := tw.WriteHeader(header); err != nil { + return nil, fmt.Errorf("failed to write tar header: %w", err) + } + if _, err := tw.Write([]byte(metadataJson)); err != nil { + return nil, fmt.Errorf("failed to write metadata.json: %w", err) + } + + // Write code.tar.gz + header = &tar.Header{ + Name: "code.tar.gz", + Size: int64(len(codeTarGz)), + Mode: 0755, + } + if 
err := tw.WriteHeader(header); err != nil { + return nil, fmt.Errorf("failed to write tar header: %w", err) + } + if _, err := tw.Write(codeTarGz); err != nil { + return nil, fmt.Errorf("failed to write code.tar.gz: %w", err) + } + + if err := tw.Close(); err != nil { + return nil, fmt.Errorf("failed to close tar writer: %w", err) + } + if err := gw.Close(); err != nil { + return nil, fmt.Errorf("failed to close gzip writer: %w", err) + } + + return buf.Bytes(), nil +} + +// PostStart is called after the project container has started +func (f *FabricLifecycle) PostStart(ctx context.Context, params PostStartParams) error { + f.logger.Info("PostStart hook for Fabric project", + zap.Int64("projectID", params.ProjectID), + zap.String("projectName", params.ProjectName), + zap.String("containerID", params.ContainerID), + ) + + // Get network details + network, err := f.queries.GetNetwork(ctx, params.NetworkID) + if err != nil { + return fmt.Errorf("failed to get network details: %w", err) + } + + // TODO: Implement chaincode installation and approval + // This will involve: + // 1. Getting the chaincode package from the container + // 2. Installing it on the peers + // 3. Approving the chaincode definition + // 4. Committing the chaincode definition + _ = network // TODO: Use network details for chaincode installation + + f.logger.Info("Chaincode setup completed", + zap.Int64("projectID", params.ProjectID), + zap.String("projectName", params.ProjectName), + ) + + return nil +} + +// PreStop is called before stopping the project container +func (f *FabricLifecycle) PreStop(ctx context.Context, params PreStopParams) error { + f.logger.Info("PreStop hook for Fabric project", + zap.Int64("projectID", params.ProjectID), + zap.String("projectName", params.ProjectName), + zap.String("containerID", params.ContainerID), + ) + + // TODO: Implement any necessary cleanup before stopping + // This might include: + // 1. Saving chaincode state + // 2. Cleaning up temporary files + // 3. 
Updating project status + + return nil +} + +// PostStop is called after the project container has stopped +func (f *FabricLifecycle) PostStop(ctx context.Context, params PostStopParams) error { + f.logger.Info("PostStop hook for Fabric project", + zap.Int64("projectID", params.ProjectID), + zap.String("projectName", params.ProjectName), + zap.String("containerID", params.ContainerID), + ) + + // Update project status in database + err := f.queries.UpdateProjectContainerInfo(ctx, &db.UpdateProjectContainerInfoParams{ + ID: params.ProjectID, + Status: sql.NullString{String: "stopped", Valid: true}, + LastStoppedAt: sql.NullTime{Time: params.StoppedAt, Valid: true}, + }) + if err != nil { + return fmt.Errorf("failed to update project status: %w", err) + } + + return nil +} diff --git a/pkg/scai/projects/handlers.go b/pkg/scai/projects/handlers.go new file mode 100644 index 0000000..9b6e99e --- /dev/null +++ b/pkg/scai/projects/handlers.go @@ -0,0 +1,580 @@ +package projects + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "strconv" + + "github.com/chainlaunch/chainlaunch/pkg/errors" + "github.com/chainlaunch/chainlaunch/pkg/http/response" + "github.com/chainlaunch/chainlaunch/pkg/scai/versionmanagement" + "github.com/go-chi/chi/v5" + "github.com/go-chi/chi/v5/middleware" + "go.uber.org/zap" +) + +func NewProjectsHandler(service *ProjectsService, root string) *ProjectsHandler { + return &ProjectsHandler{ + Root: root, + Service: service, + } +} + +type ProjectsHandler struct { + Root string + Service *ProjectsService +} + +type CreateProjectRequest struct { + Name string `json:"name" validate:"required" example:"myproject" description:"Project name"` + Description string `json:"description" example:"A sample project" description:"Project description"` + Boilerplate string `json:"boilerplate" example:"go-basic" description:"Boilerplate template to use for scaffolding"` + NetworkID *int64 `json:"networkId,omitempty" example:"1" description:"ID of the network to link with"` +} + +type CreateProjectResponse struct { + ID int64 `json:"id" example:"1" description:"Project ID"` + Name string `json:"name" example:"myproject" description:"Project name"` + Slug string `json:"slug" example:"myproject-abc12" description:"Project slug (used for proxying and folder name)"` + Description string `json:"description" example:"A sample project" description:"Project description"` + Boilerplate string `json:"boilerplate" example:"go-basic" description:"Boilerplate template used for scaffolding"` + ContainerPort *int `json:"containerPort,omitempty" description:"Host port mapped to the container, if running"` + NetworkID *int64 `json:"networkId,omitempty" description:"ID of the linked network"` +} + +type ListProjectsResponse struct { + Projects []Project `json:"projects"` +} + +// CommitWithFileChangesAPI is the API response struct for a commit with file changes +// (mirrors versionmanagement.CommitWithFileChanges) +type CommitWithFileChangesAPI struct { + Hash string `json:"hash"` + Author string `json:"author"` + Timestamp string `json:"timestamp"` + Message string `json:"message"` + Added []string `json:"added"` + Removed []string `json:"removed"` + Modified []string `json:"modified"` + Parent *string `json:"parent"` +} + +// CommitDetailAPI is the API response struct for a single commit with file changes +// (mirrors versionmanagement.CommitWithFileChanges) +type CommitDetailAPI struct { + Hash string `json:"hash"` + Author string `json:"author"` + Timestamp string `json:"timestamp"` + Message 
string `json:"message"` + Added []string `json:"added"` + Removed []string `json:"removed"` + Modified []string `json:"modified"` + Parent *string `json:"parent"` +} + +// CommitsListResponse is the API response struct for a list of commits +// Used for OpenAPI/Swagger documentation +// swagger:model +type CommitsListResponse struct { + Commits []CommitWithFileChangesAPI `json:"commits"` +} + +// RegisterRoutes registers project endpoints to the router +func (h *ProjectsHandler) RegisterRoutes(r chi.Router) { + r.Route("/projects", func(r chi.Router) { + r.Post("/", response.Middleware(h.CreateProject)) + r.Get("/", response.Middleware(h.ListProjects)) + r.Get("/{id}", response.Middleware(h.GetProject)) + r.Post("/{id}/start", response.Middleware(h.StartProjectServer)) + r.Post("/{id}/stop", response.Middleware(h.StopProjectServer)) + r.Get("/{id}/logs", response.Middleware(h.GetProjectLogs)) + r.Get("/{id}/logs/stream", response.Middleware(h.StreamProjectLogs)) + r.Get("/{id}/commits", response.Middleware(h.GetProjectCommits)) + r.Get("/{id}/commits/{commitHash}", response.Middleware(h.GetProjectCommitDetail)) + r.Get("/{id}/diff", response.Middleware(h.GetProjectFileDiff)) + r.Get("/{id}/file_at_commit", response.Middleware(h.GetProjectFileAtCommit)) + }) +} + +// CreateProject godoc +// @Summary Create a project +// @Description Create a new project, scaffold its directory, and store it in the DB +// @Tags projects +// @Accept json +// @Produce json +// @Param request body CreateProjectRequest true "Project info" +// @Success 201 {object} CreateProjectResponse +// @Failure 400 {object} response.ErrorResponse +// @Failure 401 {object} response.ErrorResponse +// @Failure 403 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 409 {object} response.ErrorResponse +// @Failure 422 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects [post] +func (h *ProjectsHandler) CreateProject(w http.ResponseWriter, r *http.Request) error { + var req CreateProjectRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + return errors.NewValidationError("invalid request body", map[string]interface{}{ + "error": err.Error(), + }) + } + + if req.Name == "" { + return errors.NewValidationError("name is required", nil) + } + + // Example: check for forbidden name + if req.Name == "forbidden" { + return errors.NewAuthorizationError("project name is forbidden", nil) + } + + // Example: check for conflict (duplicate project) + if req.Name == "conflict" { + return errors.NewConflictError("project already exists", nil) + } + + proj, err := h.Service.CreateProject(r.Context(), req.Name, req.Description, req.Boilerplate, req.NetworkID) + if err != nil { + return errors.NewInternalError("failed to create project", err, nil) + } + + zap.L().Info("created project", zap.Int64("id", proj.ID), zap.String("name", proj.Name), zap.String("request_id", middleware.GetReqID(r.Context()))) + + return response.WriteJSON(w, http.StatusCreated, CreateProjectResponse{ + ID: proj.ID, + Name: proj.Name, + Slug: proj.Slug, + Description: proj.Description, + Boilerplate: proj.Boilerplate, + ContainerPort: proj.ContainerPort, + NetworkID: proj.NetworkID, + }) +} + +// ListProjects godoc +// @Summary List all projects +// @Description Get a list of all projects +// @Tags projects +// @Produce json +// @Success 200 {object} ListProjectsResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects [get] 
+func (h *ProjectsHandler) ListProjects(w http.ResponseWriter, r *http.Request) error { + projs, err := h.Service.ListProjects(r.Context()) + if err != nil { + return errors.NewInternalError("failed to list projects", err, nil) + } + + zap.L().Info("listed projects", zap.Int("count", len(projs)), zap.String("request_id", middleware.GetReqID(r.Context()))) + + return response.WriteJSON(w, http.StatusOK, ListProjectsResponse{Projects: projs}) +} + +// GetProject godoc +// @Summary Get a project by ID +// @Description Get details of a project by its ID +// @Tags projects +// @Produce json +// @Param id path int true "Project ID" +// @Success 200 {object} Project +// @Failure 400 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{id} [get] +func (h *ProjectsHandler) GetProject(w http.ResponseWriter, r *http.Request) error { + idStr := chi.URLParam(r, "id") + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + return errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + + proj, err := h.Service.GetProject(r.Context(), id) + if err != nil { + if err == ErrNotFound { + return errors.NewNotFoundError("project not found", nil) + } + return errors.NewInternalError("failed to get project", err, nil) + } + + zap.L().Info("got project", zap.Int64("id", proj.ID), zap.String("name", proj.Name), zap.String("request_id", middleware.GetReqID(r.Context()))) + + return response.WriteJSON(w, http.StatusOK, proj) +} + +// StartProjectServer godoc +// @Summary Start the server for a project +// @Description Start the server process for a given project using its boilerplate +// @Tags projects +// @Produce json +// @Param id path int true "Project ID" +// @Success 200 {object} map[string]string +// @Failure 400 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{id}/start [post] +func (h *ProjectsHandler) StartProjectServer(w http.ResponseWriter, r *http.Request) error { + idStr := chi.URLParam(r, "id") + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + return errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + + proj, err := h.Service.GetProject(r.Context(), id) + if err != nil { + if err == ErrNotFound { + return errors.NewNotFoundError("project not found", nil) + } + return errors.NewInternalError("failed to get project", err, nil) + } + + err = h.Service.StartProjectServer(r.Context(), proj.ID) + if err != nil { + return errors.NewInternalError("failed to start project server", err, nil) + } + + return response.WriteJSON(w, http.StatusOK, map[string]string{ + "status": "server started for project id " + idStr, + }) +} + +// StopProjectServer godoc +// @Summary Stop the server for a project +// @Description Stop the server process for a given project +// @Tags projects +// @Produce json +// @Param id path int true "Project ID" +// @Success 200 {object} map[string]string +// @Failure 400 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{id}/stop [post] +func (h *ProjectsHandler) StopProjectServer(w http.ResponseWriter, r *http.Request) error { + idStr := chi.URLParam(r, "id") + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + return 
errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + + err = h.Service.StopProjectServer(r.Context(), id) + if err != nil { + return errors.NewInternalError("failed to stop project server", err, nil) + } + + return response.WriteJSON(w, http.StatusOK, map[string]string{ + "status": "server stopped for project id " + idStr, + }) +} + +// GetProjectLogs godoc +// @Summary Get logs for a project server +// @Description Stream or return the logs for the project's running container +// @Tags projects +// @Produce text/plain +// @Param id path int true "Project ID" +// @Success 200 {string} string "Logs" +// @Failure 400 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{id}/logs [get] +func (h *ProjectsHandler) GetProjectLogs(w http.ResponseWriter, r *http.Request) error { + idStr := chi.URLParam(r, "id") + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + return errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + + logs, err := h.Service.GetProjectLogs(r.Context(), id) + if err != nil { + return errors.NewInternalError("failed to get logs", err, nil) + } + + w.Header().Set("Content-Type", "text/plain") + w.WriteHeader(http.StatusOK) + w.Write([]byte(logs)) + return nil +} + +// StreamProjectLogs godoc +// @Summary Stream real-time logs for a project server +// @Description Stream logs for the project's running container using SSE +// @Tags projects +// @Produce text/event-stream +// @Param id path int true "Project ID" +// @Success 200 {string} string "SSE stream of logs" +// @Failure 400 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{id}/logs/stream [get] +func (h *ProjectsHandler) StreamProjectLogs(w http.ResponseWriter, r *http.Request) error { + idStr := chi.URLParam(r, "id") + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + return errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + + flusher, ok := w.(http.Flusher) + if !ok { + return errors.NewInternalError("streaming not supported", nil, nil) + } + + w.Header().Set("Content-Type", "text/event-stream") + w.Header().Set("Cache-Control", "no-cache") + w.Header().Set("Connection", "keep-alive") + + ctx := r.Context() + err = h.Service.StreamProjectLogs(ctx, id, func(chunk []byte) { + fmt.Fprintf(w, "data: %s\n\n", chunk) + flusher.Flush() + }) + + if err != nil && err != context.Canceled { + return errors.NewInternalError("failed to stream logs", err, nil) + } + + return nil +} + +// GetProjectCommits godoc +// @Summary List project commits with file changes +// @Description Get a paginated list of commits for a project, including added/removed/modified files +// @Tags projects +// @Produce json +// @Param id path int true "Project ID" +// @Param page query int false "Page number (default 1)" +// @Param pageSize query int false "Page size (default 20)" +// @Success 200 {object} CommitsListResponse +// @Failure 400 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{id}/commits [get] +func (h *ProjectsHandler) GetProjectCommits(w http.ResponseWriter, r *http.Request) error { + idStr := chi.URLParam(r, "id") + id, err := 
strconv.ParseInt(idStr, 10, 64) + if err != nil { + return errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + + proj, err := h.Service.GetProject(r.Context(), id) + if err != nil { + if err == ErrNotFound { + return errors.NewNotFoundError("project not found", nil) + } + return errors.NewInternalError("failed to get project", err, nil) + } + + // Pagination params + page := 1 + pageSize := 20 + if p := r.URL.Query().Get("page"); p != "" { + if v, err := strconv.Atoi(p); err == nil && v > 0 { + page = v + } + } + if ps := r.URL.Query().Get("pageSize"); ps != "" { + if v, err := strconv.Atoi(ps); err == nil && v > 0 { + pageSize = v + } + } + + projectDir := h.Service.ProjectsDir + "/" + proj.Name + maxCommits := page * pageSize + commits, err := versionmanagement.ListCommitsWithFileChanges(r.Context(), projectDir, maxCommits) + if err != nil { + return errors.NewInternalError("failed to get commits", err, nil) + } + + // Paginate + start := (page - 1) * pageSize + if start > len(commits) { + start = len(commits) + } + end := start + pageSize + if end > len(commits) { + end = len(commits) + } + + apiCommits := make([]CommitWithFileChangesAPI, 0, end-start) + for _, c := range commits[start:end] { + apiCommits = append(apiCommits, CommitWithFileChangesAPI{ + Hash: c.Hash, + Author: c.Author, + Timestamp: c.Timestamp, + Message: c.Message, + Added: c.Added, + Removed: c.Removed, + Modified: c.Modified, + Parent: c.Parent, + }) + } + + return response.WriteJSON(w, http.StatusOK, CommitsListResponse{Commits: apiCommits}) +} + +// GetProjectCommitDetail godoc +// @Summary Get commit details +// @Description Get details for a single commit, including file changes +// @Tags projects +// @Produce json +// @Param id path int true "Project ID" +// @Param commitHash path string true "Commit hash" +// @Success 200 {object} CommitDetailAPI +// @Failure 400 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{id}/commits/{commitHash} [get] +func (h *ProjectsHandler) GetProjectCommitDetail(w http.ResponseWriter, r *http.Request) error { + idStr := chi.URLParam(r, "id") + commitHash := chi.URLParam(r, "commitHash") + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + return errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + + proj, err := h.Service.GetProject(r.Context(), id) + if err != nil { + if err == ErrNotFound { + return errors.NewNotFoundError("project not found", nil) + } + return errors.NewInternalError("failed to get project", err, nil) + } + + projectDir := h.Service.ProjectsDir + "/" + proj.Name + commits, err := versionmanagement.ListCommitsWithFileChanges(r.Context(), projectDir, 1000) + if err != nil { + return errors.NewInternalError("failed to get commits", err, nil) + } + + for _, c := range commits { + if c.Hash == commitHash { + return response.WriteJSON(w, http.StatusOK, CommitDetailAPI{ + Hash: c.Hash, + Author: c.Author, + Timestamp: c.Timestamp, + Message: c.Message, + Added: c.Added, + Removed: c.Removed, + Modified: c.Modified, + Parent: c.Parent, + }) + } + } + + return errors.NewNotFoundError("commit not found", nil) +} + +// GetProjectFileDiff godoc +// @Summary Get file diff between two commits +// @Description Get the diff of a file between two commits +// @Tags projects +// @Produce text/plain +// @Param id path int true "Project ID" +// @Param 
file query string true "File path (relative to project root)" +// @Param from query string true "From commit hash" +// @Param to query string true "To commit hash" +// @Success 200 {string} string "Diff" +// @Failure 400 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{id}/diff [get] +func (h *ProjectsHandler) GetProjectFileDiff(w http.ResponseWriter, r *http.Request) error { + idStr := chi.URLParam(r, "id") + file := r.URL.Query().Get("file") + from := r.URL.Query().Get("from") + to := r.URL.Query().Get("to") + + if file == "" || from == "" || to == "" { + return errors.NewValidationError("missing file, from, or to parameter", nil) + } + + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + return errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + + proj, err := h.Service.GetProject(r.Context(), id) + if err != nil { + if err == ErrNotFound { + return errors.NewNotFoundError("project not found", nil) + } + return errors.NewInternalError("failed to get project", err, nil) + } + + projectDir := h.Service.ProjectsDir + "/" + proj.Name + diff, err := versionmanagement.GetFileDiffBetweenCommits(r.Context(), projectDir, file, from, to) + if err != nil { + return errors.NewInternalError("failed to get diff", err, nil) + } + + w.Header().Set("Content-Type", "text/plain") + w.WriteHeader(http.StatusOK) + w.Write([]byte(diff)) + return nil +} + +// GetProjectFileAtCommit godoc +// @Summary Get file contents at a specific commit +// @Description Get the contents of a file at a specific commit hash +// @Tags projects +// @Produce text/plain +// @Param id path int true "Project ID" +// @Param file query string true "File path (relative to project root)" +// @Param commit query string true "Commit hash" +// @Success 200 {string} string "File contents" +// @Failure 400 {object} response.ErrorResponse +// @Failure 404 {object} response.ErrorResponse +// @Failure 500 {object} response.ErrorResponse +// @Router /api/v1/projects/{id}/file_at_commit [get] +func (h *ProjectsHandler) GetProjectFileAtCommit(w http.ResponseWriter, r *http.Request) error { + idStr := chi.URLParam(r, "id") + file := r.URL.Query().Get("file") + commit := r.URL.Query().Get("commit") + + if file == "" || commit == "" { + return errors.NewValidationError("missing file or commit parameter", nil) + } + + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + return errors.NewValidationError("invalid project id", map[string]interface{}{ + "error": err.Error(), + }) + } + + proj, err := h.Service.GetProject(r.Context(), id) + if err != nil { + if err == ErrNotFound { + return errors.NewNotFoundError("project not found", nil) + } + return errors.NewInternalError("failed to get project", err, nil) + } + + projectDir := h.Service.ProjectsDir + "/" + proj.Name + content, err := versionmanagement.GetFileAtCommit(r.Context(), projectDir, file, commit) + if err != nil { + return errors.NewInternalError("failed to get file at commit", err, nil) + } + + w.Header().Set("Content-Type", "text/plain") + w.WriteHeader(http.StatusOK) + w.Write([]byte(content)) + return nil +} diff --git a/pkg/scai/projects/platform.go b/pkg/scai/projects/platform.go new file mode 100644 index 0000000..2d39e88 --- /dev/null +++ b/pkg/scai/projects/platform.go @@ -0,0 +1,96 @@ +package projects + +import ( + "context" + "fmt" + "time" + + "github.com/chainlaunch/chainlaunch/pkg/db" + 
fabricService "github.com/chainlaunch/chainlaunch/pkg/fabric/service" + keyMgmtService "github.com/chainlaunch/chainlaunch/pkg/keymanagement/service" + "github.com/chainlaunch/chainlaunch/pkg/networks/service" + "go.uber.org/zap" +) + +// ProjectLifecycleParams contains common parameters for all lifecycle hooks +type ProjectLifecycleParams struct { + ProjectID int64 + ProjectName string + ProjectSlug string + NetworkID int64 + NetworkName string + Platform string + Boilerplate string +} + +// PreStartParams contains parameters for the PreStart lifecycle hook +type PreStartParams struct { + ProjectLifecycleParams + Image string + Port int + Command string + Args []string + Environment map[string]string + HostIP string +} + +// PreStartResult contains the result of the PreStart lifecycle hook +type PreStartResult struct { + Environment map[string]string +} + +// PostStartParams contains parameters for the PostStart hook +type PostStartParams struct { + ProjectLifecycleParams + ContainerID string + Image string + Port int + StartedAt time.Time + Status string + HostIP string // IP address where the smart contract will be deployed +} + +// PreStopParams contains parameters for the PreStop hook +type PreStopParams struct { + ProjectLifecycleParams + ContainerID string + StartedAt time.Time +} + +// PostStopParams contains parameters for the PostStop hook +type PostStopParams struct { + ProjectLifecycleParams + ContainerID string + StartedAt time.Time + StoppedAt time.Time +} + +// PlatformLifecycle defines the interface for platform-specific project lifecycle hooks +type PlatformLifecycle interface { + // PreStart is called before starting the project container + // It can be used to prepare the environment, validate configuration, etc. + PreStart(ctx context.Context, params PreStartParams) (*PreStartResult, error) + + // PostStart is called after the project container has started + // It can be used to perform platform-specific setup, like installing chaincode + PostStart(ctx context.Context, params PostStartParams) error + + // PreStop is called before stopping the project container + // It can be used to perform cleanup or save state + PreStop(ctx context.Context, params PreStopParams) error + + // PostStop is called after the project container has stopped + // It can be used to perform final cleanup or state updates + PostStop(ctx context.Context, params PostStopParams) error +} + +// GetPlatformLifecycle returns the appropriate lifecycle implementation for the given platform +func GetPlatformLifecycle(platform string, queries *db.Queries, orgService *fabricService.OrganizationService, keyMgmtService *keyMgmtService.KeyManagementService, networkService *service.NetworkService, logger *zap.Logger) (PlatformLifecycle, error) { + switch platform { + case "fabric": + return NewFabricLifecycle(queries, logger, orgService, keyMgmtService, networkService), nil + // Add more platform cases here as needed + default: + return nil, fmt.Errorf("unsupported platform: %s", platform) + } +} diff --git a/pkg/scai/projects/projects.go b/pkg/scai/projects/projects.go new file mode 100644 index 0000000..beea566 --- /dev/null +++ b/pkg/scai/projects/projects.go @@ -0,0 +1,68 @@ +package projects + +import ( + "context" + "database/sql" + "fmt" + "os" + "path/filepath" + + "github.com/chainlaunch/chainlaunch/pkg/db" + "github.com/chainlaunch/chainlaunch/pkg/scai/versionmanagement" +) + +const ProjectsRoot = "./data/projects" + +func ListProjects(q *db.Queries, ctx context.Context) ([]*db.Project, error) { + 
return q.ListProjects(ctx) +} + +func CreateProject(q *db.Queries, ctx context.Context, name, description string) (*db.Project, error) { + proj, err := q.CreateProject(ctx, &db.CreateProjectParams{Name: name, Description: sql.NullString{String: description, Valid: description != ""}}) + if err != nil { + return proj, err + } + projDir := filepath.Join(ProjectsRoot, name) + if err := os.MkdirAll(projDir, 0755); err != nil { + return proj, fmt.Errorf("failed to create project directory: %w", err) + } + + // Use versionmanagement to initialize the repo and make the initial commit + vm := versionmanagement.NewDefaultManager() + // Create a .gitkeep file so there's something to commit + gitkeepPath := filepath.Join(projDir, ".gitkeep") + if err := os.WriteFile(gitkeepPath, []byte{}, 0644); err != nil { + return proj, fmt.Errorf("failed to create .gitkeep: %w", err) + } + // Initialize the repo (if not already initialized) + if _, err := os.Stat(filepath.Join(projDir, ".git")); os.IsNotExist(err) { + if err := os.Chdir(projDir); err != nil { + return proj, fmt.Errorf("failed to change dir: %w", err) + } + if err := vm.CommitChange(ctx, "Initial repository"); err != nil { + return proj, fmt.Errorf("failed to initialize version management: %w", err) + } + if err := os.Chdir("../../.."); err != nil { // Return to root + return proj, fmt.Errorf("failed to return to root dir: %w", err) + } + } + return proj, nil +} + +func DeleteProject(q *db.Queries, ctx context.Context, id int64, name string) error { + if err := q.DeleteProject(ctx, id); err != nil { + return err + } + projDir := filepath.Join(ProjectsRoot, name) + if err := os.RemoveAll(projDir); err != nil { + return fmt.Errorf("failed to remove project directory: %w", err) + } + // Commit the deletion using versionmanagement + vm := versionmanagement.NewDefaultManager() + cwd, _ := os.Getwd() + if err := os.Chdir(ProjectsRoot); err == nil { + _ = vm.CommitChange(ctx, "Deleted project "+name) + _ = os.Chdir(cwd) + } + return nil +} diff --git a/pkg/scai/projects/service.go b/pkg/scai/projects/service.go new file mode 100644 index 0000000..abdb06a --- /dev/null +++ b/pkg/scai/projects/service.go @@ -0,0 +1,478 @@ +package projects + +import ( + "context" + "database/sql" + "errors" + "fmt" + "io" + "net" + "os" + "os/exec" + "path/filepath" + "sync" + "time" + + "crypto/rand" + "encoding/hex" + "strings" + + "github.com/chainlaunch/chainlaunch/pkg/common/addresses" + "github.com/chainlaunch/chainlaunch/pkg/db" + fabricService "github.com/chainlaunch/chainlaunch/pkg/fabric/service" + keyMgmtService "github.com/chainlaunch/chainlaunch/pkg/keymanagement/service" + networkservice "github.com/chainlaunch/chainlaunch/pkg/networks/service" + "github.com/chainlaunch/chainlaunch/pkg/scai/boilerplates" + "github.com/chainlaunch/chainlaunch/pkg/scai/projectrunner" + "github.com/chainlaunch/chainlaunch/pkg/scai/versionmanagement" + "github.com/go-chi/chi/v5/middleware" + "go.uber.org/zap" +) + +type ProjectsService struct { + Queries *db.Queries + Runner *projectrunner.Runner + ProjectsDir string + BoilerplateService *boilerplates.BoilerplateService + OrgService *fabricService.OrganizationService + KeyMgmtService *keyMgmtService.KeyManagementService + NetworkService *networkservice.NetworkService +} + +type Project struct { + ID int64 `json:"id" example:"1" description:"Project ID"` + Name string `json:"name" example:"myproject" description:"Project name"` + Slug string `json:"slug" example:"myproject-abc12" description:"Project slug (used for proxying and 
folder name)"` + Description string `json:"description" example:"A sample project" description:"Project description"` + Boilerplate string `json:"boilerplate" example:"go-basic" description:"Boilerplate template used for scaffolding"` + Status string `json:"status" example:"running" description:"Project container status (running/stopped/etc)"` + LastStartedAt *string `json:"lastStartedAt,omitempty" description:"Last time the project was started (RFC3339)"` + LastStoppedAt *string `json:"lastStoppedAt,omitempty" description:"Last time the project was stopped (RFC3339)"` + ContainerPort *int `json:"containerPort,omitempty" description:"Host port mapped to the container, if running"` + NetworkID *int64 `json:"networkId,omitempty" description:"ID of the linked network"` +} + +// ProjectProcessManager manages running server processes for projects +var projectProcessManager = struct { + mu sync.Mutex + servers map[int64]*exec.Cmd +}{servers: make(map[int64]*exec.Cmd)} + +// NewProjectsService creates a new ProjectsService instance +func NewProjectsService(queries *db.Queries, runner *projectrunner.Runner, projectsDir string, orgService *fabricService.OrganizationService, keyMgmtService *keyMgmtService.KeyManagementService, networkService *networkservice.NetworkService) (*ProjectsService, error) { + boilerplateService, err := boilerplates.NewBoilerplateService(queries) + if err != nil { + return nil, err + } + return &ProjectsService{ + Queries: queries, + Runner: runner, + ProjectsDir: projectsDir, + BoilerplateService: boilerplateService, + OrgService: orgService, + KeyMgmtService: keyMgmtService, + NetworkService: networkService, + }, nil +} + +func getReqID(ctx context.Context) string { + if reqID, ok := ctx.Value(middleware.RequestIDKey).(string); ok { + return reqID + } + return "" +} + +// Helper to copy a directory recursively +func copyDir(src string, dst string) error { + return filepath.Walk(src, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + relPath, err := filepath.Rel(src, path) + if err != nil { + return err + } + destPath := filepath.Join(dst, relPath) + if info.IsDir() { + return os.MkdirAll(destPath, info.Mode()) + } + srcFile, err := os.Open(path) + if err != nil { + return err + } + defer srcFile.Close() + dstFile, err := os.OpenFile(destPath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, info.Mode()) + if err != nil { + return err + } + defer dstFile.Close() + _, err = io.Copy(dstFile, srcFile) + return err + }) +} + +func generateShortGUID(n int) (string, error) { + b := make([]byte, n) + _, err := rand.Read(b) + if err != nil { + return "", err + } + return hex.EncodeToString(b)[:n], nil +} + +func generateSlug(name string, queries *db.Queries, ctx context.Context) (string, error) { + base := strings.ToLower(strings.ReplaceAll(name, " ", "-")) + for { + guid, err := generateShortGUID(5) + if err != nil { + return "", err + } + slug := base + "-" + guid + // Check uniqueness + _, err = queries.GetProjectBySlug(ctx, slug) + if err != nil && err != sql.ErrNoRows { + return "", err + } + if err == sql.ErrNoRows { + return slug, nil + } + // else, collision, try again + } +} + +func (s *ProjectsService) CreateProject(ctx context.Context, name, description, boilerplate string, networkID *int64) (Project, error) { + slug, err := generateSlug(name, s.Queries, ctx) + if err != nil { + return Project{}, err + } + proj, err := s.Queries.CreateProject(ctx, &db.CreateProjectParams{ + Name: name, + Description: sql.NullString{String: description, Valid: 
description != ""}, + Boilerplate: sql.NullString{String: boilerplate, Valid: boilerplate != ""}, + Slug: slug, + NetworkID: sql.NullInt64{Int64: *networkID, Valid: networkID != nil}, + }) + if err != nil { + zap.L().Error("DB error in CreateProject", zap.String("name", name), zap.Error(err), zap.String("request_id", getReqID(ctx))) + return Project{}, err + } + zap.L().Info("created project in DB", zap.Int64("id", proj.ID), zap.String("name", proj.Name), zap.String("slug", proj.Slug), zap.String("request_id", getReqID(ctx))) + + // Download boilerplate if specified + if boilerplate != "" { + projectDir := filepath.Join(s.ProjectsDir, slug) + if err := s.BoilerplateService.DownloadBoilerplate(ctx, boilerplate, projectDir); err != nil { + zap.L().Error("failed to download boilerplate", zap.String("boilerplate", boilerplate), zap.Error(err)) + return Project{}, err + } + + // Ensure git repository is initialized before committing + gitDir := filepath.Join(projectDir, ".git") + if _, err := os.Stat(gitDir); os.IsNotExist(err) { + // Initialize the repo using go-git + _, err := versionmanagement.InitRepo(projectDir) + if err != nil { + zap.L().Error("failed to initialize git repo", zap.Error(err)) + } + } + vm := versionmanagement.NewDefaultManager() + cwd, _ := os.Getwd() + if err := os.Chdir(projectDir); err == nil { + err = vm.CommitChange(ctx, "Initial commit for project "+name) + if err != nil { + zap.L().Error("failed to commit", zap.Error(err)) + } + if err := os.Chdir(cwd); err != nil { + zap.L().Error("failed to return to original directory", zap.Error(err)) + } + } + } + return dbProjectToAPI(proj), nil +} + +func (s *ProjectsService) ListProjects(ctx context.Context) ([]Project, error) { + dbProjects, err := s.Queries.ListProjects(ctx) + if err != nil { + zap.L().Error("DB error in ListProjects", zap.Error(err), zap.String("request_id", getReqID(ctx))) + return nil, err + } + var projects []Project + for _, p := range dbProjects { + projects = append(projects, dbProjectToAPI(p)) + } + zap.L().Info("listed projects from DB", zap.Int("count", len(projects)), zap.String("request_id", getReqID(ctx))) + return projects, nil +} + +func (s *ProjectsService) GetProject(ctx context.Context, id int64) (Project, error) { + p, err := s.Queries.GetProject(ctx, id) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + zap.L().Warn("project not found in DB", zap.Int64("id", id), zap.String("request_id", getReqID(ctx))) + return Project{}, ErrNotFound + } + zap.L().Error("DB error in GetProject", zap.Int64("id", id), zap.Error(err), zap.String("request_id", getReqID(ctx))) + return Project{}, err + } + zap.L().Info("got project from DB", zap.Int64("id", p.ID), zap.String("name", p.Name), zap.String("request_id", getReqID(ctx))) + return dbProjectToAPI(p), nil +} + +func dbProjectToAPI(p *db.Project) Project { + var started, stopped *string + if p.LastStartedAt.Valid { + ts := p.LastStartedAt.Time.UTC().Format(time.RFC3339) + started = &ts + } + if p.LastStoppedAt.Valid { + ts := p.LastStoppedAt.Time.UTC().Format(time.RFC3339) + stopped = &ts + } + var containerPort *int + if p.ContainerPort.Valid { + v := int(p.ContainerPort.Int64) + containerPort = &v + } + var networkID *int64 + if p.NetworkID.Valid { + networkID = &p.NetworkID.Int64 + } + return Project{ + ID: p.ID, + Name: p.Name, + Slug: p.Slug, + Description: p.Description.String, + Boilerplate: p.Boilerplate.String, + Status: p.Status.String, + LastStartedAt: started, + LastStoppedAt: stopped, + ContainerPort: containerPort, + NetworkID: 
networkID, + } +} + +var ErrNotFound = errors.New("not found") + +// findAvailablePort finds an available port starting from the given port +func findAvailablePort(startPort int) (int, error) { + maxAttempts := 100 + for port := startPort; port < startPort+maxAttempts; port++ { + addr := fmt.Sprintf(":%d", port) + listener, err := net.Listen("tcp", addr) + if err == nil { + listener.Close() + return port, nil + } + } + return 0, fmt.Errorf("no available ports found after %d attempts starting from %d", maxAttempts, startPort) +} + +// StartProjectServer starts the server process for a project +func (s *ProjectsService) StartProjectServer(ctx context.Context, projectID int64) error { + project, err := s.Queries.GetProject(ctx, projectID) + if err != nil { + return fmt.Errorf("failed to get project: %w", err) + } + + if !project.Boilerplate.Valid { + return fmt.Errorf("project has no boilerplate configured") + } + projectDB, err := s.Queries.GetProject(ctx, projectID) + if err != nil { + return fmt.Errorf("failed to get project: %w", err) + } + networkDB, err := s.Queries.GetNetwork(ctx, projectDB.NetworkID.Int64) + if err != nil { + return fmt.Errorf("failed to get network: %w", err) + } + + // Get the appropriate lifecycle implementation for the platform + lifecycle, err := GetPlatformLifecycle(networkDB.Platform, s.Queries, s.OrgService, s.KeyMgmtService, s.NetworkService, zap.L()) + if err != nil { + zap.L().Warn("failed to get platform lifecycle, continuing without lifecycle hooks", + zap.String("platform", project.Boilerplate.String), + zap.Error(err), + ) + } + + command, args, image, err := projectrunner.GetBoilerplateRunner(s.BoilerplateService, project.Boilerplate.String) + if err != nil { + return fmt.Errorf("failed to get boilerplate runner: %w", err) + } + + projectDir, err := filepath.Abs(filepath.Join(s.ProjectsDir, project.Slug)) + if err != nil { + return err + } + + // Get the host IP for smart contract deployment + hostIP, err := addresses.GetExternalIP() + if err != nil { + zap.L().Warn("failed to get host IP, using localhost", + zap.Error(err), + ) + hostIP = "127.0.0.1" + } + + // Find an available port + port, err := findAvailablePort(40000) + if err != nil { + return fmt.Errorf("failed to find available port: %w", err) + } + + // Call PreStart lifecycle hook if available + var env map[string]string + if lifecycle != nil { + preStartParams := PreStartParams{ + ProjectLifecycleParams: ProjectLifecycleParams{ + ProjectID: project.ID, + ProjectName: project.Name, + ProjectSlug: project.Slug, + NetworkID: project.NetworkID.Int64, + NetworkName: networkDB.Name, + Platform: networkDB.Platform, + Boilerplate: project.Boilerplate.String, + }, + Image: image, + Port: port, + Command: command, + Args: args, + Environment: make(map[string]string), + HostIP: hostIP, + } + result, err := lifecycle.PreStart(ctx, preStartParams) + if err != nil { + return fmt.Errorf("pre-start lifecycle hook failed: %w", err) + } + if result != nil { + env = result.Environment + } + } + + // Prepend the command to the args + allArgs := append([]string{command}, args...) + port, err = s.Runner.Start(ctx, fmt.Sprintf("%d", projectID), projectDir, image, port, env, allArgs...) 
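+ // Use the port returned by the runner from here on: it is the host port actually bound for the container and is the value handed to the PostStart hook below.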
+ if err != nil { + return err + } + + // Call PostStart lifecycle hook if available + if lifecycle != nil { + postStartParams := PostStartParams{ + ProjectLifecycleParams: ProjectLifecycleParams{ + ProjectID: project.ID, + ProjectName: project.Name, + ProjectSlug: project.Slug, + NetworkID: project.NetworkID.Int64, + NetworkName: networkDB.Name, + Platform: networkDB.Platform, + Boilerplate: project.Boilerplate.String, + }, + ContainerID: project.ContainerID.String, + Image: image, + Port: port, + StartedAt: time.Now(), + Status: "running", + HostIP: hostIP, + } + if err := lifecycle.PostStart(ctx, postStartParams); err != nil { + // Log the error but don't fail the start operation + zap.L().Error("post-start lifecycle hook failed", + zap.Int64("projectID", project.ID), + zap.Error(err), + ) + } + } + + return nil +} + +// StopProjectServer stops the server process for a project +func (s *ProjectsService) StopProjectServer(ctx context.Context, projectID int64) error { + project, err := s.Queries.GetProject(ctx, projectID) + if err != nil { + return fmt.Errorf("failed to get project: %w", err) + } + + // Get the appropriate lifecycle implementation for the platform + projectDB, err := s.Queries.GetProject(ctx, projectID) + if err != nil { + return fmt.Errorf("failed to get project: %w", err) + } + networkDB, err := s.Queries.GetNetwork(ctx, projectDB.NetworkID.Int64) + if err != nil { + return fmt.Errorf("failed to get network: %w", err) + } + + // Get the appropriate lifecycle implementation for the platform + lifecycle, err := GetPlatformLifecycle(networkDB.Platform, s.Queries, s.OrgService, s.KeyMgmtService, s.NetworkService, zap.L()) + if err != nil { + zap.L().Warn("failed to get platform lifecycle, continuing without lifecycle hooks", + zap.String("platform", project.Boilerplate.String), + zap.Error(err), + ) + } + // Call PreStop lifecycle hook if available + if lifecycle != nil { + preStopParams := PreStopParams{ + ProjectLifecycleParams: ProjectLifecycleParams{ + ProjectID: project.ID, + ProjectName: project.Name, + ProjectSlug: project.Slug, + NetworkID: project.NetworkID.Int64, + NetworkName: networkDB.Name, + Platform: networkDB.Platform, + Boilerplate: project.Boilerplate.String, + }, + ContainerID: project.ContainerID.String, + StartedAt: project.LastStartedAt.Time, + } + if err := lifecycle.PreStop(ctx, preStopParams); err != nil { + // Log the error but don't fail the stop operation + zap.L().Error("pre-stop lifecycle hook failed", + zap.Int64("projectID", project.ID), + zap.Error(err), + ) + } + } + + if err := s.Runner.Stop(fmt.Sprintf("%d", projectID)); err != nil { + return err + } + + // Call PostStop lifecycle hook if available + if lifecycle != nil { + now := time.Now() + postStopParams := PostStopParams{ + ProjectLifecycleParams: ProjectLifecycleParams{ + ProjectID: project.ID, + ProjectName: project.Name, + ProjectSlug: project.Slug, + NetworkID: project.NetworkID.Int64, + NetworkName: networkDB.Name, + Platform: networkDB.Platform, + Boilerplate: project.Boilerplate.String, + }, + ContainerID: project.ContainerID.String, + StartedAt: project.LastStartedAt.Time, + StoppedAt: now, + } + if err := lifecycle.PostStop(ctx, postStopParams); err != nil { + // Log the error but don't fail the stop operation + zap.L().Error("post-stop lifecycle hook failed", + zap.Int64("projectID", project.ID), + zap.Error(err), + ) + } + } + + return nil +} + +func (s *ProjectsService) GetProjectLogs(ctx context.Context, projectID int64) (string, error) { + return 
s.Runner.GetLogs(fmt.Sprintf("%d", projectID)) +} + +func (s *ProjectsService) StreamProjectLogs(ctx context.Context, projectID int64, onLog func([]byte)) error { + return s.Runner.StreamLogs(ctx, fmt.Sprintf("%d", projectID), onLog) +} diff --git a/pkg/scai/scai.go b/pkg/scai/scai.go new file mode 100644 index 0000000..75e7c41 --- /dev/null +++ b/pkg/scai/scai.go @@ -0,0 +1 @@ +package scai diff --git a/pkg/scai/server/proxy.go b/pkg/scai/server/proxy.go new file mode 100644 index 0000000..6dd4360 --- /dev/null +++ b/pkg/scai/server/proxy.go @@ -0,0 +1,103 @@ +package server + +import ( + "crypto/tls" + "fmt" + "io" + "net" + "strings" + "sync" + + "github.com/chainlaunch/chainlaunch/pkg/logger" +) + +// StartSNIProxy starts a TLS proxy server that routes connections based on SNI slug. +// lookupBackend(slug) should return the backend address (host:port) for the given slug. +func StartSNIProxy(addr, certFile, keyFile string, lookupBackend func(slug string) (string, error), logger *logger.Logger) error { + cert, err := tls.LoadX509KeyPair(certFile, keyFile) + if err != nil { + return fmt.Errorf("failed to load TLS cert/key: %w", err) + } + tlsConfig := &tls.Config{ + Certificates: []tls.Certificate{cert}, + GetConfigForClient: func(chi *tls.ClientHelloInfo) (*tls.Config, error) { + // Accept all SNI, but log it + logger.Info("TLS handshake", "server_name", chi.ServerName) + return nil, nil + }, + } + ln, err := tls.Listen("tcp", addr, tlsConfig) + if err != nil { + return fmt.Errorf("failed to listen on %s: %w", addr, err) + } + logger.Info("SNI proxy listening", "address", addr) + for { + conn, err := ln.Accept() + if err != nil { + logger.Error("accept failed", "error", err) + continue + } + go handleSNIConn(conn, lookupBackend, logger) + } +} + +func handleSNIConn(conn net.Conn, lookupBackend func(slug string) (string, error), logger *logger.Logger) { + tlsConn, ok := conn.(*tls.Conn) + if !ok { + logger.Error("connection is not TLS") + conn.Close() + return + } + var closeOnce sync.Once + if err := tlsConn.Handshake(); err != nil { + logger.Error("TLS handshake failed", "error", err) + return + } + sni := tlsConn.ConnectionState().ServerName + if sni == "" { + logger.Error("no SNI provided") + return + } + // Parse slug from SNI: expect {slug}.{domain} + parts := strings.SplitN(sni, ".", 2) + if len(parts) < 2 { + logger.Error("invalid SNI format", "sni", sni) + return + } + slug := parts[0] + backendAddr, err := lookupBackend(slug) + if err != nil { + logger.Error("backend lookup failed", "slug", slug, "error", err) + return + } + backendConn, err := net.Dial("tcp", backendAddr) + if err != nil { + logger.Error("failed to connect to backend", "backend", backendAddr, "error", err) + return + } + // Use a mutex to guard closing both connections + var mu sync.Mutex + closeBoth := func() { + mu.Lock() + defer mu.Unlock() + closeOnce.Do(func() { + backendConn.Close() + conn.Close() + }) + } + logger.Info("proxying connection", "slug", slug, "backend", backendAddr) + // Proxy data in both directions using goroutines and waitgroup + var wg sync.WaitGroup + wg.Add(2) + go func() { + defer wg.Done() + io.Copy(backendConn, tlsConn) + closeBoth() + }() + go func() { + defer wg.Done() + io.Copy(tlsConn, backendConn) + closeBoth() + }() + wg.Wait() +} diff --git a/pkg/scai/sessionchanges/sessionchanges.go b/pkg/scai/sessionchanges/sessionchanges.go new file mode 100644 index 0000000..0b10077 --- /dev/null +++ b/pkg/scai/sessionchanges/sessionchanges.go @@ -0,0 +1,29 @@ +package sessionchanges + 
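+ +// sessionchanges keeps a process-wide, mutex-guarded set of file paths that were created, modified, or deleted during the current session; GetAndResetChanges returns the collected paths and clears the set.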
+import ( + "sync" +) + +var ( + mu sync.Mutex + modifiedFiles = make(map[string]struct{}) +) + +// RegisterChange registers a file as changed (created/modified/deleted) during the session. +func RegisterChange(filePath string) { + mu.Lock() + defer mu.Unlock() + modifiedFiles[filePath] = struct{}{} +} + +// GetAndResetChanges returns the list of changed files and resets the tracker. +func GetAndResetChanges() []string { + mu.Lock() + defer mu.Unlock() + files := make([]string, 0, len(modifiedFiles)) + for f := range modifiedFiles { + files = append(files, f) + } + modifiedFiles = make(map[string]struct{}) + return files +} diff --git a/pkg/scai/versionmanagement/errors.go b/pkg/scai/versionmanagement/errors.go new file mode 100644 index 0000000..6a5a1ae --- /dev/null +++ b/pkg/scai/versionmanagement/errors.go @@ -0,0 +1,5 @@ +package versionmanagement + +import "errors" + +var ErrUnsupportedOperation = errors.New("unsupported operation") diff --git a/pkg/scai/versionmanagement/git.go b/pkg/scai/versionmanagement/git.go new file mode 100644 index 0000000..707e211 --- /dev/null +++ b/pkg/scai/versionmanagement/git.go @@ -0,0 +1,182 @@ +package versionmanagement + +import ( + "context" + "fmt" + "io" + "strings" + "time" + + git "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/config" + "github.com/go-git/go-git/v5/plumbing" + "github.com/go-git/go-git/v5/plumbing/object" +) + +// CommitChange commits all staged changes with the provided message using go-git. +func (m *DefaultManager) CommitChange(ctx context.Context, message string) error { + repo, err := git.PlainOpen(".") + if err != nil { + return fmt.Errorf("failed to open git repo: %w", err) + } + + w, err := repo.Worktree() + if err != nil { + return fmt.Errorf("failed to get worktree: %w", err) + } + + // Add all changes + if err := w.AddWithOptions(&git.AddOptions{All: true}); err != nil { + return fmt.Errorf("failed to add changes: %w", err) + } + + // Get author info from git config or use default + cfg, err := repo.ConfigScoped(config.SystemScope) + name := "AI Bot" + email := "ai@localhost" + if err == nil && cfg.User.Name != "" && cfg.User.Email != "" { + name = cfg.User.Name + email = cfg.User.Email + } + + commit, err := w.Commit(message, &git.CommitOptions{ + Author: &object.Signature{ + Name: name, + Email: email, + When: time.Now(), + }, + }) + if err != nil { + if err == git.NoErrAlreadyUpToDate { + return nil + } + return fmt.Errorf("failed to commit: %w", err) + } + + _ = commit + return nil +} + +// ListHistory returns the commit history. +func (m *DefaultManager) ListHistory(ctx context.Context) ([]VersionEntry, error) { + repo, err := git.PlainOpen(".") + if err != nil { + return nil, fmt.Errorf("failed to open git repo: %w", err) + } + ref, err := repo.Head() + if err != nil { + return nil, fmt.Errorf("failed to get HEAD: %w", err) + } + cIter, err := repo.Log(&git.LogOptions{From: ref.Hash()}) + if err != nil { + return nil, fmt.Errorf("failed to get log: %w", err) + } + defer cIter.Close() + + var history []VersionEntry + err = cIter.ForEach(func(c *object.Commit) error { + history = append(history, VersionEntry{ + ID: c.Hash.String(), + Author: c.Author.Name, + Timestamp: c.Author.When.Format(time.RFC3339), + Message: c.Message, + }) + return nil + }) + if err != nil { + return nil, fmt.Errorf("failed to iterate commits: %w", err) + } + return history, nil +} + +// CheckoutVersion checks out the specified commit or branch. 
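+// It first tries versionID as a branch name and, if that checkout fails, treats it as a commit hash; both attempts use Force, so local changes in the worktree are overwritten.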
+func (m *DefaultManager) CheckoutVersion(ctx context.Context, versionID string) error { + repo, err := git.PlainOpen(".") + if err != nil { + return fmt.Errorf("failed to open git repo: %w", err) + } + w, err := repo.Worktree() + if err != nil { + return fmt.Errorf("failed to get worktree: %w", err) + } + // Try as branch first + branchRef := plumbing.NewBranchReferenceName(versionID) + err = w.Checkout(&git.CheckoutOptions{ + Branch: branchRef, + Force: true, + }) + if err == nil { + return nil + } + // Try as commit hash + commitHash := plumbing.NewHash(versionID) + err = w.Checkout(&git.CheckoutOptions{ + Hash: commitHash, + Force: true, + }) + if err != nil { + return fmt.Errorf("failed to checkout version %s: %w", versionID, err) + } + return nil +} + +// GetCurrentVersion returns the current commit info. +func (m *DefaultManager) GetCurrentVersion(ctx context.Context) (VersionEntry, error) { + repo, err := git.PlainOpen(".") + if err != nil { + return VersionEntry{}, fmt.Errorf("failed to open git repo: %w", err) + } + ref, err := repo.Head() + if err != nil { + return VersionEntry{}, fmt.Errorf("failed to get HEAD: %w", err) + } + commit, err := repo.CommitObject(ref.Hash()) + if err != nil { + return VersionEntry{}, fmt.Errorf("failed to get commit: %w", err) + } + return VersionEntry{ + ID: commit.Hash.String(), + Author: commit.Author.Name, + Timestamp: commit.Author.When.Format(time.RFC3339), + Message: commit.Message, + }, nil +} + +// DiffVersions returns the diff between two versions. +func (m *DefaultManager) DiffVersions(ctx context.Context, fromID, toID string) (string, error) { + repo, err := git.PlainOpen(".") + if err != nil { + return "", fmt.Errorf("failed to open git repo: %w", err) + } + fromHash := plumbing.NewHash(fromID) + toHash := plumbing.NewHash(toID) + fromCommit, err := repo.CommitObject(fromHash) + if err != nil { + return "", fmt.Errorf("failed to get from commit: %w", err) + } + toCommit, err := repo.CommitObject(toHash) + if err != nil { + return "", fmt.Errorf("failed to get to commit: %w", err) + } + fromTree, err := fromCommit.Tree() + if err != nil { + return "", fmt.Errorf("failed to get from tree: %w", err) + } + toTree, err := toCommit.Tree() + if err != nil { + return "", fmt.Errorf("failed to get to tree: %w", err) + } + diffs, err := fromTree.Diff(toTree) + if err != nil { + return "", fmt.Errorf("failed to get diff: %w", err) + } + var sb strings.Builder + for _, patch := range diffs { + patchObj, err := patch.Patch() + if err != nil { + return "", fmt.Errorf("failed to get patch: %w", err) + } + io.WriteString(&sb, patchObj.String()) + } + return sb.String(), nil +} diff --git a/pkg/scai/versionmanagement/types.go b/pkg/scai/versionmanagement/types.go new file mode 100644 index 0000000..bfa647b --- /dev/null +++ b/pkg/scai/versionmanagement/types.go @@ -0,0 +1,4 @@ +package versionmanagement + +// VersionEntry and VersionManager are defined in versionmanager.go for now. +// This file is reserved for future shared types and interfaces. 
diff --git a/pkg/scai/versionmanagement/versionmanager.go b/pkg/scai/versionmanagement/versionmanager.go new file mode 100644 index 0000000..9008cd9 --- /dev/null +++ b/pkg/scai/versionmanagement/versionmanager.go @@ -0,0 +1,198 @@ +package versionmanagement + +import ( + "context" + "io" + "time" + + "bytes" + + "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/plumbing" + "github.com/go-git/go-git/v5/plumbing/object" +) + +// VersionEntry represents a single version (commit) in the project history. +type VersionEntry struct { + ID string + Author string + Timestamp string + Message string +} + +// VersionManager defines the interface for versioning operations. +type VersionManager interface { + CommitChange(ctx context.Context, message string) error + ListHistory(ctx context.Context) ([]VersionEntry, error) + CheckoutVersion(ctx context.Context, versionID string) error + GetCurrentVersion(ctx context.Context) (VersionEntry, error) + DiffVersions(ctx context.Context, fromID, toID string) (string, error) +} + +// DefaultManager is the default implementation using Git as backend. +type DefaultManager struct{} + +func NewDefaultManager() *DefaultManager { + return &DefaultManager{} +} + +// InitRepo initializes a git repository in the given directory if it does not exist. +func InitRepo(dir string) (*git.Repository, error) { + return git.PlainInit(dir, false) +} + +type CommitWithFileChanges struct { + Hash string + Author string + Timestamp string + Message string + Added []string + Removed []string + Modified []string + Parent *string +} + +// ListCommitsWithFileChanges returns a list of commits for the given directory, with file changes for each commit. +func ListCommitsWithFileChanges(ctx context.Context, repoDir string, maxCommits int) ([]CommitWithFileChanges, error) { + repo, err := git.PlainOpen(repoDir) + if err != nil { + return nil, err + } + ref, err := repo.Head() + if err != nil { + return nil, err + } + cIter, err := repo.Log(&git.LogOptions{From: ref.Hash()}) + if err != nil { + return nil, err + } + defer cIter.Close() + + var commits []CommitWithFileChanges + count := 0 + err = cIter.ForEach(func(c *object.Commit) error { + if maxCommits > 0 && count >= maxCommits { + return io.EOF + } + var added, removed, modified []string + var parentHash *string + if c.NumParents() > 0 { + p := c.ParentHashes[0].String() + parentHash = &p + parent, err := c.Parent(0) + if err == nil { + patch, err := parent.Patch(c) + if err == nil { + parentTree, _ := parent.Tree() + currTree, _ := c.Tree() + for _, stat := range patch.Stats() { + // Check file existence in parent and current tree + _, errParent := parentTree.File(stat.Name) + _, errCurr := currTree.File(stat.Name) + if errParent != nil && errCurr == nil { + // Not in parent, in current: Added + added = append(added, stat.Name) + } else if errParent == nil && errCurr != nil { + // In parent, not in current: Removed + removed = append(removed, stat.Name) + } else if errParent == nil && errCurr == nil { + // In both: Modified + modified = append(modified, stat.Name) + } + } + } + } + } + commits = append(commits, CommitWithFileChanges{ + Hash: c.Hash.String(), + Author: c.Author.Name, + Timestamp: c.Author.When.Format(time.RFC3339), + Message: c.Message, + Added: added, + Removed: removed, + Modified: modified, + Parent: parentHash, + }) + count++ + return nil + }) + if err != nil && err != io.EOF { + return nil, err + } + return commits, nil +} + +// GetFileDiffBetweenCommits returns the diff of a file between two commits in 
the given repo directory. +func GetFileDiffBetweenCommits(ctx context.Context, repoDir, filePath, fromHash, toHash string) (string, error) { + repo, err := git.PlainOpen(repoDir) + if err != nil { + return "", err + } + fromCommit, err := repo.CommitObject(plumbing.NewHash(fromHash)) + if err != nil { + return "", err + } + toCommit, err := repo.CommitObject(plumbing.NewHash(toHash)) + if err != nil { + return "", err + } + fromTree, err := fromCommit.Tree() + if err != nil { + return "", err + } + toTree, err := toCommit.Tree() + if err != nil { + return "", err + } + fromEntry, _ := fromTree.File(filePath) + toEntry, _ := toTree.File(filePath) + var fromContent, toContent string + if fromEntry != nil { + fromContent, _ = fromEntry.Contents() + } + if toEntry != nil { + toContent, _ = toEntry.Contents() + } + // Use a simple diff (unified format) + udiff := UnifiedDiff(fromContent, toContent, filePath, fromHash, toHash) + return udiff, nil +} + +// UnifiedDiff returns a unified diff string for two file contents. +func UnifiedDiff(a, b, file, fromHash, toHash string) string { + // This is a simple implementation; for more advanced, use a diff library + if a == b { + return "No changes." + } + var buf bytes.Buffer + buf.WriteString("--- " + file + " (" + fromHash + ")\n") + buf.WriteString("+++ " + file + " (" + toHash + ")\n") + buf.WriteString("@@ ... @@\n") + buf.WriteString(b) + return buf.String() +} + +// GetFileAtCommit returns the contents of a file at a specific commit hash in the given repo directory. +func GetFileAtCommit(ctx context.Context, repoDir, filePath, commitHash string) (string, error) { + repo, err := git.PlainOpen(repoDir) + if err != nil { + return "", err + } + commit, err := repo.CommitObject(plumbing.NewHash(commitHash)) + if err != nil { + return "", err + } + tree, err := commit.Tree() + if err != nil { + return "", err + } + file, err := tree.File(filePath) + if err != nil { + return "", err + } + content, err := file.Contents() + if err != nil { + return "", err + } + return content, nil +} diff --git a/sqlc.yaml b/sqlc.yaml index 7a96f18..d58c45c 100644 --- a/sqlc.yaml +++ b/sqlc.yaml @@ -1,7 +1,9 @@ version: "2" sql: - engine: "sqlite" - queries: "pkg/db/queries.sql" + queries: + - "pkg/db/queries.sql" + - "pkg/db/dev-queries.sql" schema: "pkg/db/migrations" gen: go: diff --git a/web/src/components/networks/FabricNetworkDetails.tsx b/web/src/components/networks/FabricNetworkDetails.tsx index ab81b4d..2a43c7a 100644 --- a/web/src/components/networks/FabricNetworkDetails.tsx +++ b/web/src/components/networks/FabricNetworkDetails.tsx @@ -619,8 +619,8 @@ export default function FabricNetworkDetails({ network }: FabricNetworkDetailsPr const peerOrgs = useMemo( () => - Object.keys(channelConfig?.config?.data?.data?.[0]?.payload?.data?.config?.channel_group?.groups?.Application?.groups || {}).filter((mspId) => - fabricOrgs?.items?.find((org) => org.mspId === mspId)!! + Object.keys(channelConfig?.config?.data?.data?.[0]?.payload?.data?.config?.channel_group?.groups?.Application?.groups || {}).filter( + (mspId) => fabricOrgs?.items?.find((org) => org.mspId === mspId)!! 
), [channelConfig, fabricOrgs] ) @@ -916,8 +916,8 @@ export default function FabricNetworkDetails({ network }: FabricNetworkDetailsPr {channelConfig?.config?.data?.data?.[0]?.payload?.data?.config?.channel_group?.groups?.Application?.groups && (() => { - const filteredOrgs = Object.entries(channelConfig.config.data.data[0].payload.data.config.channel_group.groups.Application.groups).filter(([mspId]) => - fabricOrgs?.items?.find((org) => org.mspId === mspId)!! + const filteredOrgs = Object.entries(channelConfig.config.data.data[0].payload.data.config.channel_group.groups.Application.groups).filter( + ([mspId]) => fabricOrgs?.items?.find((org) => org.mspId === mspId)!! ) if (filteredOrgs.length === 0) { @@ -993,12 +993,7 @@ export default function FabricNetworkDetails({ network }: FabricNetworkDetailsPr chaincode={
{networkNodes?.nodes?.find((node) => node.status === 'joined' && node.node?.nodeType === 'FABRIC_PEER') ? ( - node.status === 'joined' && node.node?.nodeType === 'FABRIC_PEER')!.node!.id!} - channelName={network.name!} - organizationId={selectedOrg?.id!} - /> + ) : (
diff --git a/web/src/components/networks/chaincode-management.tsx b/web/src/components/networks/chaincode-management.tsx index 0579711..6adab5a 100644 --- a/web/src/components/networks/chaincode-management.tsx +++ b/web/src/components/networks/chaincode-management.tsx @@ -1,120 +1,336 @@ -import { HttpNetworkResponse } from '@/api/client' -import { - getNodesByIdChannelsByChannelIdChaincodesOptions, - postScFabricDeployMutation, - postScFabricPeerByPeerIdChaincodeApproveMutation, - postScFabricPeerByPeerIdChaincodeCommitMutation, - postScFabricPeerByPeerIdChaincodeInstallMutation, -} from '@/api/client/@tanstack/react-query.gen' -import { Button } from '@/components/ui/button' +import { HttpGetNetworkNodesResponse, HttpNetworkResponse } from '@/api/client' +import { getNodesByIdChannelsByChannelIdChaincodesOptions, getOrganizationsOptions } from '@/api/client/@tanstack/react-query.gen' import { Card } from '@/components/ui/card' -import { Dialog, DialogContent, DialogDescription, DialogFooter, DialogHeader, DialogTitle, DialogTrigger } from '@/components/ui/dialog' -import { Form, FormControl, FormField, FormItem, FormLabel, FormMessage } from '@/components/ui/form' -import { Input } from '@/components/ui/input' import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select' -import { Textarea } from '@/components/ui/textarea' -import { useMutation, useQuery } from '@tanstack/react-query' -import { AlertTriangle, Code, FileCode, Loader2, Plus } from 'lucide-react' -import { useState } from 'react' -import { useForm } from 'react-hook-form' -import { toast } from 'sonner' -import { zodResolver } from '@hookform/resolvers/zod' -import * as z from 'zod' -import { Badge } from '../ui/badge' +import { useQuery } from '@tanstack/react-query' +import { AlertTriangle, Check, Code, Copy } from 'lucide-react' +import { useEffect, useMemo, useState } from 'react' +import ReactMarkdown from 'react-markdown' +import SyntaxHighlighter, { SyntaxHighlighterProps } from 'react-syntax-highlighter' +import { docco } from 'react-syntax-highlighter/dist/esm/styles/hljs' +import rehypeRaw from 'rehype-raw' +import { Skeleton } from '../ui/skeleton' +const SyntaxHighlighterComp = SyntaxHighlighter as unknown as React.ComponentType +// Update the CHAINCODE_INSTRUCTIONS to be a function that takes parameters +const getChainCodeInstructions = (channelName: string, mspId: string) => { + // Get the current origin and append /api/v1 + const apiUrl = typeof window !== 'undefined' ? 
`${window.location.origin}/api/v1` : 'http://localhost:8100/api/v1' + + return ` +# Chaincode Installation Guide + +## Clone the Repository + +First, clone the chaincode repository: + +\`\`\`bash +git clone https://github.com/kfs-learn/chaincode-typescript +cd chaincode-typescript +\`\`\` + +## Install Required Tools + +### Install bun.sh + +We need to install bun.sh to run the project: + +\`\`\`bash +curl -fsSL https://bun.sh/install | bash +\`\`\` + +### Install Node.JS using NVM + +First, install NVM: + +\`\`\`bash +curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.1/install.sh | bash +\`\`\` + +Then, install Node.JS using NVM: + +\`\`\`bash +nvm install v22 +nvm use default v22 +\`\`\` + +### Install Dependencies + +Install the project dependencies: + +\`\`\`bash +bun install +\`\`\` + +## Start Chaincode + +### Pull Network Configuration + +First, set up environment variables and pull the network configuration: + +\`\`\`bash +export CHANNEL_NAME=${channelName} +export MSP_ID=${mspId} +export URL="${apiUrl}" +export CHAINLAUNCH_USER=admin +export CHAINLAUNCH_PASSWORD="" + +chainlaunch fabric network-config pull \\ + --network=$CHANNEL_NAME \\ + --msp-id=$MSP_ID \\ + --url=$URL \\ + --username="$CHAINLAUNCH_USER" \\ + --password="$CHAINLAUNCH_PASSWORD" \\ + --output=network-config.yaml +\`\`\` + +### Start the Chaincode Service + +Set up additional environment variables and start the chaincode: + +\`\`\`bash +export CHANNEL_NAME=${channelName} +export CHAINCODE_NAME=basic +export CHAINCODE_ADDRESS="localhost:9996" # Chaincode listening address +export USER_NAME=admin +export MSP_ID=${mspId} + +chainlaunch fabric install --local \\ + --config=$PWD/network-config.yaml \\ + --channel=$CHANNEL_NAME \\ + --chaincode=$CHAINCODE_NAME \\ + -o $MSP_ID -u $USER_NAME \\ + --policy="OR('\${MSP_ID}.member')" \\ + --chaincodeAddress="\${CHAINCODE_ADDRESS}" \\ + --envFile=$PWD/.env + +bun run build +bun start:dev +\`\`\` + +### Initialize and Test the Chaincode + +Initialize the ledger and verify it's working: + +\`\`\`bash +export CHANNEL_NAME=${channelName} +export CHAINCODE_NAME=basic +export MSP_ID=${mspId} + +# Initialize the ledger +chainlaunch fabric invoke \\ + --chaincode=$CHAINCODE_NAME \\ + --config=network-config.yaml \\ + --channel $CHANNEL_NAME \\ + --fcn InitLedger \\ + --user=admin \\ + --mspID=$MSP_ID + +# Query all assets to verify +chainlaunch fabric query \\ + --chaincode=$CHAINCODE_NAME \\ + --config=network-config.yaml \\ + --channel $CHANNEL_NAME \\ + --fcn GetAllAssets \\ + --user=admin \\ + --mspID=$MSP_ID +\`\`\` +` +} + +function CopyButton({ text }: { text: string }) { + const [copied, setCopied] = useState(false) + + const copy = () => { + navigator.clipboard.writeText(text) + setCopied(true) + setTimeout(() => setCopied(false), 2000) + } + + return ( + + ) +} interface ChaincodeManagementProps { network: HttpNetworkResponse - peerId: number - channelName: string - organizationId: number + channelConfig: Record + networkNodes: HttpGetNetworkNodesResponse } -const chaincodeFormSchema = z.object({ - name: z.string().min(1, 'Name is required'), -}) +export function ChaincodeManagement({ networkNodes, network, channelConfig }: ChaincodeManagementProps) { + const [selectedOrg, setSelectedOrg] = useState<{ id: number; mspId: string } | null>(null) + + const { data: fabricOrgs } = useQuery({ + ...getOrganizationsOptions(), + }) + + const peerOrgs = useMemo( + () => + 
Object.keys(channelConfig?.config?.data?.data?.[0]?.payload?.data?.config?.channel_group?.groups?.Application?.groups || {}).filter( + (mspId) => fabricOrgs?.items?.find((org) => org.mspId === mspId)!! + ), + [channelConfig, fabricOrgs] + ) + useEffect(() => { + if (peerOrgs?.length) { + const org = fabricOrgs?.items?.find((org) => peerOrgs.includes(org.mspId!)) + if (org) { + setSelectedOrg({ id: org.id!, mspId: org.mspId! }) + } + } + }, [fabricOrgs]) + console.log(selectedOrg) + return ( +
+
+
+ +
+
+

Chaincode Installation

+

Instructions for installing and managing chaincode

+
+
+ + {networkNodes?.nodes?.find((node) => node.status === 'joined' && node.node?.nodeType === 'FABRIC_PEER') && ( + node.status === 'joined' && node.node?.nodeType === 'FABRIC_PEER')!.node!.id!} + /> + )} -type ChaincodeFormValues = z.infer + +
+ +
-export function ChaincodeManagement({ }: ChaincodeManagementProps) { - const [isDeployDialogOpen, setIsDeployDialogOpen] = useState(false) - const [selectedChaincode, setSelectedChaincode] = useState<{ name: string } | null>(null) +
+

{children}

, + h2: ({ children }) =>

{children}

, + h3: ({ children }) =>

{children}

, + h4: ({ children }) =>

{children}

, + h5: ({ children }) =>
{children}
, + h6: ({ children }) =>
{children}
, + code: ({ node, className, children, ...props }) => { + const match = /language-(\w+)/.exec(className || '') + const content = Array.isArray(children) ? children.join('') : String(children) + + return match ? ( +
+ + + {content} + +
+ ) : ( + + {children} + + ) + }, + p: ({ children }) =>

{children}

, + ul: ({ children }) =>
    {children}
, + ol: ({ children }) =>
    {children}
, + blockquote: ({ children }) =>
{children}
, + }} + > + {getChainCodeInstructions(network.name!, selectedOrg?.mspId || '')} +
+
+
+
+ ) +} - const form = useForm({ - resolver: zodResolver(chaincodeFormSchema), - defaultValues: { - name: '', - }, +function CommittedChaincodes({ networkId, channelName, peerId }: { networkId: number; channelName: string; peerId: number }) { + const { data: chaincodes, isLoading } = useQuery({ + ...getNodesByIdChannelsByChannelIdChaincodesOptions({ + path: { + id: peerId, + channelID: channelName, + }, + }), }) - const onSubmit = async (data: ChaincodeFormValues) => { - try { - // For now, just log the chaincode name - console.log('Chaincode name:', data.name) - toast.success('Chaincode name recorded successfully') - setIsDeployDialogOpen(false) - } catch (error: any) { - toast.error('Failed to record chaincode name', { - description: error.message, - }) - } + if (isLoading) { + return + } + + if (!chaincodes || chaincodes.length === 0) { + return ( + +
+ +

No chaincodes have been committed to this channel

+
+
+ ) } return ( -
-
+ +
-

Chaincode Management

-

Record chaincode names for your network

+

Committed Chaincodes

+

Chaincodes that have been committed to this channel

- - - - - - - Record New Chaincode - Record a new chaincode name for your network - -
- - ( - - Name - - - - - - )} - /> - - - - - -
-
-
- -
- -

Chaincode recording is in development. Only names are being recorded for now.

+
+ + + + + + + + + + + {chaincodes.map((chaincode) => ( + + + + + + + ))} + +
NameVersionSequenceInit Required
{chaincode.name}{chaincode.version}{chaincode.sequence}{chaincode.initRequired ? 'Yes' : 'No'}
- -
+
+ ) -} \ No newline at end of file +}