initial commit
commit f32c78a94b

.idea/.gitignore: 10 lines (generated, vendored, Normal file)
@@ -0,0 +1,10 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Environment-dependent path to Maven home directory
/mavenHomeManager.xml
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

.idea/misc.xml: 6 lines (generated, Normal file)
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectRootManager" version="2" languageLevel="JDK_23" default="true" project-jdk-name="temurin-23" project-jdk-type="JavaSDK">
    <output url="file://$PROJECT_DIR$/out" />
  </component>
</project>

.idea/modules.xml: 8 lines (generated, Normal file)
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/test-ai-code-agents.iml" filepath="$PROJECT_DIR$/.idea/test-ai-code-agents.iml" />
    </modules>
  </component>
</project>

.idea/test-ai-code-agents.iml: 9 lines (generated, Normal file)
@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
  <component name="NewModuleRootManager" inherit-compiler-output="true">
    <exclude-output />
    <content url="file://$MODULE_DIR$" />
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>

.idea/vcs.xml: 6 lines (generated, Normal file)
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
  </component>
</project>

package-lock.json: 126 lines (generated, Normal file)
@@ -0,0 +1,126 @@
{
  "name": "test-ai-code-agents",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "devDependencies": {
        "@types/express": "^5.0.3"
      }
    },
    "node_modules/@types/body-parser": {
      "version": "1.19.6",
      "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.6.tgz",
      "integrity": "sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "@types/connect": "*",
        "@types/node": "*"
      }
    },
    "node_modules/@types/connect": {
      "version": "3.4.38",
      "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz",
      "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "@types/node": "*"
      }
    },
    "node_modules/@types/express": {
      "version": "5.0.3",
      "resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.3.tgz",
      "integrity": "sha512-wGA0NX93b19/dZC1J18tKWVIYWyyF2ZjT9vin/NRu0qzzvfVzWjs04iq2rQ3H65vCTQYlRqs3YHfY7zjdV+9Kw==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "@types/body-parser": "*",
        "@types/express-serve-static-core": "^5.0.0",
        "@types/serve-static": "*"
      }
    },
    "node_modules/@types/express-serve-static-core": {
      "version": "5.0.6",
      "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-5.0.6.tgz",
      "integrity": "sha512-3xhRnjJPkULekpSzgtoNYYcTWgEZkp4myc+Saevii5JPnHNvHMRlBSHDbs7Bh1iPPoVTERHEZXyhyLbMEsExsA==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "@types/node": "*",
        "@types/qs": "*",
        "@types/range-parser": "*",
        "@types/send": "*"
      }
    },
    "node_modules/@types/http-errors": {
      "version": "2.0.5",
      "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.5.tgz",
      "integrity": "sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==",
      "dev": true,
      "license": "MIT"
    },
    "node_modules/@types/mime": {
      "version": "1.3.5",
      "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz",
      "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==",
      "dev": true,
      "license": "MIT"
    },
    "node_modules/@types/node": {
      "version": "22.15.30",
      "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.30.tgz",
      "integrity": "sha512-6Q7lr06bEHdlfplU6YRbgG1SFBdlsfNC4/lX+SkhiTs0cpJkOElmWls8PxDFv4yY/xKb8Y6SO0OmSX4wgqTZbA==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "undici-types": "~6.21.0"
      }
    },
    "node_modules/@types/qs": {
      "version": "6.14.0",
      "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz",
      "integrity": "sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==",
      "dev": true,
      "license": "MIT"
    },
    "node_modules/@types/range-parser": {
      "version": "1.2.7",
      "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz",
      "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==",
      "dev": true,
      "license": "MIT"
    },
    "node_modules/@types/send": {
      "version": "0.17.5",
      "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.5.tgz",
      "integrity": "sha512-z6F2D3cOStZvuk2SaP6YrwkNO65iTZcwA2ZkSABegdkAh/lf+Aa/YQndZVfmEXT5vgAp6zv06VQ3ejSVjAny4w==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "@types/mime": "^1",
        "@types/node": "*"
      }
    },
    "node_modules/@types/serve-static": {
      "version": "1.15.8",
      "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.8.tgz",
      "integrity": "sha512-roei0UY3LhpOJvjbIP6ZZFngyLKl5dskOtDhxY5THRSpO+ZI+nzJ+m5yUMzGrp89YRa7lvknKkMYjqQFGwA7Sg==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "@types/http-errors": "*",
        "@types/node": "*",
        "@types/send": "*"
      }
    },
    "node_modules/undici-types": {
      "version": "6.21.0",
      "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
      "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
      "dev": true,
      "license": "MIT"
    }
  }
}

package.json: 5 lines (Normal file)
@@ -0,0 +1,5 @@
{
  "devDependencies": {
    "@types/express": "^5.0.3"
  }
}

src/functions/prompts-to-test-spec/.env.example: 27 lines (Normal file)
@@ -0,0 +1,27 @@
# Main repository configuration
MAIN_REPO_URL=https://github.com/Ebitda-SRL/test-ai-code-agents.git
# Use either token or username/password for main repo
MAIN_REPO_TOKEN=your_token_here
# OR
MAIN_REPO_USERNAME=your_username_here
MAIN_REPO_PASSWORD=your_password_here

# GitHub credentials
# Use either token or username/password for GitHub
GITHUB_TOKEN=your_github_token_here
# OR
GITHUB_USERNAME=your_github_username_here
GITHUB_PASSWORD=your_github_password_here

# Gitea credentials
GITEA_USERNAME=your_gitea_username_here
GITEA_PASSWORD=your_gitea_password_here

# Google Cloud configuration
GOOGLE_CLOUD_PROJECT_ID=your_gcp_project_id_here
GOOGLE_CLOUD_LOCATION=us-central1
GEMINI_MODEL=gemini-1.5-pro

# Function configuration
# Set to 'true' to enable debug logging
DEBUG=false

src/functions/prompts-to-test-spec/.gitignore: 2 lines (vendored, Normal file)
@@ -0,0 +1,2 @@
node_modules/
dist/

src/functions/prompts-to-test-spec/README.md: 135 lines (Normal file)
@@ -0,0 +1,135 @@
# Prompts to Test Spec

A Google Cloud Function that processes workitem prompts and generates test specifications.

## Overview

This function:
1. Clones the main repository containing prompts
2. Iterates over each project in the prompts/ directory
3. Clones the project repository
4. Uses the Gemini API to apply guidelines from the project's AI.md file
5. Creates a pull request in the project repository with the generated test specifications

## Prerequisites

- Node.js 20 or later
- Google Cloud CLI
- Google Cloud account with access to:
  - Cloud Functions
  - Vertex AI (for Gemini API)
- Git credentials for the repositories

## Setup

1. Clone this repository
2. Navigate to the function directory:
   ```
   cd src/functions/prompts-to-test-spec
   ```
3. Install dependencies:
   ```
   npm install
   ```
4. Create a `.env` file based on the `.env.example` template:
   ```
   cp .env.example .env
   ```
5. Edit the `.env` file with your credentials and configuration

## Environment Variables

The function requires several environment variables to be set:

### Main Repository Configuration
- `MAIN_REPO_URL`: URL of the main repository containing prompts
- `MAIN_REPO_TOKEN` or `MAIN_REPO_USERNAME`/`MAIN_REPO_PASSWORD`: Credentials for the main repository

### GitHub Credentials
- `GITHUB_TOKEN` or `GITHUB_USERNAME`/`GITHUB_PASSWORD`: Credentials for GitHub repositories

### Gitea Credentials
- `GITEA_USERNAME`/`GITEA_PASSWORD`: Credentials for Gitea repositories

### Google Cloud Configuration
- `GOOGLE_CLOUD_PROJECT_ID`: Your Google Cloud project ID
- `GOOGLE_CLOUD_LOCATION`: Google Cloud region (default: us-central1)
- `GEMINI_MODEL`: Gemini model to use (default: gemini-1.5-pro)

## Local Development

To run the function locally:

1. Build the function:
   ```
   npm run build
   ```

2. Start the function:
   ```
   npm start
   ```

3. Test the HTTP endpoint:
   ```
   curl http://localhost:8080
   ```

## Testing

Run the tests:
```
npm test
```

Run tests in watch mode:
```
npm run test:watch
```

## Deployment

### HTTP Trigger

Deploy the function with an HTTP trigger:
```
npm run deploy
```

### Event Trigger

Deploy the function with a Cloud Storage event trigger:
```
npm run deploy:event
```

Note: You'll need to update the `YOUR_BUCKET_NAME` in the package.json file with your actual Cloud Storage bucket name.

## Architecture

The function is organized into several services:

- **RepositoryService**: Handles Git operations like cloning repositories and creating branches
- **ProjectService**: Finds and processes projects and workitems
- **GeminiService**: Interacts with the Gemini API to generate test specifications
- **PullRequestService**: Creates pull requests in project repositories
- **ProcessorService**: Orchestrates the entire process

## Project Structure

```
src/
├── index.ts                      # Main entry point
├── types.ts                      # Type definitions
└── services/                     # Service modules
    ├── repository-service.ts     # Git operations
    ├── project-service.ts        # Project and workitem processing
    ├── gemini-service.ts         # Gemini API integration
    ├── pull-request-service.ts   # Pull request creation
    ├── processor-service.ts      # Process orchestration
    └── __tests__/                # Unit tests
```

## License

This project is licensed under the MIT License.

src/functions/prompts-to-test-spec/jest.config.js: 19 lines (Normal file)
@@ -0,0 +1,19 @@
module.exports = {
  preset: 'ts-jest',
  testEnvironment: 'node',
  roots: ['<rootDir>/src'],
  testMatch: ['**/__tests__/**/*.test.ts'],
  collectCoverageFrom: [
    'src/**/*.ts',
    '!src/**/*.d.ts',
    '!src/**/__tests__/**',
  ],
  coverageDirectory: 'coverage',
  transform: {
    '^.+\\.tsx?$': ['ts-jest', {
      tsconfig: 'tsconfig.json',
    }],
  },
  moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'],
  setupFiles: ['dotenv/config'],
};

src/functions/prompts-to-test-spec/package-lock.json: 5766 lines (generated, Normal file)
File diff suppressed because it is too large

src/functions/prompts-to-test-spec/package.json: 36 lines (Normal file)
@@ -0,0 +1,36 @@
{
  "name": "prompts-to-test-spec",
  "version": "1.0.0",
  "scripts": {
    "build": "tsc",
    "start": "node dist/index.js",
    "prestart": "npm run build",
    "deploy": "gcloud functions deploy promptToTestSpecHttp --gen2 --runtime=nodejs20 --source=. --trigger-http --allow-unauthenticated",
    "deploy:event": "gcloud functions deploy promptToTestSpecEvent --gen2 --runtime=nodejs20 --source=. --trigger-event=google.cloud.storage.object.v1.finalized --trigger-resource=YOUR_BUCKET_NAME",
    "clean": "rm -rf dist",
    "test": "jest",
    "test:watch": "jest --watch"
  },
  "main": "dist/index.js",
  "dependencies": {
    "@google-cloud/functions-framework": "^3.0.0",
    "@google-cloud/vertexai": "^0.5.0",
    "axios": "^1.6.7",
    "dotenv": "^16.4.5",
    "simple-git": "^3.23.0"
  },
  "devDependencies": {
    "@types/express": "^5.0.3",
    "@types/jest": "^29.5.12",
    "@types/node": "^20.11.30",
    "jest": "^29.7.0",
    "ts-jest": "^29.1.2",
    "typescript": "^5.8.3"
  },
  "engines": {
    "node": ">=20"
  },
  "files": [
    "dist"
  ]
}

src/functions/prompts-to-test-spec/src/config.ts: 101 lines (Normal file)
@@ -0,0 +1,101 @@
/**
 * Configuration module for loading environment variables
 */
import * as dotenv from 'dotenv';
import * as path from 'path';

// Load environment variables from .env file
dotenv.config({ path: path.resolve(__dirname, '../.env') });

// Main repository configuration
export const MAIN_REPO_URL = process.env.MAIN_REPO_URL || '';
export const MAIN_REPO_TOKEN = process.env.MAIN_REPO_TOKEN;
export const MAIN_REPO_USERNAME = process.env.MAIN_REPO_USERNAME;
export const MAIN_REPO_PASSWORD = process.env.MAIN_REPO_PASSWORD;

// GitHub credentials
export const GITHUB_TOKEN = process.env.GITHUB_TOKEN;
export const GITHUB_USERNAME = process.env.GITHUB_USERNAME;
export const GITHUB_PASSWORD = process.env.GITHUB_PASSWORD;

// Gitea credentials
export const GITEA_USERNAME = process.env.GITEA_USERNAME;
export const GITEA_PASSWORD = process.env.GITEA_PASSWORD;

// Google Cloud configuration
export const GOOGLE_CLOUD_PROJECT_ID = process.env.GOOGLE_CLOUD_PROJECT_ID || '';
export const GOOGLE_CLOUD_LOCATION = process.env.GOOGLE_CLOUD_LOCATION || 'us-central1';
export const GEMINI_MODEL = process.env.GEMINI_MODEL || 'gemini-1.5-pro';

// Function configuration
export const DEBUG = process.env.DEBUG === 'true';

// Validate required configuration
export function validateConfig(): void {
  const missingVars: string[] = [];

  if (!MAIN_REPO_URL) {
    missingVars.push('MAIN_REPO_URL');
  }

  if (!MAIN_REPO_TOKEN && (!MAIN_REPO_USERNAME || !MAIN_REPO_PASSWORD)) {
    missingVars.push('MAIN_REPO_TOKEN or MAIN_REPO_USERNAME/MAIN_REPO_PASSWORD');
  }

  if (!GOOGLE_CLOUD_PROJECT_ID) {
    missingVars.push('GOOGLE_CLOUD_PROJECT_ID');
  }

  if (missingVars.length > 0) {
    throw new Error(`Missing required environment variables: ${missingVars.join(', ')}`);
  }
}

// Get repository credentials for the main repository
export function getMainRepoCredentials(): { type: 'username-password' | 'token'; username?: string; password?: string; token?: string } {
  if (MAIN_REPO_TOKEN) {
    return {
      type: 'token',
      token: MAIN_REPO_TOKEN
    };
  } else if (MAIN_REPO_USERNAME && MAIN_REPO_PASSWORD) {
    return {
      type: 'username-password',
      username: MAIN_REPO_USERNAME,
      password: MAIN_REPO_PASSWORD
    };
  }

  throw new Error('No credentials available for the main repository');
}

// Get GitHub credentials
export function getGithubCredentials(): { type: 'username-password' | 'token'; username?: string; password?: string; token?: string } | undefined {
  if (GITHUB_TOKEN) {
    return {
      type: 'token',
      token: GITHUB_TOKEN
    };
  } else if (GITHUB_USERNAME && GITHUB_PASSWORD) {
    return {
      type: 'username-password',
      username: GITHUB_USERNAME,
      password: GITHUB_PASSWORD
    };
  }

  return undefined;
}

// Get Gitea credentials
export function getGiteaCredentials(): { type: 'username-password'; username: string; password: string } | undefined {
  if (GITEA_USERNAME && GITEA_PASSWORD) {
    return {
      type: 'username-password',
      username: GITEA_USERNAME,
      password: GITEA_PASSWORD
    };
  }

  return undefined;
}
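
Note (not part of the commit): the config module above centralizes credential handling. `validateConfig()` fails fast when required variables are missing, and the `get*Credentials()` helpers normalize token versus username/password authentication into one shape. A minimal, illustrative sketch of how another module might consume these exports, assuming a populated `.env`:

```
// Illustrative sketch only; assumes MAIN_REPO_URL plus either MAIN_REPO_TOKEN
// or MAIN_REPO_USERNAME/MAIN_REPO_PASSWORD, and GOOGLE_CLOUD_PROJECT_ID are set.
import { validateConfig, getMainRepoCredentials, MAIN_REPO_URL } from './config';

validateConfig(); // throws if any required variable is missing

const creds = getMainRepoCredentials();
if (creds.type === 'token') {
  console.log(`Cloning ${MAIN_REPO_URL} with token auth`);
} else {
  console.log(`Cloning ${MAIN_REPO_URL} as ${creds.username}`);
}
```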

src/functions/prompts-to-test-spec/src/index.ts: 47 lines (Normal file)
@@ -0,0 +1,47 @@
import {CloudEvent, cloudEvent, http} from '@google-cloud/functions-framework';
import { ProcessorService } from './services/processor-service';
import { validateConfig } from './config';

// Validate configuration on startup
try {
  validateConfig();
} catch (error) {
  console.error('Configuration error:', error.message);
  // Don't throw here to allow the function to start, but it will fail when executed
}

/**
 * HTTP endpoint for the prompts-to-test-spec function
 */
http('promptToTestSpecHttp', async (req, res): Promise<void> => {
  try {
    const processor = new ProcessorService();
    const results = await processor.processProjects();

    res.status(200).json({
      success: true,
      results
    });
  } catch (error) {
    console.error('Error processing projects:', error);
    res.status(500).json({
      success: false,
      error: error instanceof Error ? error.message : String(error)
    });
  }
});

/**
 * Cloud Event handler for the prompts-to-test-spec function
 */
cloudEvent('promptToTestSpecEvent', async (event: CloudEvent<any>): Promise<void> => {
  try {
    console.log('Received event:', event.type);
    const processor = new ProcessorService();
    await processor.processProjects();
    console.log('Processing completed successfully');
  } catch (error) {
    console.error('Error processing projects:', error);
    throw error;
  }
});
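
Note (not part of the commit): once the function is served locally (`npm start`, which the README describes as listening on port 8080), the HTTP handler above can be exercised directly. A hedged sketch of a caller; the response shape follows the handler's success path, and the port is assumed from the README:

```
// Illustrative sketch only; assumes the Functions Framework is running on localhost:8080.
async function callLocalFunction(): Promise<void> {
  const res = await fetch('http://localhost:8080/');
  const body = await res.json();
  // Mirrors the handler above: { success: boolean, results: ProcessResult[] }
  console.log(body.success, Array.isArray(body.results));
}

callLocalFunction().catch(console.error);
```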

src/functions/prompts-to-test-spec/src/services/__tests__/project-service.test.ts: 206 lines (Normal file)
@@ -0,0 +1,206 @@
import * as fs from 'fs';
import * as path from 'path';
import { ProjectService } from '../project-service';

// Mock fs and path modules
jest.mock('fs');
jest.mock('path');

describe('ProjectService', () => {
  let projectService: ProjectService;

  beforeEach(() => {
    projectService = new ProjectService();

    // Reset all mocks
    jest.resetAllMocks();

    // Mock path.join to return predictable paths
    (path.join as jest.Mock).mockImplementation((...args) => args.join('/'));
  });

  describe('findProjects', () => {
    it('should find all projects in the prompts directory', async () => {
      // Mock fs.readdirSync to return project directories
      (fs.readdirSync as jest.Mock).mockReturnValueOnce([
        { name: 'project1', isDirectory: () => true },
        { name: 'project2', isDirectory: () => true },
        { name: 'not-a-project', isDirectory: () => true },
        { name: 'README.md', isDirectory: () => false }
      ]);

      // Mock fs.existsSync to return true for INFO.md files
      (fs.existsSync as jest.Mock).mockImplementation((path: string) => {
        return path.endsWith('project1/INFO.md') || path.endsWith('project2/INFO.md');
      });

      // Mock readProjectInfo
      jest.spyOn(projectService, 'readProjectInfo').mockImplementation(async (projectPath, projectName) => {
        return {
          name: projectName,
          path: projectPath,
          repoHost: 'https://github.com',
          repoUrl: `https://github.com/org/${projectName}.git`,
          jiraComponent: projectName
        };
      });

      const projects = await projectService.findProjects('prompts');

      expect(projects).toHaveLength(2);
      expect(projects[0].name).toBe('project1');
      expect(projects[1].name).toBe('project2');
      expect(fs.readdirSync).toHaveBeenCalledWith('prompts', { withFileTypes: true });
      expect(fs.existsSync).toHaveBeenCalledWith('prompts/project1/INFO.md');
      expect(fs.existsSync).toHaveBeenCalledWith('prompts/project2/INFO.md');
      expect(fs.existsSync).toHaveBeenCalledWith('prompts/not-a-project/INFO.md');
    });
  });

  describe('readProjectInfo', () => {
    it('should read project information from INFO.md', async () => {
      const infoContent = `# Project Name

- [x] Repo host: https://github.com
- [x] Repo url: https://github.com/org/project.git
- [x] Jira component: project-component
`;

      // Mock fs.readFileSync to return INFO.md content
      (fs.readFileSync as jest.Mock).mockReturnValueOnce(infoContent);

      const project = await projectService.readProjectInfo('path/to/project', 'project');

      expect(project).toEqual({
        name: 'project',
        path: 'path/to/project',
        repoHost: 'https://github.com',
        repoUrl: 'https://github.com/org/project.git',
        jiraComponent: 'project-component'
      });
      expect(fs.readFileSync).toHaveBeenCalledWith('path/to/project/INFO.md', 'utf-8');
    });
  });

  describe('findWorkitems', () => {
    it('should find all workitems in a project', async () => {
      // Mock fs.existsSync to return true for workitems directory
      (fs.existsSync as jest.Mock).mockReturnValueOnce(true);

      // Mock fs.readdirSync to return workitem files
      (fs.readdirSync as jest.Mock).mockReturnValueOnce([
        'workitem1.md',
        'workitem2.md',
        'not-a-workitem.txt'
      ]);

      // Mock readWorkitemInfo
      jest.spyOn(projectService, 'readWorkitemInfo').mockImplementation(async (workitemPath, fileName) => {
        return {
          name: fileName.replace('.md', ''),
          path: workitemPath,
          title: `Workitem ${fileName.replace('.md', '')}`,
          description: 'Description',
          jiraReference: 'JIRA-123',
          implementation: '',
          isActive: true
        };
      });

      const workitems = await projectService.findWorkitems('path/to/project');

      expect(workitems).toHaveLength(2);
      expect(workitems[0].name).toBe('workitem1');
      expect(workitems[1].name).toBe('workitem2');
      expect(fs.existsSync).toHaveBeenCalledWith('path/to/project/workitems');
      expect(fs.readdirSync).toHaveBeenCalledWith('path/to/project/workitems');
    });

    it('should return empty array if workitems directory does not exist', async () => {
      // Mock fs.existsSync to return false for workitems directory
      (fs.existsSync as jest.Mock).mockReturnValueOnce(false);

      const workitems = await projectService.findWorkitems('path/to/project');

      expect(workitems).toHaveLength(0);
      expect(fs.existsSync).toHaveBeenCalledWith('path/to/project/workitems');
      expect(fs.readdirSync).not.toHaveBeenCalled();
    });
  });

  describe('readWorkitemInfo', () => {
    it('should read workitem information from markdown file', async () => {
      const workitemContent = `## Workitem Title

This is a description of the workitem.
It has multiple lines.

- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active
`;

      // Mock fs.readFileSync to return workitem content
      (fs.readFileSync as jest.Mock).mockReturnValueOnce(workitemContent);

      const workitem = await projectService.readWorkitemInfo('path/to/workitem.md', 'workitem.md');

      expect(workitem).toEqual({
        name: 'workitem',
        path: 'path/to/workitem.md',
        title: 'Workitem Title',
        description: 'This is a description of the workitem.\nIt has multiple lines.',
        jiraReference: 'JIRA-123',
        implementation: '',
        isActive: true
      });
      expect(fs.readFileSync).toHaveBeenCalledWith('path/to/workitem.md', 'utf-8');
    });

    it('should handle workitem without Active checkbox', async () => {
      const workitemContent = `## Workitem Title

This is a description of the workitem.

- [ ] Jira: JIRA-123
- [ ] Implementation:
`;

      // Mock fs.readFileSync to return workitem content
      (fs.readFileSync as jest.Mock).mockReturnValueOnce(workitemContent);

      const workitem = await projectService.readWorkitemInfo('path/to/workitem.md', 'workitem.md');

      expect(workitem.isActive).toBe(true);
    });
  });

  describe('readProjectGuidelines', () => {
    it('should read AI guidelines for a project', async () => {
      const guidelinesContent = '## Guidelines\n\nThese are the guidelines.';

      // Mock fs.existsSync to return true for AI.md
      (fs.existsSync as jest.Mock).mockReturnValueOnce(true);

      // Mock fs.readFileSync to return guidelines content
      (fs.readFileSync as jest.Mock).mockReturnValueOnce(guidelinesContent);

      const guidelines = await projectService.readProjectGuidelines('path/to/project');

      expect(guidelines).toBe(guidelinesContent);
      expect(fs.existsSync).toHaveBeenCalledWith('path/to/project/AI.md');
      expect(fs.readFileSync).toHaveBeenCalledWith('path/to/project/AI.md', 'utf-8');
    });

    it('should return empty string if AI.md does not exist', async () => {
      // Mock fs.existsSync to return false for AI.md
      (fs.existsSync as jest.Mock).mockReturnValueOnce(false);

      const guidelines = await projectService.readProjectGuidelines('path/to/project');

      expect(guidelines).toBe('');
      expect(fs.existsSync).toHaveBeenCalledWith('path/to/project/AI.md');
      expect(fs.readFileSync).not.toHaveBeenCalled();
    });
  });
});

src/functions/prompts-to-test-spec/src/services/gemini-service.ts: 155 lines (Normal file)
@@ -0,0 +1,155 @@
/**
 * Service for handling Gemini API operations
 */
import { VertexAI } from '@google-cloud/vertexai';
import * as fs from 'fs';
import * as path from 'path';
import { Project, Workitem } from '../types';
import { GOOGLE_CLOUD_PROJECT_ID, GOOGLE_CLOUD_LOCATION, GEMINI_MODEL } from '../config';

export class GeminiService {
  private vertexAI: VertexAI;
  private model: string;
  private projectId: string;
  private location: string;

  constructor(projectId?: string, location?: string, model?: string) {
    this.projectId = projectId || GOOGLE_CLOUD_PROJECT_ID;
    this.location = location || GOOGLE_CLOUD_LOCATION;
    this.model = model || GEMINI_MODEL;

    if (!this.projectId) {
      throw new Error('Google Cloud Project ID is required');
    }

    this.vertexAI = new VertexAI({
      project: this.projectId,
      location: this.location,
    });
  }

  /**
   * Apply project guidelines to a workitem
   * @param project Project information
   * @param workitem Workitem to process
   * @param projectRepoPath Path to the cloned project repository
   * @returns Result of the processing
   */
  async processWorkitem(
    project: Project,
    workitem: Workitem,
    projectRepoPath: string
  ): Promise<{ success: boolean; error?: string }> {
    try {
      // Skip inactive workitems
      if (!workitem.isActive) {
        console.log(`Skipping inactive workitem: ${workitem.name}`);

        // If the feature file exists, it should be deleted according to guidelines
        const featureFileName = `${workitem.name}.feature`;
        const featurePath = path.join(projectRepoPath, 'nitro-it', 'src', 'test', 'resources', 'workitems', featureFileName);

        if (fs.existsSync(featurePath)) {
          fs.unlinkSync(featurePath);
          console.log(`Deleted feature file for inactive workitem: ${featurePath}`);
        }

        return { success: true };
      }

      // Read project guidelines
      const projectGuidelines = await this.readProjectGuidelines(project.path);

      // Read workitem content
      const workitemContent = fs.readFileSync(workitem.path, 'utf-8');

      // Generate feature file content using Gemini API
      const featureContent = await this.generateFeatureFile(
        projectGuidelines,
        workitemContent,
        workitem.name
      );

      // Ensure the target directory exists
      const targetDir = path.join(projectRepoPath, 'nitro-it', 'src', 'test', 'resources', 'workitems');
      if (!fs.existsSync(targetDir)) {
        fs.mkdirSync(targetDir, { recursive: true });
      }

      // Write the feature file
      const featureFileName = `${workitem.name}.feature`;
      const featurePath = path.join(targetDir, featureFileName);
      fs.writeFileSync(featurePath, featureContent);

      console.log(`Created/updated feature file: ${featurePath}`);

      return { success: true };
    } catch (error) {
      console.error(`Error processing workitem ${workitem.name}:`, error);
      return {
        success: false,
        error: error instanceof Error ? error.message : String(error)
      };
    }
  }

  /**
   * Read AI guidelines for a project
   * @param projectPath Path to the project directory
   * @returns AI guidelines content
   */
  private async readProjectGuidelines(projectPath: string): Promise<string> {
    const aiPath = path.join(projectPath, 'AI.md');

    if (!fs.existsSync(aiPath)) {
      return '';
    }

    return fs.readFileSync(aiPath, 'utf-8');
  }

  /**
   * Generate feature file content using Gemini API
   * @param guidelines Project guidelines
   * @param workitemContent Workitem content
   * @param workitemName Name of the workitem
   * @returns Generated feature file content
   */
  private async generateFeatureFile(
    guidelines: string,
    workitemContent: string,
    workitemName: string
  ): Promise<string> {
    const generativeModel = this.vertexAI.getGenerativeModel({
      model: this.model,
    });

    const currentDate = new Date().toISOString();

    const prompt = `
You are tasked with creating a Cucumber feature file based on a workitem description.

Project Guidelines:
${guidelines}

Workitem:
${workitemContent}

Create a Cucumber feature file that implements this workitem according to the guidelines.
Include the following comment at the top of the file:
# Generated by prompts-to-test-spec on ${currentDate}
# Source: ${workitemName}

The feature file should be complete and ready to use in a Cucumber test suite.
`;

    const result = await generativeModel.generateContent({
      contents: [{ role: 'user', parts: [{ text: prompt }] }],
    });

    const response = await result.response;
    const generatedText = response.candidates[0].content.parts[0].text;

    return generatedText;
  }
}

src/functions/prompts-to-test-spec/src/services/processor-service.ts: 214 lines (Normal file)
@@ -0,0 +1,214 @@
/**
 * Service for orchestrating the entire process
 */
import * as path from 'path';
import { Project, ProcessResult, RepoCredentials } from '../types';
import { RepositoryService } from './repository-service';
import { ProjectService } from './project-service';
import { GeminiService } from './gemini-service';
import { PullRequestService } from './pull-request-service';
import {
  MAIN_REPO_URL,
  validateConfig,
  getMainRepoCredentials,
  getGithubCredentials,
  getGiteaCredentials,
  GOOGLE_CLOUD_PROJECT_ID,
  GOOGLE_CLOUD_LOCATION,
  GEMINI_MODEL
} from '../config';

export class ProcessorService {
  private repositoryService: RepositoryService;
  private projectService: ProjectService;
  private geminiService: GeminiService;
  private pullRequestService: PullRequestService;
  private mainRepoUrl: string;
  private mainRepoCredentials: RepoCredentials;
  private giteaCredentials?: RepoCredentials;
  private githubCredentials?: RepoCredentials;

  constructor() {
    // Validate configuration
    validateConfig();

    // Initialize services
    this.repositoryService = new RepositoryService();
    this.projectService = new ProjectService();
    this.geminiService = new GeminiService(
      GOOGLE_CLOUD_PROJECT_ID,
      GOOGLE_CLOUD_LOCATION,
      GEMINI_MODEL
    );
    this.pullRequestService = new PullRequestService();

    // Get main repository URL and credentials
    this.mainRepoUrl = MAIN_REPO_URL;
    this.mainRepoCredentials = getMainRepoCredentials();

    // Initialize other credentials
    this.githubCredentials = getGithubCredentials();
    this.giteaCredentials = getGiteaCredentials();
  }

  /**
   * Get credentials for a project based on its repository host
   * @param project Project information
   * @returns Credentials for the project repository
   */
  private getCredentialsForProject(project: Project): RepoCredentials {
    if (!project.repoHost) {
      throw new Error(`Repository host not found for project ${project.name}`);
    }

    if (project.repoHost.includes('github.com')) {
      if (!this.githubCredentials) {
        throw new Error('GitHub credentials not found');
      }
      return this.githubCredentials;
    } else if (project.repoHost.includes('gitea')) {
      if (!this.giteaCredentials) {
        throw new Error('Gitea credentials not found');
      }
      return this.giteaCredentials;
    } else {
      throw new Error(`Unsupported repository host: ${project.repoHost}`);
    }
  }

  /**
   * Process all projects in the main repository
   * @returns Array of process results
   */
  async processProjects(): Promise<ProcessResult[]> {
    const results: ProcessResult[] = [];

    try {
      // Clone the main repository
      console.log(`Cloning main repository: ${this.mainRepoUrl}`);
      const mainRepoPath = await this.repositoryService.cloneMainRepository(
        this.mainRepoUrl,
        this.mainRepoCredentials
      );

      // Find all projects in the prompts directory
      const promptsDir = path.join(mainRepoPath, 'src', 'prompts');
      console.log(`Finding projects in: ${promptsDir}`);
      const projects = await this.projectService.findProjects(promptsDir);

      console.log(`Found ${projects.length} projects`);

      // Process each project
      for (const project of projects) {
        try {
          const result = await this.processProject(project);
          results.push(result);
        } catch (error) {
          console.error(`Error processing project ${project.name}:`, error);
          results.push({
            project,
            processedWorkitems: [],
            error: error instanceof Error ? error.message : String(error)
          });
        }
      }

      return results;
    } catch (error) {
      console.error('Error processing projects:', error);
      throw error;
    }
  }

  /**
   * Process a single project
   * @param project Project information
   * @returns Process result
   */
  async processProject(project: Project): Promise<ProcessResult> {
    console.log(`Processing project: ${project.name}`);

    // Find all workitems in the project
    const workitems = await this.projectService.findWorkitems(project.path);
    console.log(`Found ${workitems.length} workitems in project ${project.name}`);

    // Skip if no workitems found
    if (workitems.length === 0) {
      return {
        project,
        processedWorkitems: []
      };
    }

    // Skip if no repository URL
    if (!project.repoUrl) {
      console.log(`Skipping project ${project.name}: No repository URL found`);
      return {
        project,
        processedWorkitems: []
      };
    }

    try {
      // Get credentials for the project
      const credentials = this.getCredentialsForProject(project);

      // Clone the project repository
      console.log(`Cloning project repository: ${project.repoUrl}`);
      const projectRepoPath = await this.repositoryService.cloneProjectRepository(project, credentials);

      // Create a new branch for changes
      const branchName = `update-workitems-${new Date().toISOString().split('T')[0]}`;
      await this.repositoryService.createBranch(projectRepoPath, branchName);

      // Process each workitem
      const processedWorkitems = [];
      for (const workitem of workitems) {
        console.log(`Processing workitem: ${workitem.name}`);
        const result = await this.geminiService.processWorkitem(project, workitem, projectRepoPath);
        processedWorkitems.push({ workitem, ...result });
      }

      // If no changes were made, return early
      if (processedWorkitems.length === 0) {
        console.log(`No workitems processed for project ${project.name}`);
        return {
          project,
          processedWorkitems: []
        };
      }

      // Commit changes
      await this.repositoryService.commitChanges(
        projectRepoPath,
        `Update workitems: ${new Date().toISOString().split('T')[0]}`
      );

      // Push changes
      await this.repositoryService.pushChanges(projectRepoPath, branchName, credentials);

      // Create pull request
      const pullRequestUrl = await this.pullRequestService.createPullRequest(
        project,
        branchName,
        processedWorkitems,
        credentials
      );

      console.log(`Created pull request: ${pullRequestUrl}`);

      return {
        project,
        processedWorkitems,
        pullRequestUrl
      };
    } catch (error) {
      console.error(`Error processing project ${project.name}:`, error);
      return {
        project,
        processedWorkitems: [],
        error: error instanceof Error ? error.message : String(error)
      };
    }
  }
}

src/functions/prompts-to-test-spec/src/services/project-service.ts: 151 lines (Normal file)
@@ -0,0 +1,151 @@
/**
 * Service for handling project operations
 */
import * as fs from 'fs';
import * as path from 'path';
import { Project, Workitem } from '../types';

export class ProjectService {
  /**
   * Find all projects in the prompts directory
   * @param promptsDir Path to the prompts directory
   * @returns Array of projects
   */
  async findProjects(promptsDir: string): Promise<Project[]> {
    const projects: Project[] = [];

    // Get all directories in the prompts directory
    const entries = fs.readdirSync(promptsDir, { withFileTypes: true });
    const projectDirs = entries.filter(entry => entry.isDirectory());

    for (const dir of projectDirs) {
      const projectPath = path.join(promptsDir, dir.name);
      const infoPath = path.join(projectPath, 'INFO.md');

      // Skip directories without INFO.md
      if (!fs.existsSync(infoPath)) {
        continue;
      }

      // Read project info
      const project = await this.readProjectInfo(projectPath, dir.name);
      projects.push(project);
    }

    return projects;
  }

  /**
   * Read project information from INFO.md
   * @param projectPath Path to the project directory
   * @param projectName Name of the project
   * @returns Project information
   */
  async readProjectInfo(projectPath: string, projectName: string): Promise<Project> {
    const infoPath = path.join(projectPath, 'INFO.md');
    const infoContent = fs.readFileSync(infoPath, 'utf-8');

    // Parse INFO.md content
    const repoHostMatch = infoContent.match(/- \[[ x]\] Repo host: (.*)/);
    const repoUrlMatch = infoContent.match(/- \[[ x]\] Repo url: (.*)/);
    const jiraComponentMatch = infoContent.match(/- \[[ x]\] Jira component: (.*)/);

    return {
      name: projectName,
      path: projectPath,
      repoHost: repoHostMatch ? repoHostMatch[1].trim() : undefined,
      repoUrl: repoUrlMatch ? repoUrlMatch[1].trim() : undefined,
      jiraComponent: jiraComponentMatch ? jiraComponentMatch[1].trim() : undefined
    };
  }

  /**
   * Find all workitems in a project
   * @param projectPath Path to the project directory
   * @returns Array of workitems
   */
  async findWorkitems(projectPath: string): Promise<Workitem[]> {
    const workitems: Workitem[] = [];
    const workitemsDir = path.join(projectPath, 'workitems');

    // Skip if workitems directory doesn't exist
    if (!fs.existsSync(workitemsDir)) {
      return workitems;
    }

    // Get all markdown files in the workitems directory
    const files = fs.readdirSync(workitemsDir)
      .filter(file => file.endsWith('.md'));

    for (const file of files) {
      const workitemPath = path.join(workitemsDir, file);
      const workitem = await this.readWorkitemInfo(workitemPath, file);
      workitems.push(workitem);
    }

    return workitems;
  }

  /**
   * Read workitem information from a markdown file
   * @param workitemPath Path to the workitem file
   * @param fileName Name of the workitem file
   * @returns Workitem information
   */
  async readWorkitemInfo(workitemPath: string, fileName: string): Promise<Workitem> {
    const content = fs.readFileSync(workitemPath, 'utf-8');

    // Parse workitem content
    const titleMatch = content.match(/## (.*)/);
    const jiraMatch = content.match(/- \[[ x]\] Jira: (.*)/);
    const implementationMatch = content.match(/- \[[ x]\] Implementation: (.*)/);
    const activeMatch = content.match(/- \[([x ])\] Active/);

    // Extract description (everything between title and first metadata line)
    let description = '';
    const lines = content.split('\n');
    let titleIndex = -1;
    let metadataIndex = -1;

    for (let i = 0; i < lines.length; i++) {
      if (titleIndex === -1 && lines[i].startsWith('## ')) {
        titleIndex = i;
      } else if (titleIndex !== -1 && metadataIndex === -1 && lines[i].startsWith('- [')) {
        metadataIndex = i;
      }
    }

    if (titleIndex !== -1 && metadataIndex !== -1) {
      description = lines.slice(titleIndex + 1, metadataIndex).join('\n').trim();
    }

    // Determine if workitem is active
    // If the Active checkbox is missing, assume it's active
    const isActive = activeMatch ? activeMatch[1] === 'x' : true;

    return {
      name: fileName.replace('.md', ''),
      path: workitemPath,
      title: titleMatch ? titleMatch[1].trim() : fileName,
      description,
      jiraReference: jiraMatch ? jiraMatch[1].trim() : undefined,
      implementation: implementationMatch ? implementationMatch[1].trim() : undefined,
      isActive
    };
  }

  /**
   * Read AI guidelines for a project
   * @param projectPath Path to the project directory
   * @returns AI guidelines content
   */
  async readProjectGuidelines(projectPath: string): Promise<string> {
    const aiPath = path.join(projectPath, 'AI.md');

    if (!fs.existsSync(aiPath)) {
      return '';
    }

    return fs.readFileSync(aiPath, 'utf-8');
  }
}
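
Note (not part of the commit): the regexes in `readProjectInfo` and `readWorkitemInfo` above imply a specific markdown layout for `INFO.md` and for each workitem file. An illustrative sketch of that layout, expressed the same way the unit tests do (as template-literal fixtures); the concrete values are placeholders:

```
// Illustrative fixtures only; field names match the regexes in project-service.ts.
const exampleInfoMd = `# Example Project

- [x] Repo host: https://github.com
- [x] Repo url: https://github.com/org/example.git
- [x] Jira component: example
`;

const exampleWorkitemMd = `## Example workitem

Short description of the desired behaviour.

- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active
`;
// readWorkitemInfo() treats a missing "Active" checkbox as active, and the text
// between the "## " title and the first "- [" line becomes the description.
```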

src/functions/prompts-to-test-spec/src/services/pull-request-service.ts: 194 lines (Normal file)
@@ -0,0 +1,194 @@
/**
 * Service for handling pull request operations
 */
import axios from 'axios';
import * as path from 'path';
import { Project, RepoCredentials, Workitem } from '../types';

export class PullRequestService {
  /**
   * Create a pull request for changes in a repository
   * @param project Project information
   * @param branchName Name of the branch with changes
   * @param processedWorkitems List of processed workitems
   * @param credentials Repository credentials
   * @returns URL of the created pull request
   */
  async createPullRequest(
    project: Project,
    branchName: string,
    processedWorkitems: { workitem: Workitem; success: boolean; error?: string }[],
    credentials: RepoCredentials
  ): Promise<string> {
    if (!project.repoHost || !project.repoUrl) {
      throw new Error(`Repository information not found for project ${project.name}`);
    }

    // Generate PR title and description
    const title = `Update workitems: ${new Date().toISOString().split('T')[0]}`;
    const description = this.generatePullRequestDescription(processedWorkitems);

    // Determine the repository host type and create PR accordingly
    if (project.repoHost.includes('github.com')) {
      return this.createGithubPullRequest(project, branchName, title, description, credentials);
    } else if (project.repoHost.includes('gitea')) {
      return this.createGiteaPullRequest(project, branchName, title, description, credentials);
    } else {
      throw new Error(`Unsupported repository host: ${project.repoHost}`);
    }
  }

  /**
   * Create a pull request on GitHub
   * @param project Project information
   * @param branchName Name of the branch with changes
   * @param title Pull request title
   * @param description Pull request description
   * @param credentials Repository credentials
   * @returns URL of the created pull request
   */
  private async createGithubPullRequest(
    project: Project,
    branchName: string,
    title: string,
    description: string,
    credentials: RepoCredentials
  ): Promise<string> {
    // Extract owner and repo from the repository URL
    const repoUrlParts = project.repoUrl!.split('/');
    const repo = path.basename(repoUrlParts[repoUrlParts.length - 1], '.git');
    const owner = repoUrlParts[repoUrlParts.length - 2];

    // Create the pull request
    const apiUrl = `https://api.github.com/repos/${owner}/${repo}/pulls`;

    const headers: Record<string, string> = {
      'Accept': 'application/vnd.github.v3+json',
    };

    if (credentials.type === 'token' && credentials.token) {
      headers['Authorization'] = `token ${credentials.token}`;
    } else if (credentials.type === 'username-password' && credentials.username && credentials.password) {
      const auth = Buffer.from(`${credentials.username}:${credentials.password}`).toString('base64');
      headers['Authorization'] = `Basic ${auth}`;
    } else {
      throw new Error('Invalid credentials for GitHub');
    }

    const response = await axios.post(
      apiUrl,
      {
        title,
        body: description,
        head: branchName,
        base: 'main', // Assuming the default branch is 'main'
      },
      { headers }
    );

    return response.data.html_url;
  }

  /**
   * Create a pull request on Gitea
   * @param project Project information
   * @param branchName Name of the branch with changes
   * @param title Pull request title
   * @param description Pull request description
   * @param credentials Repository credentials
   * @returns URL of the created pull request
   */
  private async createGiteaPullRequest(
    project: Project,
    branchName: string,
    title: string,
    description: string,
    credentials: RepoCredentials
  ): Promise<string> {
    // Extract owner and repo from the repository URL
    const repoUrlParts = project.repoUrl!.split('/');
    const repo = path.basename(repoUrlParts[repoUrlParts.length - 1], '.git');
    const owner = repoUrlParts[repoUrlParts.length - 2];

    // Create the pull request
    const apiUrl = `${project.repoHost}/api/v1/repos/${owner}/${repo}/pulls`;

    const headers: Record<string, string> = {
      'Accept': 'application/json',
      'Content-Type': 'application/json',
    };

    if (credentials.type === 'token' && credentials.token) {
      headers['Authorization'] = `token ${credentials.token}`;
    } else if (credentials.type === 'username-password' && credentials.username && credentials.password) {
      const auth = Buffer.from(`${credentials.username}:${credentials.password}`).toString('base64');
      headers['Authorization'] = `Basic ${auth}`;
    } else {
      throw new Error('Invalid credentials for Gitea');
    }

    const response = await axios.post(
      apiUrl,
      {
        title,
        body: description,
        head: branchName,
        base: 'main', // Assuming the default branch is 'main'
      },
      { headers }
    );

    return response.data.html_url;
  }

  /**
   * Generate a description for the pull request
   * @param processedWorkitems List of processed workitems
   * @returns Pull request description
   */
  private generatePullRequestDescription(
    processedWorkitems: { workitem: Workitem; success: boolean; error?: string }[]
  ): string {
    const added: string[] = [];
    const updated: string[] = [];
    const deleted: string[] = [];
    const failed: string[] = [];

    for (const item of processedWorkitems) {
      const { workitem, success, error } = item;

      if (!success) {
        failed.push(`- ${workitem.name}: ${error}`);
        continue;
      }

      if (!workitem.isActive) {
        deleted.push(`- ${workitem.name}`);
      } else if (workitem.implementation) {
        updated.push(`- ${workitem.name}`);
      } else {
        added.push(`- ${workitem.name}`);
      }
    }

    let description = 'This PR was automatically generated by the prompts-to-test-spec function.\n\n';

    if (added.length > 0) {
      description += '## Added\n' + added.join('\n') + '\n\n';
    }

    if (updated.length > 0) {
      description += '## Updated\n' + updated.join('\n') + '\n\n';
    }

    if (deleted.length > 0) {
      description += '## Deleted\n' + deleted.join('\n') + '\n\n';
    }

    if (failed.length > 0) {
      description += '## Failed\n' + failed.join('\n') + '\n\n';
    }

    return description;
  }
}
@ -0,0 +1,131 @@
/**
 * Service for handling repository operations
 */
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
import { simpleGit, SimpleGit } from 'simple-git';
import { Project, RepoCredentials } from '../types';

export class RepositoryService {
  private baseDir: string;

  constructor(baseDir?: string) {
    this.baseDir = baseDir || path.join(os.tmpdir(), 'prompts-to-test-spec');

    // Ensure base directory exists
    if (!fs.existsSync(this.baseDir)) {
      fs.mkdirSync(this.baseDir, { recursive: true });
    }
  }

  /**
   * Clone the main repository containing prompts
   * @param repoUrl URL of the repository
   * @param credentials Optional credentials for private repositories
   * @returns Path to the cloned repository
   */
  async cloneMainRepository(repoUrl: string, credentials?: RepoCredentials): Promise<string> {
    const repoDir = path.join(this.baseDir, 'main-repo');

    // Clean up existing directory if it exists
    if (fs.existsSync(repoDir)) {
      fs.rmSync(repoDir, { recursive: true, force: true });
    }

    fs.mkdirSync(repoDir, { recursive: true });

    // Configure git with credentials if provided
    const git = this.configureGit(repoDir, credentials);

    // Clone the repository
    await git.clone(repoUrl, repoDir);

    return repoDir;
  }

  /**
   * Clone a project repository
   * @param project Project information
   * @param credentials Optional credentials for private repositories
   * @returns Path to the cloned repository
   */
  async cloneProjectRepository(project: Project, credentials?: RepoCredentials): Promise<string> {
    if (!project.repoUrl) {
      throw new Error(`Repository URL not found for project ${project.name}`);
    }

    const projectRepoDir = path.join(this.baseDir, `project-${project.name}`);

    // Clean up existing directory if it exists
    if (fs.existsSync(projectRepoDir)) {
      fs.rmSync(projectRepoDir, { recursive: true, force: true });
    }

    fs.mkdirSync(projectRepoDir, { recursive: true });

    // Configure git with credentials if provided
    const git = this.configureGit(projectRepoDir, credentials);

    // Clone the repository
    await git.clone(project.repoUrl, projectRepoDir);

    return projectRepoDir;
  }

  /**
   * Create a new branch in a repository
   * @param repoDir Path to the repository
   * @param branchName Name of the branch to create
   */
  async createBranch(repoDir: string, branchName: string): Promise<void> {
    const git = simpleGit(repoDir);
    await git.checkoutLocalBranch(branchName);
  }

  /**
   * Commit changes to a repository
   * @param repoDir Path to the repository
   * @param message Commit message
   */
  async commitChanges(repoDir: string, message: string): Promise<void> {
    const git = simpleGit(repoDir);
    await git.add('.');
    await git.commit(message);
  }

  /**
   * Push changes to a repository
   * @param repoDir Path to the repository
   * @param branchName Name of the branch to push
   * @param credentials Optional credentials for private repositories
   */
  async pushChanges(repoDir: string, branchName: string, credentials?: RepoCredentials): Promise<void> {
    const git = this.configureGit(repoDir, credentials);
    await git.push('origin', branchName, ['--set-upstream']);
  }

  /**
   * Configure git with credentials
   * @param repoDir Path to the repository
   * @param credentials Credentials for authentication
   * @returns Configured SimpleGit instance
   */
  private configureGit(repoDir: string, credentials?: RepoCredentials): SimpleGit {
    const git = simpleGit(repoDir);

    if (credentials) {
      if (credentials.type === 'username-password' && credentials.username && credentials.password) {
        // For HTTPS URLs with username/password
        const credentialHelper = `!f() { echo "username=${credentials.username}"; echo "password=${credentials.password}"; }; f`;
        git.addConfig('credential.helper', credentialHelper, false, 'global');
      } else if (credentials.type === 'token' && credentials.token) {
        // For HTTPS URLs with token
        const credentialHelper = `!f() { echo "password=${credentials.token}"; }; f`;
        git.addConfig('credential.helper', credentialHelper, false, 'global');
      }
    }

    return git;
  }
}
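For orientation, below is a minimal usage sketch of RepositoryService, chaining its operations in the order the class supports (clone, branch, commit, push). The import paths, function name, branch name, and commit message are assumptions made for the sketch; the repository URL is the one listed in src/prompts/nitro-back/INFO.md.

```typescript
// Sketch only: illustrative wiring of the RepositoryService calls.
import { RepositoryService } from './services/repository'; // assumed module path
import { RepoCredentials } from './types';

async function updateProjectRepo(): Promise<void> {
  const credentials: RepoCredentials = {
    type: 'username-password',
    username: process.env.GITEA_USERNAME,
    password: process.env.GITEA_PASSWORD,
  };

  const repos = new RepositoryService();
  const repoDir = await repos.cloneProjectRepository(
    {
      name: 'nitro-back',
      path: 'src/prompts/nitro-back',
      repoUrl: 'https://gitea.fteamdev.valuya.be/fiscalteam/nitro-back.git',
    },
    credentials
  );

  await repos.createBranch(repoDir, 'workitems-update'); // branch name is a sample value
  // ... write or delete generated feature files in repoDir here ...
  await repos.commitChanges(repoDir, 'Update workitem feature specs');
  await repos.pushChanges(repoDir, 'workitems-update', credentials);
}
```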
38
src/functions/prompts-to-test-spec/src/types.ts
Normal file
@ -0,0 +1,38 @@
/**
 * Type definitions for the prompts-to-test-spec function
 */

export interface Project {
  name: string;
  path: string;
  repoHost?: string;
  repoUrl?: string;
  jiraComponent?: string;
}

export interface Workitem {
  name: string;
  path: string;
  title: string;
  description: string;
  jiraReference?: string;
  implementation?: string;
  isActive: boolean;
}

export interface RepoCredentials {
  type: 'username-password' | 'token';
  username?: string;
  password?: string;
  token?: string;
}

export interface ProcessResult {
  project: Project;
  processedWorkitems: {
    workitem: Workitem;
    success: boolean;
    error?: string;
  }[];
  pullRequestUrl?: string;
}
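As a readability aid, a hypothetical ProcessResult value could be assembled from these interfaces as below; the values mirror the nitro-back project and test workitem defined later in this commit, and the pull request URL is invented.

```typescript
import { ProcessResult, Project, Workitem } from './types'; // assumed relative path

// Sample data only: shows how the interfaces above nest together.
const project: Project = {
  name: 'nitro-back',
  path: 'src/prompts/nitro-back',
  repoUrl: 'https://gitea.fteamdev.valuya.be/fiscalteam/nitro-back.git',
  jiraComponent: 'nitro',
};

const workitem: Workitem = {
  name: '2025-06-08-test',
  path: 'src/prompts/nitro-back/workitems/2025-06-08-test.md',
  title: 'Test',
  description: 'This is a test workitem.',
  isActive: true,
};

const result: ProcessResult = {
  project,
  processedWorkitems: [{ workitem, success: true }],
  pullRequestUrl: 'https://gitea.fteamdev.valuya.be/fiscalteam/nitro-back/pulls/1', // invented
};
```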
18
src/functions/prompts-to-test-spec/tsconfig.json
Normal file
@ -0,0 +1,18 @@
{
  "compilerOptions": {
    "target": "ES2020",
    "module": "CommonJS",
    "outDir": "dist",
    "strict": true,
    "esModuleInterop": true,
    "forceConsistentCasingInFileNames": true,
    "skipLibCheck": true
  },
  "include": [
    "src/**/*"
  ],
  "exclude": [
    "node_modules",
    "dist"
  ]
}
66
src/prompts/AI.md
Normal file
@ -0,0 +1,66 @@
This file describes the AI guidelines for operations in this directory.

## Directory structure

- <project>/: A single project repository
  - INFO.md: Project information, including where the code is hosted
  - AI.md: AI guidelines for the project
  - workitems/: A directory containing workitem prompts
    - <date>-<workitem-name>.md: A prompt file for a project workitem

### File format

The file format is markdown.
Files contain checkboxes that must only be checked if the information is available and provided.

#### Project info file format

A project info file follows this format:

```markdown
## <Project name>

- [ ] Repo host: <repo host url, e.g. https://gitea.fteamdev.valuya.be/ or https://github.com/organizations/Ebitda-SRL>
- [ ] Repo url: <url of the project repository>
- [ ] Jira component: <Jira component>

```

#### Work item prompt file format

A work item prompt file follows this format:

```markdown
## <workitem name>

<short paragraphs describing the workitem, with line wrapping>

- [ ] Jira: <reference of the jira ticket with a link>
- [ ] Implementation: <reference of the implementation within the project repo, optionally with a link>
- [ ] Active

```

The Active checkbox is optional and should be checked if the workitem is active. Inactive workitems should be ignored.
In the absence of the Active checkbox, the workitem is assumed to be active.

### Credentials

This section describes the credentials to use when interacting with various APIs and services.

The actual credentials are provided in environment variables.

#### Jira

#### Github

- [ ] host: https://github.com/organizations/Ebitda-SRL

#### Gitea

- [x] host: https://gitea.fteamdev.valuya.be
  - credential type: username/password
  - username variable: GITEA_USERNAME
  - password variable: GITEA_PASSWORD
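A minimal sketch of how these variables might be turned into a RepoCredentials value follows; the helper name and the import path are assumptions, and only the GITEA_USERNAME and GITEA_PASSWORD variables come from the section above.

```typescript
import { RepoCredentials } from '../functions/prompts-to-test-spec/src/types'; // assumed path

// Hypothetical helper: builds Gitea credentials from the environment
// variables documented above; fails fast if either variable is missing.
function giteaCredentialsFromEnv(): RepoCredentials {
  const username = process.env.GITEA_USERNAME;
  const password = process.env.GITEA_PASSWORD;
  if (!username || !password) {
    throw new Error('GITEA_USERNAME and GITEA_PASSWORD must be set');
  }
  return { type: 'username-password', username, password };
}
```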
12
src/prompts/nitro-back/AI.md
Normal file
@ -0,0 +1,12 @@
## Workitem implementation

- Implement the workitem as a cucumber feature spec file in the `nitro-it/src/test/resources/workitems/` folder.
- Use the workitem prompt file name as the feature file name (see the path-mapping sketch after this list).
- Add comments in the feature file indicating
  - The date/time/execution info of the job that created the work item
  - The work item prompt file in this directory
- Inactive work items should have their feature file deleted.
- Updates should be committed to a new branch and a pull request should be created.
- The pull request should include a short description of the modified code.
- The pull request description should include the list of work items that were added/updated/deleted.
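The path-mapping sketch referenced in the list above; the function is illustrative and not part of the committed code, but the folder and naming rule match the guidelines.

```typescript
import * as path from 'path';

// Illustrative mapping only: a workitem prompt file such as
// "2025-06-08-test.md" would yield
// "nitro-it/src/test/resources/workitems/2025-06-08-test.feature".
function featureFilePathFor(workitemPromptFileName: string): string {
  const baseName = path.basename(workitemPromptFileName, '.md');
  return path.join('nitro-it', 'src', 'test', 'resources', 'workitems', `${baseName}.feature`);
}
```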
7
src/prompts/nitro-back/INFO.md
Normal file
@ -0,0 +1,7 @@
# Nitro-back

Nitro backend server in Quarkus

- [x] Repo host: https://gitea.fteamdev.valuya.be/
- [x] Repo url: https://gitea.fteamdev.valuya.be/fiscalteam/nitro-back.git
- [x] Jira component: nitro
9
src/prompts/nitro-back/workitems/2025-06-08-test.md
Normal file
@ -0,0 +1,9 @@
## Test

This is a test workitem.

The nitro-back backend should have a /test endpoint implemented returning the JSON object: {"test": "Hellow"}.

- [ ] Jira:
- [ ] Implementation:
- [ ] Active