This commit is contained in:
cghislai 2025-06-08 01:43:04 +02:00
parent d6ddd8aa45
commit c3626542d6
12 changed files with 1001 additions and 22 deletions

View File

@ -25,3 +25,10 @@ GEMINI_MODEL=gemini-1.5-pro
# Function configuration # Function configuration
# Set to 'true' to enable debug logging # Set to 'true' to enable debug logging
DEBUG=false DEBUG=false
# Set to 'true' to use local repository instead of cloning
USE_LOCAL_REPO=false
# Dry run options
# Set to 'true' to skip Gemini API calls (returns mock responses)
DRY_RUN_SKIP_GEMINI=false
# Set to 'true' to skip creating commits and PRs
DRY_RUN_SKIP_COMMITS=false

View File

@ -56,25 +56,52 @@ The function requires several environment variables to be set:
- `GOOGLE_CLOUD_LOCATION`: Google Cloud region (default: us-central1) - `GOOGLE_CLOUD_LOCATION`: Google Cloud region (default: us-central1)
- `GEMINI_MODEL`: Gemini model to use (default: gemini-1.5-pro) - `GEMINI_MODEL`: Gemini model to use (default: gemini-1.5-pro)
### Function Configuration
- `DEBUG`: Set to 'true' to enable debug logging
- `USE_LOCAL_REPO`: Set to 'true' to use local repository instead of cloning
- `DRY_RUN_SKIP_GEMINI`: Set to 'true' to skip Gemini API calls (returns mock responses)
- `DRY_RUN_SKIP_COMMITS`: Set to 'true' to skip creating commits and PRs
## Local Development ## Local Development
To run the function locally: There are two ways to run the function locally:
1. Build the function: ### Option 1: Direct Execution
This runs the function directly as a Node.js application:
```
npm start
```
This will execute the main processing logic directly without starting an HTTP server.
### Option 2: Functions Framework (Recommended)
This uses the Functions Framework to emulate the Cloud Functions environment locally:
1. Run the HTTP function:
``` ```
npm run build npm run dev
``` ```
2. Start the function: 2. Run the HTTP function with watch mode (auto-reloads on changes):
``` ```
npm start npm run dev:watch
``` ```
3. Test the HTTP endpoint: 3. Run the CloudEvent function:
``` ```
curl http://localhost:8080 npm run dev:event
``` ```
4. Test the HTTP endpoint:
```
curl http://localhost:18080
```
The Functions Framework provides a more accurate representation of how your function will behave when deployed to Google Cloud.
## Testing ## Testing
Run the tests: Run the tests:

View File

@ -18,7 +18,9 @@
"@types/express": "^5.0.3", "@types/express": "^5.0.3",
"@types/jest": "^29.5.12", "@types/jest": "^29.5.12",
"@types/node": "^20.11.30", "@types/node": "^20.11.30",
"concurrently": "^8.2.2",
"jest": "^29.7.0", "jest": "^29.7.0",
"nodemon": "^3.0.3",
"ts-jest": "^29.1.2", "ts-jest": "^29.1.2",
"typescript": "^5.8.3" "typescript": "^5.8.3"
}, },
@ -514,6 +516,16 @@
"@babel/core": "^7.0.0-0" "@babel/core": "^7.0.0-0"
} }
}, },
"node_modules/@babel/runtime": {
"version": "7.27.6",
"resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.27.6.tgz",
"integrity": "sha512-vbavdySgbTTrmFE+EsiqUTzlOr5bzlnJtUv9PynGCAKvfQqjIXbvFdumPM/GxMDfyuGMJaJAU6TO4zc1Jf1i8Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/template": { "node_modules/@babel/template": {
"version": "7.27.2", "version": "7.27.2",
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz",
@ -1652,6 +1664,19 @@
"node": "*" "node": "*"
} }
}, },
"node_modules/binary-extensions": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
"integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/body-parser": { "node_modules/body-parser": {
"version": "1.20.3", "version": "1.20.3",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz",
@ -1893,6 +1918,31 @@
"node": ">=10" "node": ">=10"
} }
}, },
"node_modules/chokidar": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz",
"integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==",
"dev": true,
"license": "MIT",
"dependencies": {
"anymatch": "~3.1.2",
"braces": "~3.0.2",
"glob-parent": "~5.1.2",
"is-binary-path": "~2.1.0",
"is-glob": "~4.0.1",
"normalize-path": "~3.0.0",
"readdirp": "~3.6.0"
},
"engines": {
"node": ">= 8.10.0"
},
"funding": {
"url": "https://paulmillr.com/funding/"
},
"optionalDependencies": {
"fsevents": "~2.3.2"
}
},
"node_modules/ci-info": { "node_modules/ci-info": {
"version": "3.9.0", "version": "3.9.0",
"resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz",
@ -2005,6 +2055,50 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/concurrently": {
"version": "8.2.2",
"resolved": "https://registry.npmjs.org/concurrently/-/concurrently-8.2.2.tgz",
"integrity": "sha512-1dP4gpXFhei8IOtlXRE/T/4H88ElHgTiUzh71YUmtjTEHMSRS2Z/fgOxHSxxusGHogsRfxNq1vyAwxSC+EVyDg==",
"dev": true,
"license": "MIT",
"dependencies": {
"chalk": "^4.1.2",
"date-fns": "^2.30.0",
"lodash": "^4.17.21",
"rxjs": "^7.8.1",
"shell-quote": "^1.8.1",
"spawn-command": "0.0.2",
"supports-color": "^8.1.1",
"tree-kill": "^1.2.2",
"yargs": "^17.7.2"
},
"bin": {
"conc": "dist/bin/concurrently.js",
"concurrently": "dist/bin/concurrently.js"
},
"engines": {
"node": "^14.13.0 || >=16.0.0"
},
"funding": {
"url": "https://github.com/open-cli-tools/concurrently?sponsor=1"
}
},
"node_modules/concurrently/node_modules/supports-color": {
"version": "8.1.1",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz",
"integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"has-flag": "^4.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/supports-color?sponsor=1"
}
},
"node_modules/content-disposition": { "node_modules/content-disposition": {
"version": "0.5.4", "version": "0.5.4",
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",
@ -2085,6 +2179,23 @@
"node": ">= 8" "node": ">= 8"
} }
}, },
"node_modules/date-fns": {
"version": "2.30.0",
"resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz",
"integrity": "sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.21.0"
},
"engines": {
"node": ">=0.11"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/date-fns"
}
},
"node_modules/debug": { "node_modules/debug": {
"version": "2.6.9", "version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
@ -2843,6 +2954,19 @@
"url": "https://github.com/sponsors/isaacs" "url": "https://github.com/sponsors/isaacs"
} }
}, },
"node_modules/glob-parent": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
"dev": true,
"license": "ISC",
"dependencies": {
"is-glob": "^4.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/globals": { "node_modules/globals": {
"version": "11.12.0", "version": "11.12.0",
"resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz",
@ -3059,6 +3183,13 @@
"node": ">=0.10.0" "node": ">=0.10.0"
} }
}, },
"node_modules/ignore-by-default": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz",
"integrity": "sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==",
"dev": true,
"license": "ISC"
},
"node_modules/import-local": { "node_modules/import-local": {
"version": "3.2.0", "version": "3.2.0",
"resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz",
@ -3138,6 +3269,19 @@
"integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/is-binary-path": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
"integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
"dev": true,
"license": "MIT",
"dependencies": {
"binary-extensions": "^2.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/is-callable": { "node_modules/is-callable": {
"version": "1.2.7", "version": "1.2.7",
"resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz",
@ -3165,6 +3309,16 @@
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
} }
}, },
"node_modules/is-extglob": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-fullwidth-code-point": { "node_modules/is-fullwidth-code-point": {
"version": "3.0.0", "version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
@ -3203,6 +3357,19 @@
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
} }
}, },
"node_modules/is-glob": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
"dev": true,
"license": "MIT",
"dependencies": {
"is-extglob": "^2.1.1"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-number": { "node_modules/is-number": {
"version": "7.0.0", "version": "7.0.0",
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
@ -4137,6 +4304,13 @@
"node": ">=8" "node": ">=8"
} }
}, },
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
"dev": true,
"license": "MIT"
},
"node_modules/lodash.memoize": { "node_modules/lodash.memoize": {
"version": "4.1.2", "version": "4.1.2",
"resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz",
@ -4365,6 +4539,83 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/nodemon": {
"version": "3.1.10",
"resolved": "https://registry.npmjs.org/nodemon/-/nodemon-3.1.10.tgz",
"integrity": "sha512-WDjw3pJ0/0jMFmyNDp3gvY2YizjLmmOUQo6DEBY+JgdvW/yQ9mEeSw6H5ythl5Ny2ytb7f9C2nIbjSxMNzbJXw==",
"dev": true,
"license": "MIT",
"dependencies": {
"chokidar": "^3.5.2",
"debug": "^4",
"ignore-by-default": "^1.0.1",
"minimatch": "^3.1.2",
"pstree.remy": "^1.1.8",
"semver": "^7.5.3",
"simple-update-notifier": "^2.0.0",
"supports-color": "^5.5.0",
"touch": "^3.1.0",
"undefsafe": "^2.0.5"
},
"bin": {
"nodemon": "bin/nodemon.js"
},
"engines": {
"node": ">=10"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/nodemon"
}
},
"node_modules/nodemon/node_modules/debug": {
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
"integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/nodemon/node_modules/has-flag": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
"integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=4"
}
},
"node_modules/nodemon/node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"dev": true,
"license": "MIT"
},
"node_modules/nodemon/node_modules/supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
"integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
"dev": true,
"license": "MIT",
"dependencies": {
"has-flag": "^3.0.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/normalize-package-data": { "node_modules/normalize-package-data": {
"version": "2.5.0", "version": "2.5.0",
"resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz",
@ -4684,6 +4935,13 @@
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/pstree.remy": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz",
"integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==",
"dev": true,
"license": "MIT"
},
"node_modules/pure-rand": { "node_modules/pure-rand": {
"version": "6.1.0", "version": "6.1.0",
"resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz",
@ -4788,6 +5046,19 @@
"node": ">=8" "node": ">=8"
} }
}, },
"node_modules/readdirp": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
"integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
"dev": true,
"license": "MIT",
"dependencies": {
"picomatch": "^2.2.1"
},
"engines": {
"node": ">=8.10.0"
}
},
"node_modules/require-directory": { "node_modules/require-directory": {
"version": "2.1.1", "version": "2.1.1",
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
@ -4860,6 +5131,16 @@
"node": ">=10" "node": ">=10"
} }
}, },
"node_modules/rxjs": {
"version": "7.8.2",
"resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz",
"integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
"tslib": "^2.1.0"
}
},
"node_modules/safe-buffer": { "node_modules/safe-buffer": {
"version": "5.2.1", "version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
@ -5015,6 +5296,19 @@
"node": ">=8" "node": ">=8"
} }
}, },
"node_modules/shell-quote": {
"version": "1.8.3",
"resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz",
"integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/side-channel": { "node_modules/side-channel": {
"version": "1.1.0", "version": "1.1.0",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz",
@ -5132,6 +5426,19 @@
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/simple-update-notifier": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz",
"integrity": "sha512-a2B9Y0KlNXl9u/vsW6sTIu9vGEpfKu2wRV6l1H3XEas/0gUIzGzBoP/IouTcUQbm9JWZLH3COxyn03TYlFax6w==",
"dev": true,
"license": "MIT",
"dependencies": {
"semver": "^7.5.3"
},
"engines": {
"node": ">=10"
}
},
"node_modules/sisteransi": { "node_modules/sisteransi": {
"version": "1.0.5", "version": "1.0.5",
"resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz",
@ -5170,6 +5477,12 @@
"source-map": "^0.6.0" "source-map": "^0.6.0"
} }
}, },
"node_modules/spawn-command": {
"version": "0.0.2",
"resolved": "https://registry.npmjs.org/spawn-command/-/spawn-command-0.0.2.tgz",
"integrity": "sha512-zC8zGoGkmc8J9ndvml8Xksr1Amk9qBujgbF0JAIWO7kXr43w0h/0GJNM/Vustixu+YE8N/MTrQ7N31FvHUACxQ==",
"dev": true
},
"node_modules/spdx-correct": { "node_modules/spdx-correct": {
"version": "3.2.0", "version": "3.2.0",
"resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz", "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz",
@ -5375,12 +5688,32 @@
"node": ">=0.6" "node": ">=0.6"
} }
}, },
"node_modules/touch": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/touch/-/touch-3.1.1.tgz",
"integrity": "sha512-r0eojU4bI8MnHr8c5bNo7lJDdI2qXlWWJk6a9EAFG7vbhTjElYhBVS3/miuE0uOuoLdb8Mc/rVfsmm6eo5o9GA==",
"dev": true,
"license": "ISC",
"bin": {
"nodetouch": "bin/nodetouch.js"
}
},
"node_modules/tr46": { "node_modules/tr46": {
"version": "0.0.3", "version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/tree-kill": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/tree-kill/-/tree-kill-1.2.2.tgz",
"integrity": "sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==",
"dev": true,
"license": "MIT",
"bin": {
"tree-kill": "cli.js"
}
},
"node_modules/ts-jest": { "node_modules/ts-jest": {
"version": "29.3.4", "version": "29.3.4",
"resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.3.4.tgz", "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.3.4.tgz",
@ -5444,6 +5777,13 @@
"url": "https://github.com/sponsors/sindresorhus" "url": "https://github.com/sponsors/sindresorhus"
} }
}, },
"node_modules/tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
"dev": true,
"license": "0BSD"
},
"node_modules/type-detect": { "node_modules/type-detect": {
"version": "4.0.8", "version": "4.0.8",
"resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz",
@ -5490,6 +5830,13 @@
"node": ">=14.17" "node": ">=14.17"
} }
}, },
"node_modules/undefsafe": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz",
"integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==",
"dev": true,
"license": "MIT"
},
"node_modules/undici-types": { "node_modules/undici-types": {
"version": "6.21.0", "version": "6.21.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",

View File

@ -9,7 +9,10 @@
"deploy:event": "gcloud functions deploy promptToTestSpecEvent --gen2 --runtime=nodejs20 --source=. --trigger-event=google.cloud.storage.object.v1.finalized --trigger-resource=YOUR_BUCKET_NAME", "deploy:event": "gcloud functions deploy promptToTestSpecEvent --gen2 --runtime=nodejs20 --source=. --trigger-event=google.cloud.storage.object.v1.finalized --trigger-resource=YOUR_BUCKET_NAME",
"clean": "rm -rf dist", "clean": "rm -rf dist",
"test": "jest", "test": "jest",
"test:watch": "jest --watch" "test:watch": "jest --watch",
"dev": "npm run build && functions-framework --target=promptToTestSpecHttp --port=18080",
"dev:watch": "concurrently \"tsc -w\" \"nodemon --watch dist/ --exec functions-framework --target=promptToTestSpecHttp --port=18080\"",
"dev:event": "npm run build && functions-framework --target=promptToTestSpecEvent --signature-type=event"
}, },
"main": "dist/index.js", "main": "dist/index.js",
"dependencies": { "dependencies": {
@ -23,7 +26,9 @@
"@types/express": "^5.0.3", "@types/express": "^5.0.3",
"@types/jest": "^29.5.12", "@types/jest": "^29.5.12",
"@types/node": "^20.11.30", "@types/node": "^20.11.30",
"concurrently": "^8.2.2",
"jest": "^29.7.0", "jest": "^29.7.0",
"nodemon": "^3.0.3",
"ts-jest": "^29.1.2", "ts-jest": "^29.1.2",
"typescript": "^5.8.3" "typescript": "^5.8.3"
}, },

View File

@ -30,6 +30,8 @@ export const GEMINI_MODEL = process.env.GEMINI_MODEL || 'gemini-1.5-pro';
// Function configuration // Function configuration
export const DEBUG = process.env.DEBUG === 'true'; export const DEBUG = process.env.DEBUG === 'true';
export const USE_LOCAL_REPO = process.env.USE_LOCAL_REPO === 'true'; export const USE_LOCAL_REPO = process.env.USE_LOCAL_REPO === 'true';
export const DRY_RUN_SKIP_GEMINI = process.env.DRY_RUN_SKIP_GEMINI === 'true';
export const DRY_RUN_SKIP_COMMITS = process.env.DRY_RUN_SKIP_COMMITS === 'true';
// Validate required configuration // Validate required configuration
export function validateConfig(): void { export function validateConfig(): void {

View File

@ -1,6 +1,6 @@
import {CloudEvent, cloudEvent, http} from '@google-cloud/functions-framework'; import {CloudEvent, cloudEvent, http} from '@google-cloud/functions-framework';
import { ProcessorService } from './services/processor-service'; import { ProcessorService } from './services/processor-service';
import { validateConfig } from './config'; import { validateConfig, DRY_RUN_SKIP_GEMINI, DRY_RUN_SKIP_COMMITS } from './config';
// Validate configuration on startup // Validate configuration on startup
try { try {
@ -10,6 +10,34 @@ try {
// Don't throw here to allow the function to start, but it will fail when executed // Don't throw here to allow the function to start, but it will fail when executed
} }
// Check if this is being run directly (via npm start)
const isRunningDirectly = require.main === module;
if (isRunningDirectly) {
console.log('Starting prompts-to-test-spec directly...');
// Log dry run status
if (DRY_RUN_SKIP_GEMINI) {
console.log('DRY RUN: Gemini API calls will be skipped');
}
if (DRY_RUN_SKIP_COMMITS) {
console.log('DRY RUN: Commits and PRs will not be created');
}
// Run the processor
(async () => {
try {
const processor = new ProcessorService();
console.log('Processing projects...');
const results = await processor.processProjects();
console.log('Processing completed successfully');
console.log('Results:', JSON.stringify(results, null, 2));
} catch (error) {
console.error('Error processing projects:', error);
process.exit(1);
}
})();
}
/** /**
* HTTP endpoint for the prompts-to-test-spec function * HTTP endpoint for the prompts-to-test-spec function
*/ */

View File

@ -0,0 +1,259 @@
import * as fs from 'fs';
import * as path from 'path';
import { ProcessorService } from '../processor-service';
import { ProjectService } from '../project-service';
import { RepositoryService } from '../repository-service';
import { Project, Workitem, ProcessResult } from '../../types';
// Mock dependencies
jest.mock('../project-service');
jest.mock('../repository-service');
jest.mock('../../config', () => ({
validateConfig: jest.fn(),
getMainRepoCredentials: jest.fn().mockReturnValue({ type: 'token', token: 'mock-token' }),
getGithubCredentials: jest.fn().mockReturnValue({ type: 'token', token: 'mock-token' }),
getGiteaCredentials: jest.fn().mockReturnValue({ type: 'token', token: 'mock-token' }),
MAIN_REPO_URL: 'https://github.com/org/main-repo.git',
GOOGLE_CLOUD_PROJECT_ID: 'mock-project-id',
GOOGLE_CLOUD_LOCATION: 'mock-location',
GEMINI_MODEL: 'mock-model',
USE_LOCAL_REPO: false,
DRY_RUN_SKIP_COMMITS: false
}));
describe('ProcessorService', () => {
let processorService: ProcessorService;
let mockProjectService: jest.Mocked<ProjectService>;
let mockRepositoryService: jest.Mocked<RepositoryService>;
beforeEach(() => {
jest.clearAllMocks();
processorService = new ProcessorService();
mockProjectService = ProjectService.prototype as jest.Mocked<ProjectService>;
mockRepositoryService = RepositoryService.prototype as jest.Mocked<RepositoryService>;
});
describe('updateWorkitemFilesWithPullRequestUrls', () => {
it('should update workitem files with pull request URLs and commit changes', async () => {
// Create test data
const mainRepoPath = '/path/to/main/repo';
const project: Project = {
name: 'test-project',
path: '/path/to/project',
repoHost: 'https://github.com',
repoUrl: 'https://github.com/org/test-project.git'
};
const workitem1: Workitem = {
name: 'workitem1',
path: '/path/to/workitem1.md',
title: 'Workitem 1',
description: 'Description 1',
isActive: true
};
const workitem2: Workitem = {
name: 'workitem2',
path: '/path/to/workitem2.md',
title: 'Workitem 2',
description: 'Description 2',
isActive: true
};
const results: ProcessResult[] = [
{
project,
processedWorkitems: [
{ workitem: workitem1, success: true },
{ workitem: workitem2, success: true }
],
pullRequestUrl: 'https://github.com/org/test-project/pull/123'
}
];
// Mock the updateWorkitemWithPullRequestUrl method
mockProjectService.updateWorkitemWithPullRequestUrl.mockImplementation(
async (workitem, pullRequestUrl) => {
return { ...workitem, pullRequestUrl };
}
);
// Call the method
await (processorService as any).updateWorkitemFilesWithPullRequestUrls(results, mainRepoPath);
// Verify the method calls
expect(mockRepositoryService.createBranch).toHaveBeenCalledWith(
mainRepoPath,
expect.stringMatching(/update-workitem-pr-urls-\d{4}-\d{2}-\d{2}/)
);
expect(mockProjectService.updateWorkitemWithPullRequestUrl).toHaveBeenCalledTimes(2);
expect(mockProjectService.updateWorkitemWithPullRequestUrl).toHaveBeenCalledWith(
workitem1,
'https://github.com/org/test-project/pull/123'
);
expect(mockProjectService.updateWorkitemWithPullRequestUrl).toHaveBeenCalledWith(
workitem2,
'https://github.com/org/test-project/pull/123'
);
expect(mockRepositoryService.commitChanges).toHaveBeenCalledWith(
mainRepoPath,
expect.stringMatching(/Update workitem files with pull request URLs: \d{4}-\d{2}-\d{2}/)
);
expect(mockRepositoryService.pushChanges).toHaveBeenCalledWith(
mainRepoPath,
expect.stringMatching(/update-workitem-pr-urls-\d{4}-\d{2}-\d{2}/),
expect.anything()
);
});
it('should handle deactivated workitems correctly', async () => {
// Create test data
const mainRepoPath = '/path/to/main/repo';
const project: Project = {
name: 'test-project',
path: '/path/to/project',
repoHost: 'https://github.com',
repoUrl: 'https://github.com/org/test-project.git'
};
const activeWorkitem: Workitem = {
name: 'active-workitem',
path: '/path/to/active-workitem.md',
title: 'Active Workitem',
description: 'This is an active workitem',
isActive: true
};
const deactivatedWorkitem: Workitem = {
name: 'deactivated-workitem',
path: '/path/to/deactivated-workitem.md',
title: 'Deactivated Workitem',
description: 'This is a deactivated workitem',
isActive: false
};
const results: ProcessResult[] = [
{
project,
processedWorkitems: [
{ workitem: activeWorkitem, success: true },
{ workitem: deactivatedWorkitem, success: true }
],
pullRequestUrl: 'https://github.com/org/test-project/pull/123'
}
];
// Mock the updateWorkitemWithPullRequestUrl method
mockProjectService.updateWorkitemWithPullRequestUrl.mockImplementation(
async (workitem, pullRequestUrl) => {
return { ...workitem, pullRequestUrl };
}
);
// Call the method
await (processorService as any).updateWorkitemFilesWithPullRequestUrls(results, mainRepoPath);
// Verify the method calls
expect(mockRepositoryService.createBranch).toHaveBeenCalledWith(
mainRepoPath,
expect.stringMatching(/update-workitem-pr-urls-\d{4}-\d{2}-\d{2}/)
);
// Should only update the active workitem
expect(mockProjectService.updateWorkitemWithPullRequestUrl).toHaveBeenCalledTimes(2);
expect(mockProjectService.updateWorkitemWithPullRequestUrl).toHaveBeenCalledWith(
activeWorkitem,
'https://github.com/org/test-project/pull/123'
);
expect(mockProjectService.updateWorkitemWithPullRequestUrl).toHaveBeenCalledWith(
deactivatedWorkitem,
'https://github.com/org/test-project/pull/123'
);
expect(mockRepositoryService.commitChanges).toHaveBeenCalledWith(
mainRepoPath,
expect.stringMatching(/Update workitem files with pull request URLs: \d{4}-\d{2}-\d{2}/)
);
expect(mockRepositoryService.pushChanges).toHaveBeenCalledWith(
mainRepoPath,
expect.stringMatching(/update-workitem-pr-urls-\d{4}-\d{2}-\d{2}/),
expect.anything()
);
});
it('should not commit changes if no workitems were updated', async () => {
// Create test data with no pull request URL
const mainRepoPath = '/path/to/main/repo';
const project: Project = {
name: 'test-project',
path: '/path/to/project',
repoHost: 'https://github.com',
repoUrl: 'https://github.com/org/test-project.git'
};
const results: ProcessResult[] = [
{
project,
processedWorkitems: [],
// No pull request URL
}
];
// Call the method
await (processorService as any).updateWorkitemFilesWithPullRequestUrls(results, mainRepoPath);
// Verify the method calls
expect(mockRepositoryService.createBranch).toHaveBeenCalled();
expect(mockProjectService.updateWorkitemWithPullRequestUrl).not.toHaveBeenCalled();
expect(mockRepositoryService.commitChanges).not.toHaveBeenCalled();
expect(mockRepositoryService.pushChanges).not.toHaveBeenCalled();
});
it('should handle errors when updating workitem files', async () => {
  // Arrange: one successfully processed workitem whose file update will fail.
  const repoRoot = '/path/to/main/repo';
  const testProject: Project = {
    name: 'test-project',
    path: '/path/to/project',
    repoHost: 'https://github.com',
    repoUrl: 'https://github.com/org/test-project.git'
  };
  const testWorkitem: Workitem = {
    name: 'workitem',
    path: '/path/to/workitem.md',
    title: 'Workitem',
    description: 'Description',
    isActive: true
  };
  const processResults: ProcessResult[] = [
    {
      project: testProject,
      processedWorkitems: [
        { workitem: testWorkitem, success: true }
      ],
      pullRequestUrl: 'https://github.com/org/test-project/pull/123'
    }
  ];
  // Make the update call reject once to drive the error path.
  mockProjectService.updateWorkitemWithPullRequestUrl.mockRejectedValueOnce(
    new Error('Failed to update workitem')
  );
  // Act
  await (processorService as any).updateWorkitemFilesWithPullRequestUrls(processResults, repoRoot);
  // Assert: the per-workitem failure is tolerated (update was attempted),
  // and since nothing was actually updated there is no commit or push.
  expect(mockRepositoryService.createBranch).toHaveBeenCalled();
  expect(mockProjectService.updateWorkitemWithPullRequestUrl).toHaveBeenCalled();
  expect(mockRepositoryService.commitChanges).not.toHaveBeenCalled();
  expect(mockRepositoryService.pushChanges).not.toHaveBeenCalled();
});
});
});

View File

@ -29,9 +29,9 @@ describe('ProjectService', () => {
{ name: 'README.md', isDirectory: () => false } { name: 'README.md', isDirectory: () => false }
]); ]);
// Mock fs.existsSync to return true for INFO.md files // Mock fs.existsSync to return true for prompts directory and INFO.md files
(fs.existsSync as jest.Mock).mockImplementation((path: string) => { (fs.existsSync as jest.Mock).mockImplementation((path: string) => {
return path.endsWith('project1/INFO.md') || path.endsWith('project2/INFO.md'); return path === 'prompts' || path.endsWith('project1/INFO.md') || path.endsWith('project2/INFO.md');
}); });
// Mock readProjectInfo // Mock readProjectInfo
@ -225,4 +225,136 @@ This is a description of the workitem.
expect(fs.readFileSync).not.toHaveBeenCalled(); expect(fs.readFileSync).not.toHaveBeenCalled();
}); });
}); });
// Tests for ProjectService.updateWorkitemWithPullRequestUrl: writing a
// "- [x] Pull Request: <url>" metadata line into a workitem markdown file.
// NOTE(review): the template-literal fixtures below are compared verbatim
// against fs.writeFileSync output, so their exact whitespace is significant.
describe('updateWorkitemWithPullRequestUrl', () => {
it('should add pull request URL to workitem file that does not have one', async () => {
// File without a Pull Request line: the URL is inserted before "Active".
const workitemContent = `## Workitem Title
This is a description of the workitem.
- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Active
`;
const expectedUpdatedContent = `## Workitem Title
This is a description of the workitem.
- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Pull Request: https://github.com/org/repo/pull/123
- [x] Active
`;
const workitem = {
name: 'workitem',
path: 'path/to/workitem.md',
title: 'Workitem Title',
description: 'This is a description of the workitem.',
jiraReference: 'JIRA-123',
implementation: '',
isActive: true
};
const pullRequestUrl = 'https://github.com/org/repo/pull/123';
// Mock fs.existsSync to return true for workitem file
(fs.existsSync as jest.Mock).mockReturnValueOnce(true);
// Mock fs.readFileSync to return workitem content
(fs.readFileSync as jest.Mock).mockReturnValueOnce(workitemContent);
// Mock fs.writeFileSync
(fs.writeFileSync as jest.Mock).mockImplementationOnce(() => {});
const updatedWorkitem = await projectService.updateWorkitemWithPullRequestUrl(workitem, pullRequestUrl);
// Returned workitem carries the new URL alongside the original fields.
expect(updatedWorkitem).toEqual({
...workitem,
pullRequestUrl
});
expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
expect(fs.readFileSync).toHaveBeenCalledWith('path/to/workitem.md', 'utf-8');
expect(fs.writeFileSync).toHaveBeenCalledWith('path/to/workitem.md', expectedUpdatedContent, 'utf-8');
});
it('should update existing pull request URL in workitem file', async () => {
// File already contains a Pull Request line (PR #100): it is replaced
// in place with the new URL (PR #123) rather than duplicated.
const workitemContent = `## Workitem Title
This is a description of the workitem.
- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Pull Request: https://github.com/org/repo/pull/100
- [x] Active
`;
const expectedUpdatedContent = `## Workitem Title
This is a description of the workitem.
- [x] Jira: JIRA-123
- [ ] Implementation:
- [x] Pull Request: https://github.com/org/repo/pull/123
- [x] Active
`;
const workitem = {
name: 'workitem',
path: 'path/to/workitem.md',
title: 'Workitem Title',
description: 'This is a description of the workitem.',
jiraReference: 'JIRA-123',
implementation: '',
pullRequestUrl: 'https://github.com/org/repo/pull/100',
isActive: true
};
const pullRequestUrl = 'https://github.com/org/repo/pull/123';
// Mock fs.existsSync to return true for workitem file
(fs.existsSync as jest.Mock).mockReturnValueOnce(true);
// Mock fs.readFileSync to return workitem content
(fs.readFileSync as jest.Mock).mockReturnValueOnce(workitemContent);
// Mock fs.writeFileSync
(fs.writeFileSync as jest.Mock).mockImplementationOnce(() => {});
const updatedWorkitem = await projectService.updateWorkitemWithPullRequestUrl(workitem, pullRequestUrl);
// The returned object's pullRequestUrl is overwritten with the new URL.
expect(updatedWorkitem).toEqual({
...workitem,
pullRequestUrl
});
expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
expect(fs.readFileSync).toHaveBeenCalledWith('path/to/workitem.md', 'utf-8');
expect(fs.writeFileSync).toHaveBeenCalledWith('path/to/workitem.md', expectedUpdatedContent, 'utf-8');
});
it('should throw error if workitem file does not exist', async () => {
// Missing file: the method must throw and never read or write anything.
const workitem = {
name: 'workitem',
path: 'path/to/workitem.md',
title: 'Workitem Title',
description: 'This is a description of the workitem.',
jiraReference: 'JIRA-123',
implementation: '',
isActive: true
};
const pullRequestUrl = 'https://github.com/org/repo/pull/123';
// Mock fs.existsSync to return false for workitem file
(fs.existsSync as jest.Mock).mockReturnValueOnce(false);
await expect(projectService.updateWorkitemWithPullRequestUrl(workitem, pullRequestUrl))
.rejects.toThrow('Workitem file not found: path/to/workitem.md');
expect(fs.existsSync).toHaveBeenCalledWith('path/to/workitem.md');
expect(fs.readFileSync).not.toHaveBeenCalled();
expect(fs.writeFileSync).not.toHaveBeenCalled();
});
});
}); });

View File

@ -5,7 +5,12 @@ import { VertexAI } from '@google-cloud/vertexai';
import * as fs from 'fs'; import * as fs from 'fs';
import * as path from 'path'; import * as path from 'path';
import { Project, Workitem } from '../types'; import { Project, Workitem } from '../types';
import { GOOGLE_CLOUD_PROJECT_ID, GOOGLE_CLOUD_LOCATION, GEMINI_MODEL } from '../config'; import {
GOOGLE_CLOUD_PROJECT_ID,
GOOGLE_CLOUD_LOCATION,
GEMINI_MODEL,
DRY_RUN_SKIP_GEMINI
} from '../config';
export class GeminiService { export class GeminiService {
private vertexAI: VertexAI; private vertexAI: VertexAI;
@ -120,12 +125,29 @@ export class GeminiService {
workitemContent: string, workitemContent: string,
workitemName: string workitemName: string
): Promise<string> { ): Promise<string> {
const currentDate = new Date().toISOString();
// If dry run is enabled, return a mock feature file
if (DRY_RUN_SKIP_GEMINI) {
console.log(`[DRY RUN] Skipping Gemini API call for generating feature file for ${workitemName}`);
return `# Generated by prompts-to-test-spec on ${currentDate} (DRY RUN)
# Source: ${workitemName}
Feature: ${workitemName} (DRY RUN)
This is a mock feature file generated during dry run.
No actual Gemini API call was made.
Scenario: Mock scenario
Given a dry run is enabled
When the feature file is generated
Then a mock feature file is returned
`;
}
const generativeModel = this.vertexAI.getGenerativeModel({ const generativeModel = this.vertexAI.getGenerativeModel({
model: this.model, model: this.model,
}); });
const currentDate = new Date().toISOString();
// Send the AI.md file directly to Gemini without hardcoded instructions // Send the AI.md file directly to Gemini without hardcoded instructions
const prompt = ` const prompt = `
${guidelines} ${guidelines}
@ -156,10 +178,6 @@ Include the following comment at the top of the generated file:
async generatePullRequestDescription( async generatePullRequestDescription(
processedWorkitems: { workitem: Workitem; success: boolean; error?: string }[] processedWorkitems: { workitem: Workitem; success: boolean; error?: string }[]
): Promise<string> { ): Promise<string> {
const generativeModel = this.vertexAI.getGenerativeModel({
model: this.model,
});
// Prepare workitem data for the prompt // Prepare workitem data for the prompt
const added: string[] = []; const added: string[] = [];
const updated: string[] = []; const updated: string[] = [];
@ -183,7 +201,7 @@ Include the following comment at the top of the generated file:
} }
} }
// Create a structured summary of changes for Gemini // Create a structured summary of changes
let workitemSummary = ''; let workitemSummary = '';
if (added.length > 0) { if (added.length > 0) {
@ -202,6 +220,24 @@ Include the following comment at the top of the generated file:
workitemSummary += 'Failed workitems:\n' + failed.join('\n') + '\n\n'; workitemSummary += 'Failed workitems:\n' + failed.join('\n') + '\n\n';
} }
// If dry run is enabled, return a mock PR description
if (DRY_RUN_SKIP_GEMINI) {
console.log(`[DRY RUN] Skipping Gemini API call for generating pull request description`);
return `# Automated PR: Update Workitems (DRY RUN)
This pull request was automatically generated by the prompts-to-test-spec function in dry run mode.
## Changes Summary
${workitemSummary}
*Note: This is a mock PR description generated during dry run. No actual Gemini API call was made.*`;
}
const generativeModel = this.vertexAI.getGenerativeModel({
model: this.model,
});
const prompt = ` const prompt = `
You are tasked with creating a pull request description for changes to test specifications. You are tasked with creating a pull request description for changes to test specifications.

View File

@ -16,7 +16,8 @@ import {
GOOGLE_CLOUD_PROJECT_ID, GOOGLE_CLOUD_PROJECT_ID,
GOOGLE_CLOUD_LOCATION, GOOGLE_CLOUD_LOCATION,
GEMINI_MODEL, GEMINI_MODEL,
USE_LOCAL_REPO USE_LOCAL_REPO,
DRY_RUN_SKIP_COMMITS
} from '../config'; } from '../config';
export class ProcessorService { export class ProcessorService {
@ -89,13 +90,20 @@ export class ProcessorService {
*/ */
async processProjects(): Promise<ProcessResult[]> { async processProjects(): Promise<ProcessResult[]> {
const results: ProcessResult[] = []; const results: ProcessResult[] = [];
let mainRepoPath: string;
try { try {
// Use local repository or clone the main repository // Use local repository or clone the main repository
let mainRepoPath: string;
if (USE_LOCAL_REPO) { if (USE_LOCAL_REPO) {
console.log('Using local repository path'); console.log('Using local repository path');
mainRepoPath = path.resolve(__dirname, '../../..'); // When running with functions-framework, we need to navigate up to the project root
// Check if we're in the prompts-to-test-spec directory and navigate up if needed
const currentDir = process.cwd();
if (currentDir.endsWith('prompts-to-test-spec')) {
mainRepoPath = path.resolve(currentDir, '../../..');
} else {
mainRepoPath = currentDir;
}
console.log(`Resolved local repository path: ${mainRepoPath}`); console.log(`Resolved local repository path: ${mainRepoPath}`);
} else { } else {
console.log(`Cloning main repository: ${this.mainRepoUrl}`); console.log(`Cloning main repository: ${this.mainRepoUrl}`);
@ -141,6 +149,13 @@ export class ProcessorService {
} }
console.log(`Finished processing all ${projects.length} projects`); console.log(`Finished processing all ${projects.length} projects`);
// Update workitem files with pull request URLs and commit changes
if (!DRY_RUN_SKIP_COMMITS) {
await this.updateWorkitemFilesWithPullRequestUrls(results, mainRepoPath);
} else {
console.log('[DRY RUN] Skipping workitem files update and commit');
}
return results; return results;
} catch (error) { } catch (error) {
console.error('Error processing projects:', error); console.error('Error processing projects:', error);
@ -148,6 +163,58 @@ export class ProcessorService {
} }
} }
/**
* Update workitem files with pull request URLs and commit changes to the main repository
* @param results Process results containing pull request URLs
* @param mainRepoPath Path to the main repository
*/
private async updateWorkitemFilesWithPullRequestUrls(results: ProcessResult[], mainRepoPath: string): Promise<void> {
console.log('Updating workitem files with pull request URLs...');
let updatedAnyWorkitem = false;
// Create a new branch for the changes
const branchName = `update-workitem-pr-urls-${new Date().toISOString().split('T')[0]}`;
await this.repositoryService.createBranch(mainRepoPath, branchName);
// Update each workitem file with its pull request URL
for (const result of results) {
if (!result.pullRequestUrl) {
console.log(`Skipping project ${result.project.name}: No pull request URL`);
continue;
}
for (const processedWorkitem of result.processedWorkitems) {
if (processedWorkitem.success) {
try {
console.log(`Updating workitem ${processedWorkitem.workitem.name} with PR URL: ${result.pullRequestUrl}`);
await this.projectService.updateWorkitemWithPullRequestUrl(
processedWorkitem.workitem,
result.pullRequestUrl
);
updatedAnyWorkitem = true;
} catch (error) {
console.error(`Error updating workitem ${processedWorkitem.workitem.name}:`, error);
}
}
}
}
// Commit and push changes if any workitems were updated
if (updatedAnyWorkitem) {
console.log('Committing changes to workitem files...');
await this.repositoryService.commitChanges(
mainRepoPath,
`Update workitem files with pull request URLs: ${new Date().toISOString().split('T')[0]}`
);
console.log('Pushing changes to main repository...');
await this.repositoryService.pushChanges(mainRepoPath, branchName, this.mainRepoCredentials);
console.log('Successfully updated workitem files with pull request URLs');
} else {
console.log('No workitem files were updated');
}
}
/** /**
* Process a single project * Process a single project
* @param project Project information * @param project Project information
@ -206,6 +273,16 @@ export class ProcessorService {
}; };
} }
// Skip creating commits/PRs if dry run is enabled
if (DRY_RUN_SKIP_COMMITS) {
console.log(`[DRY RUN] Skipping commit and PR creation for project ${project.name}`);
return {
project,
processedWorkitems,
pullRequestUrl: 'https://example.com/mock-pr-url (DRY RUN)'
};
}
// Commit changes // Commit changes
await this.repositoryService.commitChanges( await this.repositoryService.commitChanges(
projectRepoPath, projectRepoPath,

View File

@ -123,6 +123,7 @@ export class ProjectService {
const titleMatch = content.match(/## (.*)/); const titleMatch = content.match(/## (.*)/);
const jiraMatch = content.match(/- \[[ x]\] Jira: (.*)/); const jiraMatch = content.match(/- \[[ x]\] Jira: (.*)/);
const implementationMatch = content.match(/- \[[ x]\] Implementation: (.*)/); const implementationMatch = content.match(/- \[[ x]\] Implementation: (.*)/);
const pullRequestUrlMatch = content.match(/- \[[ x]\] Pull Request: (.*)/);
const activeMatch = content.match(/- \[([x ])\] Active/); const activeMatch = content.match(/- \[([x ])\] Active/);
// Extract description (everything between title and first metadata line) // Extract description (everything between title and first metadata line)
@ -154,6 +155,7 @@ export class ProjectService {
description, description,
jiraReference: jiraMatch ? jiraMatch[1].trim() : undefined, jiraReference: jiraMatch ? jiraMatch[1].trim() : undefined,
implementation: implementationMatch ? implementationMatch[1].trim() : undefined, implementation: implementationMatch ? implementationMatch[1].trim() : undefined,
pullRequestUrl: pullRequestUrlMatch ? pullRequestUrlMatch[1].trim() : undefined,
isActive isActive
}; };
} }
@ -172,4 +174,60 @@ export class ProjectService {
return fs.readFileSync(aiPath, 'utf-8'); return fs.readFileSync(aiPath, 'utf-8');
} }
/**
 * Record a pull request URL inside a workitem markdown file.
 *
 * If a "- [ ] Pull Request:" metadata line already exists it is overwritten;
 * otherwise a new line is inserted just before the "Active" line, or after
 * the last checkbox metadata line, or appended at the end of the file.
 * @param workitem Workitem whose file is updated (object itself is not mutated)
 * @param pullRequestUrl Pull request URL to record
 * @returns A copy of the workitem with pullRequestUrl set
 * @throws Error when the workitem file does not exist
 */
async updateWorkitemWithPullRequestUrl(workitem: Workitem, pullRequestUrl: string): Promise<Workitem> {
  if (!fs.existsSync(workitem.path)) {
    throw new Error(`Workitem file not found: ${workitem.path}`);
  }
  const fileLines = fs.readFileSync(workitem.path, 'utf-8').split('\n');
  const prLine = `- [x] Pull Request: ${pullRequestUrl}`;
  // Case 1: a Pull Request line already exists — overwrite it in place.
  const existingIndex = fileLines.findIndex(line => line.match(/- \[[ x]\] Pull Request:/));
  if (existingIndex >= 0) {
    fileLines[existingIndex] = prLine;
  } else {
    // Case 2: insert just before the Active line when present.
    const activeIndex = fileLines.findIndex(line => line.match(/- \[[ x]\] Active/));
    if (activeIndex >= 0) {
      fileLines.splice(activeIndex, 0, prLine);
    } else {
      // Case 3: insert after the last checkbox-style metadata line.
      let lastMetadataIndex = -1;
      fileLines.forEach((line, index) => {
        if (line.match(/- \[[ x]\]/)) {
          lastMetadataIndex = index;
        }
      });
      if (lastMetadataIndex >= 0) {
        fileLines.splice(lastMetadataIndex + 1, 0, prLine);
      } else {
        // Case 4: no metadata at all — append at the end.
        fileLines.push(prLine);
      }
    }
  }
  fs.writeFileSync(workitem.path, fileLines.join('\n'), 'utf-8');
  return { ...workitem, pullRequestUrl };
}
} }

View File

@ -17,6 +17,7 @@ export interface Workitem {
description: string; description: string;
jiraReference?: string; jiraReference?: string;
implementation?: string; implementation?: string;
pullRequestUrl?: string;
isActive: boolean; isActive: boolean;
} }